gcc/ChangeLog
[official-gcc.git] / gcc / function.c
blob2c5a6d484f6816b513f46e5d6a6b51cb00671e49
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "cfghooks.h"
39 #include "tree.h"
40 #include "rtl.h"
41 #include "df.h"
42 #include "rtl-error.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "stor-layout.h"
46 #include "varasm.h"
47 #include "stringpool.h"
48 #include "flags.h"
49 #include "except.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "insn-codes.h"
59 #include "optabs.h"
60 #include "libfuncs.h"
61 #include "regs.h"
62 #include "recog.h"
63 #include "output.h"
64 #include "tm_p.h"
65 #include "langhooks.h"
66 #include "target.h"
67 #include "common/common-target.h"
68 #include "gimple-expr.h"
69 #include "gimplify.h"
70 #include "tree-pass.h"
71 #include "cfgrtl.h"
72 #include "cfganal.h"
73 #include "cfgbuild.h"
74 #include "cfgcleanup.h"
75 #include "cfgexpand.h"
76 #include "basic-block.h"
77 #include "df.h"
78 #include "params.h"
79 #include "bb-reorder.h"
80 #include "shrink-wrap.h"
81 #include "toplev.h"
82 #include "rtl-iter.h"
83 #include "tree-chkp.h"
84 #include "rtl-chkp.h"
86 /* So we can assign to cfun in this file. */
87 #undef cfun
89 #ifndef STACK_ALIGNMENT_NEEDED
90 #define STACK_ALIGNMENT_NEEDED 1
91 #endif
93 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
95 /* Round a value to the lowest integer less than it that is a multiple of
96 the required alignment. Avoid using division in case the value is
97 negative. Assume the alignment is a power of two. */
98 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
100 /* Similar, but round to the next highest integer that meets the
101 alignment. */
102 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
104 /* Nonzero once virtual register instantiation has been done.
105 assign_stack_local uses frame_pointer_rtx when this is nonzero.
106 calls.c:emit_library_call_value_1 uses it to set up
107 post-instantiation libcalls. */
108 int virtuals_instantiated;
110 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
111 static GTY(()) int funcdef_no;
113 /* These variables hold pointers to functions to create and destroy
114 target specific, per-function data structures. */
115 struct machine_function * (*init_machine_status) (void);
117 /* The currently compiled function. */
118 struct function *cfun = 0;
120 /* These hashes record the prologue and epilogue insns. */
122 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
124 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
125 static bool equal (rtx a, rtx b) { return a == b; }
128 static GTY((cache))
129 hash_table<insn_cache_hasher> *prologue_insn_hash;
130 static GTY((cache))
131 hash_table<insn_cache_hasher> *epilogue_insn_hash;
134 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
135 vec<tree, va_gc> *types_used_by_cur_var_decl;
137 /* Forward declarations. */
139 static struct temp_slot *find_temp_slot_from_address (rtx);
140 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
141 static void pad_below (struct args_size *, machine_mode, tree);
142 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
143 static int all_blocks (tree, tree *);
144 static tree *get_block_vector (tree, int *);
145 extern tree debug_find_var_in_block_tree (tree, tree);
146 /* We always define `record_insns' even if it's not used so that we
147 can always export `prologue_epilogue_contains'. */
148 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
149 ATTRIBUTE_UNUSED;
150 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
151 static void prepare_function_start (void);
152 static void do_clobber_return_reg (rtx, void *);
153 static void do_use_return_reg (rtx, void *);
154 static rtx rtl_for_parm (struct assign_parm_data_all *, tree);
155 static void maybe_reset_rtl_for_parm (tree);
156 static bool parm_in_unassigned_mem_p (tree, rtx);
159 /* Stack of nested functions. */
160 /* Keep track of the cfun stack. */
162 typedef struct function *function_p;
164 static vec<function_p> function_context_stack;
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
169 void
170 push_function_context (void)
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
175 function_context_stack.safe_push (cfun);
176 set_cfun (NULL);
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
182 void
183 pop_function_context (void)
185 struct function *p = function_context_stack.pop ();
186 set_cfun (p);
187 current_function_decl = p->decl;
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
198 void
199 free_after_parsing (struct function *f)
201 f->language = 0;
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
208 void
209 free_after_compilation (struct function *f)
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
214 free (crtl->emit.regno_pointer_align);
216 memset (crtl, 0, sizeof (struct rtl_data));
217 f->eh = NULL;
218 f->machine = NULL;
219 f->cfg = NULL;
220 f->curr_properties &= ~PROP_cfg;
222 regno_reg_rtx = NULL;
225 /* Return size needed for stack frame based on slots so far allocated.
226 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
227 the caller may have to do that. */
229 HOST_WIDE_INT
230 get_frame_size (void)
232 if (FRAME_GROWS_DOWNWARD)
233 return -frame_offset;
234 else
235 return frame_offset;
238 /* Issue an error message and return TRUE if frame OFFSET overflows in
239 the signed target pointer arithmetics for function FUNC. Otherwise
240 return FALSE. */
242 bool
243 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
245 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
247 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
248 /* Leave room for the fixed part of the frame. */
249 - 64 * UNITS_PER_WORD)
251 error_at (DECL_SOURCE_LOCATION (func),
252 "total size of local objects too large");
253 return TRUE;
256 return FALSE;
259 /* Return stack slot alignment in bits for TYPE and MODE. */
261 static unsigned int
262 get_stack_local_alignment (tree type, machine_mode mode)
264 unsigned int alignment;
266 if (mode == BLKmode)
267 alignment = BIGGEST_ALIGNMENT;
268 else
269 alignment = GET_MODE_ALIGNMENT (mode);
271 /* Allow the frond-end to (possibly) increase the alignment of this
272 stack slot. */
273 if (! type)
274 type = lang_hooks.types.type_for_mode (mode, 0);
276 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
279 /* Determine whether it is possible to fit a stack slot of size SIZE and
280 alignment ALIGNMENT into an area in the stack frame that starts at
281 frame offset START and has a length of LENGTH. If so, store the frame
282 offset to be used for the stack slot in *POFFSET and return true;
283 return false otherwise. This function will extend the frame size when
284 given a start/length pair that lies at the end of the frame. */
286 static bool
287 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
288 HOST_WIDE_INT size, unsigned int alignment,
289 HOST_WIDE_INT *poffset)
291 HOST_WIDE_INT this_frame_offset;
292 int frame_off, frame_alignment, frame_phase;
294 /* Calculate how many bytes the start of local variables is off from
295 stack alignment. */
296 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
297 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
298 frame_phase = frame_off ? frame_alignment - frame_off : 0;
300 /* Round the frame offset to the specified alignment. */
302 /* We must be careful here, since FRAME_OFFSET might be negative and
303 division with a negative dividend isn't as well defined as we might
304 like. So we instead assume that ALIGNMENT is a power of two and
305 use logical operations which are unambiguous. */
306 if (FRAME_GROWS_DOWNWARD)
307 this_frame_offset
308 = (FLOOR_ROUND (start + length - size - frame_phase,
309 (unsigned HOST_WIDE_INT) alignment)
310 + frame_phase);
311 else
312 this_frame_offset
313 = (CEIL_ROUND (start - frame_phase,
314 (unsigned HOST_WIDE_INT) alignment)
315 + frame_phase);
317 /* See if it fits. If this space is at the edge of the frame,
318 consider extending the frame to make it fit. Our caller relies on
319 this when allocating a new slot. */
320 if (frame_offset == start && this_frame_offset < frame_offset)
321 frame_offset = this_frame_offset;
322 else if (this_frame_offset < start)
323 return false;
324 else if (start + length == frame_offset
325 && this_frame_offset + size > start + length)
326 frame_offset = this_frame_offset + size;
327 else if (this_frame_offset + size > start + length)
328 return false;
330 *poffset = this_frame_offset;
331 return true;
334 /* Create a new frame_space structure describing free space in the stack
335 frame beginning at START and ending at END, and chain it into the
336 function's frame_space_list. */
338 static void
339 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
341 struct frame_space *space = ggc_alloc<frame_space> ();
342 space->next = crtl->frame_space_list;
343 crtl->frame_space_list = space;
344 space->start = start;
345 space->length = end - start;
348 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
349 with machine mode MODE.
351 ALIGN controls the amount of alignment for the address of the slot:
352 0 means according to MODE,
353 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
354 -2 means use BITS_PER_UNIT,
355 positive specifies alignment boundary in bits.
357 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
358 alignment and ASLK_RECORD_PAD bit set if we should remember
359 extra space we allocated for alignment purposes. When we are
360 called from assign_stack_temp_for_type, it is not set so we don't
361 track the same stack slot in two independent lists.
363 We do not round to stack_boundary here. */
366 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
367 int align, int kind)
369 rtx x, addr;
370 int bigend_correction = 0;
371 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
372 unsigned int alignment, alignment_in_bits;
374 if (align == 0)
376 alignment = get_stack_local_alignment (NULL, mode);
377 alignment /= BITS_PER_UNIT;
379 else if (align == -1)
381 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
382 size = CEIL_ROUND (size, alignment);
384 else if (align == -2)
385 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
386 else
387 alignment = align / BITS_PER_UNIT;
389 alignment_in_bits = alignment * BITS_PER_UNIT;
391 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
392 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
394 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
395 alignment = alignment_in_bits / BITS_PER_UNIT;
398 if (SUPPORTS_STACK_ALIGNMENT)
400 if (crtl->stack_alignment_estimated < alignment_in_bits)
402 if (!crtl->stack_realign_processed)
403 crtl->stack_alignment_estimated = alignment_in_bits;
404 else
406 /* If stack is realigned and stack alignment value
407 hasn't been finalized, it is OK not to increase
408 stack_alignment_estimated. The bigger alignment
409 requirement is recorded in stack_alignment_needed
410 below. */
411 gcc_assert (!crtl->stack_realign_finalized);
412 if (!crtl->stack_realign_needed)
414 /* It is OK to reduce the alignment as long as the
415 requested size is 0 or the estimated stack
416 alignment >= mode alignment. */
417 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
418 || size == 0
419 || (crtl->stack_alignment_estimated
420 >= GET_MODE_ALIGNMENT (mode)));
421 alignment_in_bits = crtl->stack_alignment_estimated;
422 alignment = alignment_in_bits / BITS_PER_UNIT;
428 if (crtl->stack_alignment_needed < alignment_in_bits)
429 crtl->stack_alignment_needed = alignment_in_bits;
430 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
431 crtl->max_used_stack_slot_alignment = alignment_in_bits;
433 if (mode != BLKmode || size != 0)
435 if (kind & ASLK_RECORD_PAD)
437 struct frame_space **psp;
439 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
441 struct frame_space *space = *psp;
442 if (!try_fit_stack_local (space->start, space->length, size,
443 alignment, &slot_offset))
444 continue;
445 *psp = space->next;
446 if (slot_offset > space->start)
447 add_frame_space (space->start, slot_offset);
448 if (slot_offset + size < space->start + space->length)
449 add_frame_space (slot_offset + size,
450 space->start + space->length);
451 goto found_space;
455 else if (!STACK_ALIGNMENT_NEEDED)
457 slot_offset = frame_offset;
458 goto found_space;
461 old_frame_offset = frame_offset;
463 if (FRAME_GROWS_DOWNWARD)
465 frame_offset -= size;
466 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
468 if (kind & ASLK_RECORD_PAD)
470 if (slot_offset > frame_offset)
471 add_frame_space (frame_offset, slot_offset);
472 if (slot_offset + size < old_frame_offset)
473 add_frame_space (slot_offset + size, old_frame_offset);
476 else
478 frame_offset += size;
479 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
481 if (kind & ASLK_RECORD_PAD)
483 if (slot_offset > old_frame_offset)
484 add_frame_space (old_frame_offset, slot_offset);
485 if (slot_offset + size < frame_offset)
486 add_frame_space (slot_offset + size, frame_offset);
490 found_space:
491 /* On a big-endian machine, if we are allocating more space than we will use,
492 use the least significant bytes of those that are allocated. */
493 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
494 bigend_correction = size - GET_MODE_SIZE (mode);
496 /* If we have already instantiated virtual registers, return the actual
497 address relative to the frame pointer. */
498 if (virtuals_instantiated)
499 addr = plus_constant (Pmode, frame_pointer_rtx,
500 trunc_int_for_mode
501 (slot_offset + bigend_correction
502 + STARTING_FRAME_OFFSET, Pmode));
503 else
504 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
505 trunc_int_for_mode
506 (slot_offset + bigend_correction,
507 Pmode));
509 x = gen_rtx_MEM (mode, addr);
510 set_mem_align (x, alignment_in_bits);
511 MEM_NOTRAP_P (x) = 1;
513 stack_slot_list
514 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
516 if (frame_offset_overflow (frame_offset, current_function_decl))
517 frame_offset = 0;
519 return x;
522 /* Wrap up assign_stack_local_1 with last parameter as false. */
525 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
527 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
530 /* In order to evaluate some expressions, such as function calls returning
531 structures in memory, we need to temporarily allocate stack locations.
532 We record each allocated temporary in the following structure.
534 Associated with each temporary slot is a nesting level. When we pop up
535 one level, all temporaries associated with the previous level are freed.
536 Normally, all temporaries are freed after the execution of the statement
537 in which they were created. However, if we are inside a ({...}) grouping,
538 the result may be in a temporary and hence must be preserved. If the
539 result could be in a temporary, we preserve it if we can determine which
540 one it is in. If we cannot determine which temporary may contain the
541 result, all temporaries are preserved. A temporary is preserved by
542 pretending it was allocated at the previous nesting level. */
544 struct GTY(()) temp_slot {
545 /* Points to next temporary slot. */
546 struct temp_slot *next;
547 /* Points to previous temporary slot. */
548 struct temp_slot *prev;
549 /* The rtx to used to reference the slot. */
550 rtx slot;
551 /* The size, in units, of the slot. */
552 HOST_WIDE_INT size;
553 /* The type of the object in the slot, or zero if it doesn't correspond
554 to a type. We use this to determine whether a slot can be reused.
555 It can be reused if objects of the type of the new slot will always
556 conflict with objects of the type of the old slot. */
557 tree type;
558 /* The alignment (in bits) of the slot. */
559 unsigned int align;
560 /* Nonzero if this temporary is currently in use. */
561 char in_use;
562 /* Nesting level at which this slot is being used. */
563 int level;
564 /* The offset of the slot from the frame_pointer, including extra space
565 for alignment. This info is for combine_temp_slots. */
566 HOST_WIDE_INT base_offset;
567 /* The size of the slot, including extra space for alignment. This
568 info is for combine_temp_slots. */
569 HOST_WIDE_INT full_size;
572 /* Entry for the below hash table. */
573 struct GTY((for_user)) temp_slot_address_entry {
574 hashval_t hash;
575 rtx address;
576 struct temp_slot *temp_slot;
579 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
581 static hashval_t hash (temp_slot_address_entry *);
582 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
585 /* A table of addresses that represent a stack slot. The table is a mapping
586 from address RTXen to a temp slot. */
587 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
588 static size_t n_temp_slots_in_use;
590 /* Removes temporary slot TEMP from LIST. */
592 static void
593 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
595 if (temp->next)
596 temp->next->prev = temp->prev;
597 if (temp->prev)
598 temp->prev->next = temp->next;
599 else
600 *list = temp->next;
602 temp->prev = temp->next = NULL;
605 /* Inserts temporary slot TEMP to LIST. */
607 static void
608 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
610 temp->next = *list;
611 if (*list)
612 (*list)->prev = temp;
613 temp->prev = NULL;
614 *list = temp;
617 /* Returns the list of used temp slots at LEVEL. */
619 static struct temp_slot **
620 temp_slots_at_level (int level)
622 if (level >= (int) vec_safe_length (used_temp_slots))
623 vec_safe_grow_cleared (used_temp_slots, level + 1);
625 return &(*used_temp_slots)[level];
628 /* Returns the maximal temporary slot level. */
630 static int
631 max_slot_level (void)
633 if (!used_temp_slots)
634 return -1;
636 return used_temp_slots->length () - 1;
639 /* Moves temporary slot TEMP to LEVEL. */
641 static void
642 move_slot_to_level (struct temp_slot *temp, int level)
644 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
645 insert_slot_to_list (temp, temp_slots_at_level (level));
646 temp->level = level;
649 /* Make temporary slot TEMP available. */
651 static void
652 make_slot_available (struct temp_slot *temp)
654 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
655 insert_slot_to_list (temp, &avail_temp_slots);
656 temp->in_use = 0;
657 temp->level = -1;
658 n_temp_slots_in_use--;
661 /* Compute the hash value for an address -> temp slot mapping.
662 The value is cached on the mapping entry. */
663 static hashval_t
664 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
666 int do_not_record = 0;
667 return hash_rtx (t->address, GET_MODE (t->address),
668 &do_not_record, NULL, false);
671 /* Return the hash value for an address -> temp slot mapping. */
672 hashval_t
673 temp_address_hasher::hash (temp_slot_address_entry *t)
675 return t->hash;
678 /* Compare two address -> temp slot mapping entries. */
679 bool
680 temp_address_hasher::equal (temp_slot_address_entry *t1,
681 temp_slot_address_entry *t2)
683 return exp_equiv_p (t1->address, t2->address, 0, true);
686 /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
687 static void
688 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
690 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
691 t->address = address;
692 t->temp_slot = temp_slot;
693 t->hash = temp_slot_address_compute_hash (t);
694 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
697 /* Remove an address -> temp slot mapping entry if the temp slot is
698 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
700 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
702 const struct temp_slot_address_entry *t = *slot;
703 if (! t->temp_slot->in_use)
704 temp_slot_address_table->clear_slot (slot);
705 return 1;
708 /* Remove all mappings of addresses to unused temp slots. */
709 static void
710 remove_unused_temp_slot_addresses (void)
712 /* Use quicker clearing if there aren't any active temp slots. */
713 if (n_temp_slots_in_use)
714 temp_slot_address_table->traverse
715 <void *, remove_unused_temp_slot_addresses_1> (NULL);
716 else
717 temp_slot_address_table->empty ();
720 /* Find the temp slot corresponding to the object at address X. */
722 static struct temp_slot *
723 find_temp_slot_from_address (rtx x)
725 struct temp_slot *p;
726 struct temp_slot_address_entry tmp, *t;
728 /* First try the easy way:
729 See if X exists in the address -> temp slot mapping. */
730 tmp.address = x;
731 tmp.temp_slot = NULL;
732 tmp.hash = temp_slot_address_compute_hash (&tmp);
733 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
734 if (t)
735 return t->temp_slot;
737 /* If we have a sum involving a register, see if it points to a temp
738 slot. */
739 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
740 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
741 return p;
742 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
743 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
744 return p;
746 /* Last resort: Address is a virtual stack var address. */
747 if (GET_CODE (x) == PLUS
748 && XEXP (x, 0) == virtual_stack_vars_rtx
749 && CONST_INT_P (XEXP (x, 1)))
751 int i;
752 for (i = max_slot_level (); i >= 0; i--)
753 for (p = *temp_slots_at_level (i); p; p = p->next)
755 if (INTVAL (XEXP (x, 1)) >= p->base_offset
756 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
757 return p;
761 return NULL;
764 /* Allocate a temporary stack slot and record it for possible later
765 reuse.
767 MODE is the machine mode to be given to the returned rtx.
769 SIZE is the size in units of the space required. We do no rounding here
770 since assign_stack_local will do any required rounding.
772 TYPE is the type that will be used for the stack slot. */
775 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
776 tree type)
778 unsigned int align;
779 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
780 rtx slot;
782 /* If SIZE is -1 it means that somebody tried to allocate a temporary
783 of a variable size. */
784 gcc_assert (size != -1);
786 align = get_stack_local_alignment (type, mode);
788 /* Try to find an available, already-allocated temporary of the proper
789 mode which meets the size and alignment requirements. Choose the
790 smallest one with the closest alignment.
792 If assign_stack_temp is called outside of the tree->rtl expansion,
793 we cannot reuse the stack slots (that may still refer to
794 VIRTUAL_STACK_VARS_REGNUM). */
795 if (!virtuals_instantiated)
797 for (p = avail_temp_slots; p; p = p->next)
799 if (p->align >= align && p->size >= size
800 && GET_MODE (p->slot) == mode
801 && objects_must_conflict_p (p->type, type)
802 && (best_p == 0 || best_p->size > p->size
803 || (best_p->size == p->size && best_p->align > p->align)))
805 if (p->align == align && p->size == size)
807 selected = p;
808 cut_slot_from_list (selected, &avail_temp_slots);
809 best_p = 0;
810 break;
812 best_p = p;
817 /* Make our best, if any, the one to use. */
818 if (best_p)
820 selected = best_p;
821 cut_slot_from_list (selected, &avail_temp_slots);
823 /* If there are enough aligned bytes left over, make them into a new
824 temp_slot so that the extra bytes don't get wasted. Do this only
825 for BLKmode slots, so that we can be sure of the alignment. */
826 if (GET_MODE (best_p->slot) == BLKmode)
828 int alignment = best_p->align / BITS_PER_UNIT;
829 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
831 if (best_p->size - rounded_size >= alignment)
833 p = ggc_alloc<temp_slot> ();
834 p->in_use = 0;
835 p->size = best_p->size - rounded_size;
836 p->base_offset = best_p->base_offset + rounded_size;
837 p->full_size = best_p->full_size - rounded_size;
838 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
839 p->align = best_p->align;
840 p->type = best_p->type;
841 insert_slot_to_list (p, &avail_temp_slots);
843 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
844 stack_slot_list);
846 best_p->size = rounded_size;
847 best_p->full_size = rounded_size;
852 /* If we still didn't find one, make a new temporary. */
853 if (selected == 0)
855 HOST_WIDE_INT frame_offset_old = frame_offset;
857 p = ggc_alloc<temp_slot> ();
859 /* We are passing an explicit alignment request to assign_stack_local.
860 One side effect of that is assign_stack_local will not round SIZE
861 to ensure the frame offset remains suitably aligned.
863 So for requests which depended on the rounding of SIZE, we go ahead
864 and round it now. We also make sure ALIGNMENT is at least
865 BIGGEST_ALIGNMENT. */
866 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
867 p->slot = assign_stack_local_1 (mode,
868 (mode == BLKmode
869 ? CEIL_ROUND (size,
870 (int) align
871 / BITS_PER_UNIT)
872 : size),
873 align, 0);
875 p->align = align;
877 /* The following slot size computation is necessary because we don't
878 know the actual size of the temporary slot until assign_stack_local
879 has performed all the frame alignment and size rounding for the
880 requested temporary. Note that extra space added for alignment
881 can be either above or below this stack slot depending on which
882 way the frame grows. We include the extra space if and only if it
883 is above this slot. */
884 if (FRAME_GROWS_DOWNWARD)
885 p->size = frame_offset_old - frame_offset;
886 else
887 p->size = size;
889 /* Now define the fields used by combine_temp_slots. */
890 if (FRAME_GROWS_DOWNWARD)
892 p->base_offset = frame_offset;
893 p->full_size = frame_offset_old - frame_offset;
895 else
897 p->base_offset = frame_offset_old;
898 p->full_size = frame_offset - frame_offset_old;
901 selected = p;
904 p = selected;
905 p->in_use = 1;
906 p->type = type;
907 p->level = temp_slot_level;
908 n_temp_slots_in_use++;
910 pp = temp_slots_at_level (p->level);
911 insert_slot_to_list (p, pp);
912 insert_temp_slot_address (XEXP (p->slot, 0), p);
914 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
915 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
916 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
918 /* If we know the alias set for the memory that will be used, use
919 it. If there's no TYPE, then we don't know anything about the
920 alias set for the memory. */
921 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
922 set_mem_align (slot, align);
924 /* If a type is specified, set the relevant flags. */
925 if (type != 0)
926 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
927 MEM_NOTRAP_P (slot) = 1;
929 return slot;
932 /* Allocate a temporary stack slot and record it for possible later
933 reuse. First two arguments are same as in preceding function. */
936 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
938 return assign_stack_temp_for_type (mode, size, NULL_TREE);
941 /* Assign a temporary.
942 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
943 and so that should be used in error messages. In either case, we
944 allocate of the given type.
945 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
946 it is 0 if a register is OK.
947 DONT_PROMOTE is 1 if we should not promote values in register
948 to wider modes. */
951 assign_temp (tree type_or_decl, int memory_required,
952 int dont_promote ATTRIBUTE_UNUSED)
954 tree type, decl;
955 machine_mode mode;
956 #ifdef PROMOTE_MODE
957 int unsignedp;
958 #endif
960 if (DECL_P (type_or_decl))
961 decl = type_or_decl, type = TREE_TYPE (decl);
962 else
963 decl = NULL, type = type_or_decl;
965 mode = TYPE_MODE (type);
966 #ifdef PROMOTE_MODE
967 unsignedp = TYPE_UNSIGNED (type);
968 #endif
970 if (mode == BLKmode || memory_required)
972 HOST_WIDE_INT size = int_size_in_bytes (type);
973 rtx tmp;
975 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
976 problems with allocating the stack space. */
977 if (size == 0)
978 size = 1;
980 /* Unfortunately, we don't yet know how to allocate variable-sized
981 temporaries. However, sometimes we can find a fixed upper limit on
982 the size, so try that instead. */
983 else if (size == -1)
984 size = max_int_size_in_bytes (type);
986 /* The size of the temporary may be too large to fit into an integer. */
987 /* ??? Not sure this should happen except for user silliness, so limit
988 this to things that aren't compiler-generated temporaries. The
989 rest of the time we'll die in assign_stack_temp_for_type. */
990 if (decl && size == -1
991 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
993 error ("size of variable %q+D is too large", decl);
994 size = 1;
997 tmp = assign_stack_temp_for_type (mode, size, type);
998 return tmp;
1001 #ifdef PROMOTE_MODE
1002 if (! dont_promote)
1003 mode = promote_mode (type, mode, &unsignedp);
1004 #endif
1006 return gen_reg_rtx (mode);
1009 /* Combine temporary stack slots which are adjacent on the stack.
1011 This allows for better use of already allocated stack space. This is only
1012 done for BLKmode slots because we can be sure that we won't have alignment
1013 problems in this case. */
1015 static void
1016 combine_temp_slots (void)
1018 struct temp_slot *p, *q, *next, *next_q;
1019 int num_slots;
1021 /* We can't combine slots, because the information about which slot
1022 is in which alias set will be lost. */
1023 if (flag_strict_aliasing)
1024 return;
1026 /* If there are a lot of temp slots, don't do anything unless
1027 high levels of optimization. */
1028 if (! flag_expensive_optimizations)
1029 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1030 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1031 return;
1033 for (p = avail_temp_slots; p; p = next)
1035 int delete_p = 0;
1037 next = p->next;
1039 if (GET_MODE (p->slot) != BLKmode)
1040 continue;
1042 for (q = p->next; q; q = next_q)
1044 int delete_q = 0;
1046 next_q = q->next;
1048 if (GET_MODE (q->slot) != BLKmode)
1049 continue;
1051 if (p->base_offset + p->full_size == q->base_offset)
1053 /* Q comes after P; combine Q into P. */
1054 p->size += q->size;
1055 p->full_size += q->full_size;
1056 delete_q = 1;
1058 else if (q->base_offset + q->full_size == p->base_offset)
1060 /* P comes after Q; combine P into Q. */
1061 q->size += p->size;
1062 q->full_size += p->full_size;
1063 delete_p = 1;
1064 break;
1066 if (delete_q)
1067 cut_slot_from_list (q, &avail_temp_slots);
1070 /* Either delete P or advance past it. */
1071 if (delete_p)
1072 cut_slot_from_list (p, &avail_temp_slots);
1076 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1077 slot that previously was known by OLD_RTX. */
1079 void
1080 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1082 struct temp_slot *p;
1084 if (rtx_equal_p (old_rtx, new_rtx))
1085 return;
1087 p = find_temp_slot_from_address (old_rtx);
1089 /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1090 NEW_RTX is a register, see if one operand of the PLUS is a
1091 temporary location. If so, NEW_RTX points into it. Otherwise,
1092 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1093 in common between them. If so, try a recursive call on those
1094 values. */
1095 if (p == 0)
1097 if (GET_CODE (old_rtx) != PLUS)
1098 return;
1100 if (REG_P (new_rtx))
1102 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1103 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1104 return;
1106 else if (GET_CODE (new_rtx) != PLUS)
1107 return;
1109 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1110 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1111 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1112 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1113 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1114 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1115 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1116 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1118 return;
1121 /* Otherwise add an alias for the temp's address. */
1122 insert_temp_slot_address (new_rtx, p);
1125 /* If X could be a reference to a temporary slot, mark that slot as
1126 belonging to the to one level higher than the current level. If X
1127 matched one of our slots, just mark that one. Otherwise, we can't
1128 easily predict which it is, so upgrade all of them.
1130 This is called when an ({...}) construct occurs and a statement
1131 returns a value in memory. */
1133 void
1134 preserve_temp_slots (rtx x)
1136 struct temp_slot *p = 0, *next;
1138 if (x == 0)
1139 return;
1141 /* If X is a register that is being used as a pointer, see if we have
1142 a temporary slot we know it points to. */
1143 if (REG_P (x) && REG_POINTER (x))
1144 p = find_temp_slot_from_address (x);
1146 /* If X is not in memory or is at a constant address, it cannot be in
1147 a temporary slot. */
1148 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1149 return;
1151 /* First see if we can find a match. */
1152 if (p == 0)
1153 p = find_temp_slot_from_address (XEXP (x, 0));
1155 if (p != 0)
1157 if (p->level == temp_slot_level)
1158 move_slot_to_level (p, temp_slot_level - 1);
1159 return;
1162 /* Otherwise, preserve all non-kept slots at this level. */
1163 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1165 next = p->next;
1166 move_slot_to_level (p, temp_slot_level - 1);
1170 /* Free all temporaries used so far. This is normally called at the
1171 end of generating code for a statement. */
1173 void
1174 free_temp_slots (void)
1176 struct temp_slot *p, *next;
1177 bool some_available = false;
1179 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1181 next = p->next;
1182 make_slot_available (p);
1183 some_available = true;
1186 if (some_available)
1188 remove_unused_temp_slot_addresses ();
1189 combine_temp_slots ();
1193 /* Push deeper into the nesting level for stack temporaries. */
1195 void
1196 push_temp_slots (void)
1198 temp_slot_level++;
1201 /* Pop a temporary nesting level. All slots in use in the current level
1202 are freed. */
1204 void
1205 pop_temp_slots (void)
1207 free_temp_slots ();
1208 temp_slot_level--;
1211 /* Initialize temporary slots. */
1213 void
1214 init_temp_slots (void)
1216 /* We have not allocated any temporaries yet. */
1217 avail_temp_slots = 0;
1218 vec_alloc (used_temp_slots, 0);
1219 temp_slot_level = 0;
1220 n_temp_slots_in_use = 0;
1222 /* Set up the table to map addresses to temp slots. */
1223 if (! temp_slot_address_table)
1224 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1225 else
1226 temp_slot_address_table->empty ();
1229 /* Functions and data structures to keep track of the values hard regs
1230 had at the start of the function. */
1232 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1233 and has_hard_reg_initial_val.. */
1234 struct GTY(()) initial_value_pair {
1235 rtx hard_reg;
1236 rtx pseudo;
1238 /* ??? This could be a VEC but there is currently no way to define an
1239 opaque VEC type. This could be worked around by defining struct
1240 initial_value_pair in function.h. */
1241 struct GTY(()) initial_value_struct {
1242 int num_entries;
1243 int max_entries;
1244 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1247 /* If a pseudo represents an initial hard reg (or expression), return
1248 it, else return NULL_RTX. */
1251 get_hard_reg_initial_reg (rtx reg)
1253 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1254 int i;
1256 if (ivs == 0)
1257 return NULL_RTX;
1259 for (i = 0; i < ivs->num_entries; i++)
1260 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1261 return ivs->entries[i].hard_reg;
1263 return NULL_RTX;
1266 /* Make sure that there's a pseudo register of mode MODE that stores the
1267 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1270 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1272 struct initial_value_struct *ivs;
1273 rtx rv;
1275 rv = has_hard_reg_initial_val (mode, regno);
1276 if (rv)
1277 return rv;
1279 ivs = crtl->hard_reg_initial_vals;
1280 if (ivs == 0)
1282 ivs = ggc_alloc<initial_value_struct> ();
1283 ivs->num_entries = 0;
1284 ivs->max_entries = 5;
1285 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1286 crtl->hard_reg_initial_vals = ivs;
1289 if (ivs->num_entries >= ivs->max_entries)
1291 ivs->max_entries += 5;
1292 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1293 ivs->max_entries);
1296 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1297 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1299 return ivs->entries[ivs->num_entries++].pseudo;
1302 /* See if get_hard_reg_initial_val has been used to create a pseudo
1303 for the initial value of hard register REGNO in mode MODE. Return
1304 the associated pseudo if so, otherwise return NULL. */
1307 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1309 struct initial_value_struct *ivs;
1310 int i;
1312 ivs = crtl->hard_reg_initial_vals;
1313 if (ivs != 0)
1314 for (i = 0; i < ivs->num_entries; i++)
1315 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1316 && REGNO (ivs->entries[i].hard_reg) == regno)
1317 return ivs->entries[i].pseudo;
1319 return NULL_RTX;
1322 unsigned int
1323 emit_initial_value_sets (void)
1325 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1326 int i;
1327 rtx_insn *seq;
1329 if (ivs == 0)
1330 return 0;
1332 start_sequence ();
1333 for (i = 0; i < ivs->num_entries; i++)
1334 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1335 seq = get_insns ();
1336 end_sequence ();
1338 emit_insn_at_entry (seq);
1339 return 0;
1342 /* Return the hardreg-pseudoreg initial values pair entry I and
1343 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1344 bool
1345 initial_value_entry (int i, rtx *hreg, rtx *preg)
1347 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1348 if (!ivs || i >= ivs->num_entries)
1349 return false;
1351 *hreg = ivs->entries[i].hard_reg;
1352 *preg = ivs->entries[i].pseudo;
1353 return true;
1356 /* These routines are responsible for converting virtual register references
1357 to the actual hard register references once RTL generation is complete.
1359 The following four variables are used for communication between the
1360 routines. They contain the offsets of the virtual registers from their
1361 respective hard registers. */
1363 static int in_arg_offset;
1364 static int var_offset;
1365 static int dynamic_offset;
1366 static int out_arg_offset;
1367 static int cfa_offset;
1369 /* In most machines, the stack pointer register is equivalent to the bottom
1370 of the stack. */
1372 #ifndef STACK_POINTER_OFFSET
1373 #define STACK_POINTER_OFFSET 0
1374 #endif
1376 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1377 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1378 #endif
1380 /* If not defined, pick an appropriate default for the offset of dynamically
1381 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1382 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1384 #ifndef STACK_DYNAMIC_OFFSET
1386 /* The bottom of the stack points to the actual arguments. If
1387 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1388 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1389 stack space for register parameters is not pushed by the caller, but
1390 rather part of the fixed stack areas and hence not included in
1391 `crtl->outgoing_args_size'. Nevertheless, we must allow
1392 for it when allocating stack dynamic objects. */
1394 #ifdef INCOMING_REG_PARM_STACK_SPACE
1395 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1396 ((ACCUMULATE_OUTGOING_ARGS \
1397 ? (crtl->outgoing_args_size \
1398 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1399 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1400 : 0) + (STACK_POINTER_OFFSET))
1401 #else
1402 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1403 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1404 + (STACK_POINTER_OFFSET))
1405 #endif
1406 #endif
1409 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1410 is a virtual register, return the equivalent hard register and set the
1411 offset indirectly through the pointer. Otherwise, return 0. */
1413 static rtx
1414 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1416 rtx new_rtx;
1417 HOST_WIDE_INT offset;
1419 if (x == virtual_incoming_args_rtx)
1421 if (stack_realign_drap)
1423 /* Replace virtual_incoming_args_rtx with internal arg
1424 pointer if DRAP is used to realign stack. */
1425 new_rtx = crtl->args.internal_arg_pointer;
1426 offset = 0;
1428 else
1429 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1431 else if (x == virtual_stack_vars_rtx)
1432 new_rtx = frame_pointer_rtx, offset = var_offset;
1433 else if (x == virtual_stack_dynamic_rtx)
1434 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1435 else if (x == virtual_outgoing_args_rtx)
1436 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1437 else if (x == virtual_cfa_rtx)
1439 #ifdef FRAME_POINTER_CFA_OFFSET
1440 new_rtx = frame_pointer_rtx;
1441 #else
1442 new_rtx = arg_pointer_rtx;
1443 #endif
1444 offset = cfa_offset;
1446 else if (x == virtual_preferred_stack_boundary_rtx)
1448 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1449 offset = 0;
1451 else
1452 return NULL_RTX;
1454 *poffset = offset;
1455 return new_rtx;
1458 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1459 registers present inside of *LOC. The expression is simplified,
1460 as much as possible, but is not to be considered "valid" in any sense
1461 implied by the target. Return true if any change is made. */
1463 static bool
1464 instantiate_virtual_regs_in_rtx (rtx *loc)
1466 if (!*loc)
1467 return false;
1468 bool changed = false;
1469 subrtx_ptr_iterator::array_type array;
1470 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1472 rtx *loc = *iter;
1473 if (rtx x = *loc)
1475 rtx new_rtx;
1476 HOST_WIDE_INT offset;
1477 switch (GET_CODE (x))
1479 case REG:
1480 new_rtx = instantiate_new_reg (x, &offset);
1481 if (new_rtx)
1483 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1484 changed = true;
1486 iter.skip_subrtxes ();
1487 break;
1489 case PLUS:
1490 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1491 if (new_rtx)
1493 XEXP (x, 0) = new_rtx;
1494 *loc = plus_constant (GET_MODE (x), x, offset, true);
1495 changed = true;
1496 iter.skip_subrtxes ();
1497 break;
1500 /* FIXME -- from old code */
1501 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1502 we can commute the PLUS and SUBREG because pointers into the
1503 frame are well-behaved. */
1504 break;
1506 default:
1507 break;
1511 return changed;
1514 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1515 matches the predicate for insn CODE operand OPERAND. */
1517 static int
1518 safe_insn_predicate (int code, int operand, rtx x)
1520 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1523 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1524 registers present inside of insn. The result will be a valid insn. */
1526 static void
1527 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1529 HOST_WIDE_INT offset;
1530 int insn_code, i;
1531 bool any_change = false;
1532 rtx set, new_rtx, x;
1533 rtx_insn *seq;
1535 /* There are some special cases to be handled first. */
1536 set = single_set (insn);
1537 if (set)
1539 /* We're allowed to assign to a virtual register. This is interpreted
1540 to mean that the underlying register gets assigned the inverse
1541 transformation. This is used, for example, in the handling of
1542 non-local gotos. */
1543 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1544 if (new_rtx)
1546 start_sequence ();
1548 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1549 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1550 gen_int_mode (-offset, GET_MODE (new_rtx)));
1551 x = force_operand (x, new_rtx);
1552 if (x != new_rtx)
1553 emit_move_insn (new_rtx, x);
1555 seq = get_insns ();
1556 end_sequence ();
1558 emit_insn_before (seq, insn);
1559 delete_insn (insn);
1560 return;
1563 /* Handle a straight copy from a virtual register by generating a
1564 new add insn. The difference between this and falling through
1565 to the generic case is avoiding a new pseudo and eliminating a
1566 move insn in the initial rtl stream. */
1567 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1568 if (new_rtx && offset != 0
1569 && REG_P (SET_DEST (set))
1570 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1572 start_sequence ();
1574 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1575 gen_int_mode (offset,
1576 GET_MODE (SET_DEST (set))),
1577 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1578 if (x != SET_DEST (set))
1579 emit_move_insn (SET_DEST (set), x);
1581 seq = get_insns ();
1582 end_sequence ();
1584 emit_insn_before (seq, insn);
1585 delete_insn (insn);
1586 return;
1589 extract_insn (insn);
1590 insn_code = INSN_CODE (insn);
1592 /* Handle a plus involving a virtual register by determining if the
1593 operands remain valid if they're modified in place. */
1594 if (GET_CODE (SET_SRC (set)) == PLUS
1595 && recog_data.n_operands >= 3
1596 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1597 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1598 && CONST_INT_P (recog_data.operand[2])
1599 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1601 offset += INTVAL (recog_data.operand[2]);
1603 /* If the sum is zero, then replace with a plain move. */
1604 if (offset == 0
1605 && REG_P (SET_DEST (set))
1606 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1608 start_sequence ();
1609 emit_move_insn (SET_DEST (set), new_rtx);
1610 seq = get_insns ();
1611 end_sequence ();
1613 emit_insn_before (seq, insn);
1614 delete_insn (insn);
1615 return;
1618 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1620 /* Using validate_change and apply_change_group here leaves
1621 recog_data in an invalid state. Since we know exactly what
1622 we want to check, do those two by hand. */
1623 if (safe_insn_predicate (insn_code, 1, new_rtx)
1624 && safe_insn_predicate (insn_code, 2, x))
1626 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1627 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1628 any_change = true;
1630 /* Fall through into the regular operand fixup loop in
1631 order to take care of operands other than 1 and 2. */
1635 else
1637 extract_insn (insn);
1638 insn_code = INSN_CODE (insn);
1641 /* In the general case, we expect virtual registers to appear only in
1642 operands, and then only as either bare registers or inside memories. */
1643 for (i = 0; i < recog_data.n_operands; ++i)
1645 x = recog_data.operand[i];
1646 switch (GET_CODE (x))
1648 case MEM:
1650 rtx addr = XEXP (x, 0);
1652 if (!instantiate_virtual_regs_in_rtx (&addr))
1653 continue;
1655 start_sequence ();
1656 x = replace_equiv_address (x, addr, true);
1657 /* It may happen that the address with the virtual reg
1658 was valid (e.g. based on the virtual stack reg, which might
1659 be acceptable to the predicates with all offsets), whereas
1660 the address now isn't anymore, for instance when the address
1661 is still offsetted, but the base reg isn't virtual-stack-reg
1662 anymore. Below we would do a force_reg on the whole operand,
1663 but this insn might actually only accept memory. Hence,
1664 before doing that last resort, try to reload the address into
1665 a register, so this operand stays a MEM. */
1666 if (!safe_insn_predicate (insn_code, i, x))
1668 addr = force_reg (GET_MODE (addr), addr);
1669 x = replace_equiv_address (x, addr, true);
1671 seq = get_insns ();
1672 end_sequence ();
1673 if (seq)
1674 emit_insn_before (seq, insn);
1676 break;
1678 case REG:
1679 new_rtx = instantiate_new_reg (x, &offset);
1680 if (new_rtx == NULL)
1681 continue;
1682 if (offset == 0)
1683 x = new_rtx;
1684 else
1686 start_sequence ();
1688 /* Careful, special mode predicates may have stuff in
1689 insn_data[insn_code].operand[i].mode that isn't useful
1690 to us for computing a new value. */
1691 /* ??? Recognize address_operand and/or "p" constraints
1692 to see if (plus new offset) is a valid before we put
1693 this through expand_simple_binop. */
1694 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1695 gen_int_mode (offset, GET_MODE (x)),
1696 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1697 seq = get_insns ();
1698 end_sequence ();
1699 emit_insn_before (seq, insn);
1701 break;
1703 case SUBREG:
1704 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1705 if (new_rtx == NULL)
1706 continue;
1707 if (offset != 0)
1709 start_sequence ();
1710 new_rtx = expand_simple_binop
1711 (GET_MODE (new_rtx), PLUS, new_rtx,
1712 gen_int_mode (offset, GET_MODE (new_rtx)),
1713 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1714 seq = get_insns ();
1715 end_sequence ();
1716 emit_insn_before (seq, insn);
1718 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1719 GET_MODE (new_rtx), SUBREG_BYTE (x));
1720 gcc_assert (x);
1721 break;
1723 default:
1724 continue;
1727 /* At this point, X contains the new value for the operand.
1728 Validate the new value vs the insn predicate. Note that
1729 asm insns will have insn_code -1 here. */
1730 if (!safe_insn_predicate (insn_code, i, x))
1732 start_sequence ();
1733 if (REG_P (x))
1735 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1736 x = copy_to_reg (x);
1738 else
1739 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1740 seq = get_insns ();
1741 end_sequence ();
1742 if (seq)
1743 emit_insn_before (seq, insn);
1746 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1747 any_change = true;
1750 if (any_change)
1752 /* Propagate operand changes into the duplicates. */
1753 for (i = 0; i < recog_data.n_dups; ++i)
1754 *recog_data.dup_loc[i]
1755 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1757 /* Force re-recognition of the instruction for validation. */
1758 INSN_CODE (insn) = -1;
1761 if (asm_noperands (PATTERN (insn)) >= 0)
1763 if (!check_asm_operands (PATTERN (insn)))
1765 error_for_asm (insn, "impossible constraint in %<asm%>");
1766 /* For asm goto, instead of fixing up all the edges
1767 just clear the template and clear input operands
1768 (asm goto doesn't have any output operands). */
1769 if (JUMP_P (insn))
1771 rtx asm_op = extract_asm_operands (PATTERN (insn));
1772 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1773 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1774 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1776 else
1777 delete_insn (insn);
1780 else
1782 if (recog_memoized (insn) < 0)
1783 fatal_insn_not_found (insn);
1787 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1788 do any instantiation required. */
1790 void
1791 instantiate_decl_rtl (rtx x)
1793 rtx addr;
1795 if (x == 0)
1796 return;
1798 /* If this is a CONCAT, recurse for the pieces. */
1799 if (GET_CODE (x) == CONCAT)
1801 instantiate_decl_rtl (XEXP (x, 0));
1802 instantiate_decl_rtl (XEXP (x, 1));
1803 return;
1806 /* If this is not a MEM, no need to do anything. Similarly if the
1807 address is a constant or a register that is not a virtual register. */
1808 if (!MEM_P (x))
1809 return;
1811 addr = XEXP (x, 0);
1812 if (CONSTANT_P (addr)
1813 || (REG_P (addr)
1814 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1815 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1816 return;
1818 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1821 /* Helper for instantiate_decls called via walk_tree: Process all decls
1822 in the given DECL_VALUE_EXPR. */
1824 static tree
1825 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1827 tree t = *tp;
1828 if (! EXPR_P (t))
1830 *walk_subtrees = 0;
1831 if (DECL_P (t))
1833 if (DECL_RTL_SET_P (t))
1834 instantiate_decl_rtl (DECL_RTL (t));
1835 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1836 && DECL_INCOMING_RTL (t))
1837 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1838 if ((TREE_CODE (t) == VAR_DECL
1839 || TREE_CODE (t) == RESULT_DECL)
1840 && DECL_HAS_VALUE_EXPR_P (t))
1842 tree v = DECL_VALUE_EXPR (t);
1843 walk_tree (&v, instantiate_expr, NULL, NULL);
1847 return NULL;
1850 /* Subroutine of instantiate_decls: Process all decls in the given
1851 BLOCK node and all its subblocks. */
1853 static void
1854 instantiate_decls_1 (tree let)
1856 tree t;
1858 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1860 if (DECL_RTL_SET_P (t))
1861 instantiate_decl_rtl (DECL_RTL (t));
1862 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1864 tree v = DECL_VALUE_EXPR (t);
1865 walk_tree (&v, instantiate_expr, NULL, NULL);
1869 /* Process all subblocks. */
1870 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1871 instantiate_decls_1 (t);
1874 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1875 all virtual registers in their DECL_RTL's. */
1877 static void
1878 instantiate_decls (tree fndecl)
1880 tree decl;
1881 unsigned ix;
1883 /* Process all parameters of the function. */
1884 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1886 instantiate_decl_rtl (DECL_RTL (decl));
1887 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1888 if (DECL_HAS_VALUE_EXPR_P (decl))
1890 tree v = DECL_VALUE_EXPR (decl);
1891 walk_tree (&v, instantiate_expr, NULL, NULL);
1895 if ((decl = DECL_RESULT (fndecl))
1896 && TREE_CODE (decl) == RESULT_DECL)
1898 if (DECL_RTL_SET_P (decl))
1899 instantiate_decl_rtl (DECL_RTL (decl));
1900 if (DECL_HAS_VALUE_EXPR_P (decl))
1902 tree v = DECL_VALUE_EXPR (decl);
1903 walk_tree (&v, instantiate_expr, NULL, NULL);
1907 /* Process the saved static chain if it exists. */
1908 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1909 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1910 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1912 /* Now process all variables defined in the function or its subblocks. */
1913 instantiate_decls_1 (DECL_INITIAL (fndecl));
1915 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1916 if (DECL_RTL_SET_P (decl))
1917 instantiate_decl_rtl (DECL_RTL (decl));
1918 vec_free (cfun->local_decls);
1921 /* Pass through the INSNS of function FNDECL and convert virtual register
1922 references to hard register references. */
1924 static unsigned int
1925 instantiate_virtual_regs (void)
1927 rtx_insn *insn;
1929 /* Compute the offsets to use for this function. */
1930 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1931 var_offset = STARTING_FRAME_OFFSET;
1932 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1933 out_arg_offset = STACK_POINTER_OFFSET;
1934 #ifdef FRAME_POINTER_CFA_OFFSET
1935 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1936 #else
1937 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1938 #endif
1940 /* Initialize recognition, indicating that volatile is OK. */
1941 init_recog ();
1943 /* Scan through all the insns, instantiating every virtual register still
1944 present. */
1945 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1946 if (INSN_P (insn))
1948 /* These patterns in the instruction stream can never be recognized.
1949 Fortunately, they shouldn't contain virtual registers either. */
1950 if (GET_CODE (PATTERN (insn)) == USE
1951 || GET_CODE (PATTERN (insn)) == CLOBBER
1952 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1953 continue;
1954 else if (DEBUG_INSN_P (insn))
1955 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1956 else
1957 instantiate_virtual_regs_in_insn (insn);
1959 if (insn->deleted ())
1960 continue;
1962 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1964 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1965 if (CALL_P (insn))
1966 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1969 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1970 instantiate_decls (current_function_decl);
1972 targetm.instantiate_decls ();
1974 /* Indicate that, from now on, assign_stack_local should use
1975 frame_pointer_rtx. */
1976 virtuals_instantiated = 1;
1978 return 0;
1981 namespace {
1983 const pass_data pass_data_instantiate_virtual_regs =
1985 RTL_PASS, /* type */
1986 "vregs", /* name */
1987 OPTGROUP_NONE, /* optinfo_flags */
1988 TV_NONE, /* tv_id */
1989 0, /* properties_required */
1990 0, /* properties_provided */
1991 0, /* properties_destroyed */
1992 0, /* todo_flags_start */
1993 0, /* todo_flags_finish */
1996 class pass_instantiate_virtual_regs : public rtl_opt_pass
1998 public:
1999 pass_instantiate_virtual_regs (gcc::context *ctxt)
2000 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2003 /* opt_pass methods: */
2004 virtual unsigned int execute (function *)
2006 return instantiate_virtual_regs ();
2009 }; // class pass_instantiate_virtual_regs
2011 } // anon namespace
2013 rtl_opt_pass *
2014 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2016 return new pass_instantiate_virtual_regs (ctxt);
2020 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2021 This means a type for which function calls must pass an address to the
2022 function or get an address back from the function.
2023 EXP may be a type node or an expression (whose type is tested). */
2026 aggregate_value_p (const_tree exp, const_tree fntype)
2028 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2029 int i, regno, nregs;
2030 rtx reg;
2032 if (fntype)
2033 switch (TREE_CODE (fntype))
2035 case CALL_EXPR:
2037 tree fndecl = get_callee_fndecl (fntype);
2038 if (fndecl)
2039 fntype = TREE_TYPE (fndecl);
2040 else if (CALL_EXPR_FN (fntype))
2041 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2042 else
2043 /* For internal functions, assume nothing needs to be
2044 returned in memory. */
2045 return 0;
2047 break;
2048 case FUNCTION_DECL:
2049 fntype = TREE_TYPE (fntype);
2050 break;
2051 case FUNCTION_TYPE:
2052 case METHOD_TYPE:
2053 break;
2054 case IDENTIFIER_NODE:
2055 fntype = NULL_TREE;
2056 break;
2057 default:
2058 /* We don't expect other tree types here. */
2059 gcc_unreachable ();
2062 if (VOID_TYPE_P (type))
2063 return 0;
2065 /* If a record should be passed the same as its first (and only) member
2066 don't pass it as an aggregate. */
2067 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2068 return aggregate_value_p (first_field (type), fntype);
2070 /* If the front end has decided that this needs to be passed by
2071 reference, do so. */
2072 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2073 && DECL_BY_REFERENCE (exp))
2074 return 1;
2076 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2077 if (fntype && TREE_ADDRESSABLE (fntype))
2078 return 1;
2080 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2081 and thus can't be returned in registers. */
2082 if (TREE_ADDRESSABLE (type))
2083 return 1;
2085 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2086 return 1;
2088 if (targetm.calls.return_in_memory (type, fntype))
2089 return 1;
2091 /* Make sure we have suitable call-clobbered regs to return
2092 the value in; if not, we must return it in memory. */
2093 reg = hard_function_value (type, 0, fntype, 0);
2095 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2096 it is OK. */
2097 if (!REG_P (reg))
2098 return 0;
2100 regno = REGNO (reg);
2101 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2102 for (i = 0; i < nregs; i++)
2103 if (! call_used_regs[regno + i])
2104 return 1;
2106 return 0;
2109 /* Return true if we should assign DECL a pseudo register; false if it
2110 should live on the local stack. */
2112 bool
2113 use_register_for_decl (const_tree decl)
2115 if (TREE_CODE (decl) == SSA_NAME)
2117 /* We often try to use the SSA_NAME, instead of its underlying
2118 decl, to get type information and guide decisions, to avoid
2119 differences of behavior between anonymous and named
2120 variables, but in this one case we have to go for the actual
2121 variable if there is one. The main reason is that, at least
2122 at -O0, we want to place user variables on the stack, but we
2123 don't mind using pseudos for anonymous or ignored temps.
2124 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2125 should go in pseudos, whereas their corresponding variables
2126 might have to go on the stack. So, disregarding the decl
2127 here would negatively impact debug info at -O0, enable
2128 coalescing between SSA_NAMEs that ought to get different
2129 stack/pseudo assignments, and get the incoming argument
2130 processing thoroughly confused by PARM_DECLs expected to live
2131 in stack slots but assigned to pseudos. */
2132 if (!SSA_NAME_VAR (decl))
2133 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2134 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2136 decl = SSA_NAME_VAR (decl);
2139 /* Honor volatile. */
2140 if (TREE_SIDE_EFFECTS (decl))
2141 return false;
2143 /* Honor addressability. */
2144 if (TREE_ADDRESSABLE (decl))
2145 return false;
2147 /* Decl is implicitly addressible by bound stores and loads
2148 if it is an aggregate holding bounds. */
2149 if (chkp_function_instrumented_p (current_function_decl)
2150 && TREE_TYPE (decl)
2151 && !BOUNDED_P (decl)
2152 && chkp_type_has_pointer (TREE_TYPE (decl)))
2153 return false;
2155 /* Only register-like things go in registers. */
2156 if (DECL_MODE (decl) == BLKmode)
2157 return false;
2159 /* If -ffloat-store specified, don't put explicit float variables
2160 into registers. */
2161 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2162 propagates values across these stores, and it probably shouldn't. */
2163 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2164 return false;
2166 if (!targetm.calls.allocate_stack_slots_for_args ())
2167 return true;
2169 /* If we're not interested in tracking debugging information for
2170 this decl, then we can certainly put it in a register. */
2171 if (DECL_IGNORED_P (decl))
2172 return true;
2174 if (optimize)
2175 return true;
2177 if (!DECL_REGISTER (decl))
2178 return false;
2180 switch (TREE_CODE (TREE_TYPE (decl)))
2182 case RECORD_TYPE:
2183 case UNION_TYPE:
2184 case QUAL_UNION_TYPE:
2185 /* When not optimizing, disregard register keyword for variables with
2186 types containing methods, otherwise the methods won't be callable
2187 from the debugger. */
2188 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2189 return false;
2190 break;
2191 default:
2192 break;
2195 return true;
2198 /* Structures to communicate between the subroutines of assign_parms.
2199 The first holds data persistent across all parameters, the second
2200 is cleared out for each parameter. */
2202 struct assign_parm_data_all
2204 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2205 should become a job of the target or otherwise encapsulated. */
2206 CUMULATIVE_ARGS args_so_far_v;
2207 cumulative_args_t args_so_far;
2208 struct args_size stack_args_size;
2209 tree function_result_decl;
2210 tree orig_fnargs;
2211 rtx_insn *first_conversion_insn;
2212 rtx_insn *last_conversion_insn;
2213 HOST_WIDE_INT pretend_args_size;
2214 HOST_WIDE_INT extra_pretend_bytes;
2215 int reg_parm_stack_space;
2218 struct assign_parm_data_one
2220 tree nominal_type;
2221 tree passed_type;
2222 rtx entry_parm;
2223 rtx stack_parm;
2224 machine_mode nominal_mode;
2225 machine_mode passed_mode;
2226 machine_mode promoted_mode;
2227 struct locate_and_pad_arg_data locate;
2228 int partial;
2229 BOOL_BITFIELD named_arg : 1;
2230 BOOL_BITFIELD passed_pointer : 1;
2231 BOOL_BITFIELD on_stack : 1;
2232 BOOL_BITFIELD loaded_in_reg : 1;
2235 struct bounds_parm_data
2237 assign_parm_data_one parm_data;
2238 tree bounds_parm;
2239 tree ptr_parm;
2240 rtx ptr_entry;
2241 int bound_no;
2244 /* A subroutine of assign_parms. Initialize ALL. */
2246 static void
2247 assign_parms_initialize_all (struct assign_parm_data_all *all)
2249 tree fntype ATTRIBUTE_UNUSED;
2251 memset (all, 0, sizeof (*all));
2253 fntype = TREE_TYPE (current_function_decl);
2255 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2256 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2257 #else
2258 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2259 current_function_decl, -1);
2260 #endif
2261 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2263 #ifdef INCOMING_REG_PARM_STACK_SPACE
2264 all->reg_parm_stack_space
2265 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2266 #endif
2269 /* If ARGS contains entries with complex types, split the entry into two
2270 entries of the component type. Return a new list of substitutions are
2271 needed, else the old list. */
2273 static void
2274 split_complex_args (struct assign_parm_data_all *all, vec<tree> *args)
2276 unsigned i;
2277 tree p;
2279 FOR_EACH_VEC_ELT (*args, i, p)
2281 tree type = TREE_TYPE (p);
2282 if (TREE_CODE (type) == COMPLEX_TYPE
2283 && targetm.calls.split_complex_arg (type))
2285 tree cparm = p;
2286 tree decl;
2287 tree subtype = TREE_TYPE (type);
2288 bool addressable = TREE_ADDRESSABLE (p);
2290 /* Rewrite the PARM_DECL's type with its component. */
2291 p = copy_node (p);
2292 TREE_TYPE (p) = subtype;
2293 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2294 DECL_MODE (p) = VOIDmode;
2295 DECL_SIZE (p) = NULL;
2296 DECL_SIZE_UNIT (p) = NULL;
2297 /* If this arg must go in memory, put it in a pseudo here.
2298 We can't allow it to go in memory as per normal parms,
2299 because the usual place might not have the imag part
2300 adjacent to the real part. */
2301 DECL_ARTIFICIAL (p) = addressable;
2302 DECL_IGNORED_P (p) = addressable;
2303 TREE_ADDRESSABLE (p) = 0;
2304 /* Reset the RTL before layout_decl, or it may change the
2305 mode of the RTL of the original argument copied to P. */
2306 SET_DECL_RTL (p, NULL_RTX);
2307 layout_decl (p, 0);
2308 (*args)[i] = p;
2310 /* Build a second synthetic decl. */
2311 decl = build_decl (EXPR_LOCATION (p),
2312 PARM_DECL, NULL_TREE, subtype);
2313 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2314 DECL_ARTIFICIAL (decl) = addressable;
2315 DECL_IGNORED_P (decl) = addressable;
2316 layout_decl (decl, 0);
2317 args->safe_insert (++i, decl);
2319 /* If we are expanding a function, rather than gimplifying
2320 it, propagate the RTL of the complex parm to the split
2321 declarations, and set their contexts so that
2322 maybe_reset_rtl_for_parm can recognize them and refrain
2323 from resetting their RTL. */
2324 if (currently_expanding_to_rtl)
2326 maybe_reset_rtl_for_parm (cparm);
2327 rtx rtl = rtl_for_parm (all, cparm);
2328 if (rtl)
2330 /* If this is parm is unassigned, assign it now: the
2331 newly-created decls wouldn't expect the need for
2332 assignment, and if they were assigned
2333 independently, they might not end up in adjacent
2334 slots, so unsplit wouldn't be able to fill in the
2335 unassigned address of the complex MEM. */
2336 if (parm_in_unassigned_mem_p (cparm, rtl))
2338 int align = STACK_SLOT_ALIGNMENT
2339 (TREE_TYPE (cparm), GET_MODE (rtl), MEM_ALIGN (rtl));
2340 rtx loc = assign_stack_local
2341 (GET_MODE (rtl), GET_MODE_SIZE (GET_MODE (rtl)),
2342 align);
2343 XEXP (rtl, 0) = XEXP (loc, 0);
2346 SET_DECL_RTL (p, read_complex_part (rtl, false));
2347 SET_DECL_RTL (decl, read_complex_part (rtl, true));
2349 DECL_CONTEXT (p) = cparm;
2350 DECL_CONTEXT (decl) = cparm;
2357 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2358 the hidden struct return argument, and (abi willing) complex args.
2359 Return the new parameter list. */
2361 static vec<tree>
2362 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2364 tree fndecl = current_function_decl;
2365 tree fntype = TREE_TYPE (fndecl);
2366 vec<tree> fnargs = vNULL;
2367 tree arg;
2369 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2370 fnargs.safe_push (arg);
2372 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2374 /* If struct value address is treated as the first argument, make it so. */
2375 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2376 && ! cfun->returns_pcc_struct
2377 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2379 tree type = build_pointer_type (TREE_TYPE (fntype));
2380 tree decl;
2382 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2383 PARM_DECL, get_identifier (".result_ptr"), type);
2384 DECL_ARG_TYPE (decl) = type;
2385 DECL_ARTIFICIAL (decl) = 1;
2386 DECL_NAMELESS (decl) = 1;
2387 TREE_CONSTANT (decl) = 1;
2389 DECL_CHAIN (decl) = all->orig_fnargs;
2390 all->orig_fnargs = decl;
2391 fnargs.safe_insert (0, decl);
2393 all->function_result_decl = decl;
2395 /* If function is instrumented then bounds of the
2396 passed structure address is the second argument. */
2397 if (chkp_function_instrumented_p (fndecl))
2399 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2400 PARM_DECL, get_identifier (".result_bnd"),
2401 pointer_bounds_type_node);
2402 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2403 DECL_ARTIFICIAL (decl) = 1;
2404 DECL_NAMELESS (decl) = 1;
2405 TREE_CONSTANT (decl) = 1;
2407 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2408 DECL_CHAIN (all->orig_fnargs) = decl;
2409 fnargs.safe_insert (1, decl);
2413 /* If the target wants to split complex arguments into scalars, do so. */
2414 if (targetm.calls.split_complex_arg)
2415 split_complex_args (all, &fnargs);
2417 return fnargs;
2420 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2421 data for the parameter. Incorporate ABI specifics such as pass-by-
2422 reference and type promotion. */
2424 static void
2425 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2426 struct assign_parm_data_one *data)
2428 tree nominal_type, passed_type;
2429 machine_mode nominal_mode, passed_mode, promoted_mode;
2430 int unsignedp;
2432 memset (data, 0, sizeof (*data));
2434 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2435 if (!cfun->stdarg)
2436 data->named_arg = 1; /* No variadic parms. */
2437 else if (DECL_CHAIN (parm))
2438 data->named_arg = 1; /* Not the last non-variadic parm. */
2439 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2440 data->named_arg = 1; /* Only variadic ones are unnamed. */
2441 else
2442 data->named_arg = 0; /* Treat as variadic. */
2444 nominal_type = TREE_TYPE (parm);
2445 passed_type = DECL_ARG_TYPE (parm);
2447 /* Look out for errors propagating this far. Also, if the parameter's
2448 type is void then its value doesn't matter. */
2449 if (TREE_TYPE (parm) == error_mark_node
2450 /* This can happen after weird syntax errors
2451 or if an enum type is defined among the parms. */
2452 || TREE_CODE (parm) != PARM_DECL
2453 || passed_type == NULL
2454 || VOID_TYPE_P (nominal_type))
2456 nominal_type = passed_type = void_type_node;
2457 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2458 goto egress;
2461 /* Find mode of arg as it is passed, and mode of arg as it should be
2462 during execution of this function. */
2463 passed_mode = TYPE_MODE (passed_type);
2464 nominal_mode = TYPE_MODE (nominal_type);
2466 /* If the parm is to be passed as a transparent union or record, use the
2467 type of the first field for the tests below. We have already verified
2468 that the modes are the same. */
2469 if ((TREE_CODE (passed_type) == UNION_TYPE
2470 || TREE_CODE (passed_type) == RECORD_TYPE)
2471 && TYPE_TRANSPARENT_AGGR (passed_type))
2472 passed_type = TREE_TYPE (first_field (passed_type));
2474 /* See if this arg was passed by invisible reference. */
2475 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2476 passed_type, data->named_arg))
2478 passed_type = nominal_type = build_pointer_type (passed_type);
2479 data->passed_pointer = true;
2480 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2483 /* Find mode as it is passed by the ABI. */
2484 unsignedp = TYPE_UNSIGNED (passed_type);
2485 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2486 TREE_TYPE (current_function_decl), 0);
2488 egress:
2489 data->nominal_type = nominal_type;
2490 data->passed_type = passed_type;
2491 data->nominal_mode = nominal_mode;
2492 data->passed_mode = passed_mode;
2493 data->promoted_mode = promoted_mode;
2496 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2498 static void
2499 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2500 struct assign_parm_data_one *data, bool no_rtl)
2502 int varargs_pretend_bytes = 0;
2504 targetm.calls.setup_incoming_varargs (all->args_so_far,
2505 data->promoted_mode,
2506 data->passed_type,
2507 &varargs_pretend_bytes, no_rtl);
2509 /* If the back-end has requested extra stack space, record how much is
2510 needed. Do not change pretend_args_size otherwise since it may be
2511 nonzero from an earlier partial argument. */
2512 if (varargs_pretend_bytes > 0)
2513 all->pretend_args_size = varargs_pretend_bytes;
2516 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2517 the incoming location of the current parameter. */
2519 static void
2520 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2521 struct assign_parm_data_one *data)
2523 HOST_WIDE_INT pretend_bytes = 0;
2524 rtx entry_parm;
2525 bool in_regs;
2527 if (data->promoted_mode == VOIDmode)
2529 data->entry_parm = data->stack_parm = const0_rtx;
2530 return;
2533 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2534 data->promoted_mode,
2535 data->passed_type,
2536 data->named_arg);
2538 if (entry_parm == 0)
2539 data->promoted_mode = data->passed_mode;
2541 /* Determine parm's home in the stack, in case it arrives in the stack
2542 or we should pretend it did. Compute the stack position and rtx where
2543 the argument arrives and its size.
2545 There is one complexity here: If this was a parameter that would
2546 have been passed in registers, but wasn't only because it is
2547 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2548 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2549 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2550 as it was the previous time. */
2551 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2552 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2553 in_regs = true;
2554 #endif
2555 if (!in_regs && !data->named_arg)
2557 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2559 rtx tem;
2560 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2561 data->promoted_mode,
2562 data->passed_type, true);
2563 in_regs = tem != NULL;
2567 /* If this parameter was passed both in registers and in the stack, use
2568 the copy on the stack. */
2569 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2570 data->passed_type))
2571 entry_parm = 0;
2573 if (entry_parm)
2575 int partial;
2577 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2578 data->promoted_mode,
2579 data->passed_type,
2580 data->named_arg);
2581 data->partial = partial;
2583 /* The caller might already have allocated stack space for the
2584 register parameters. */
2585 if (partial != 0 && all->reg_parm_stack_space == 0)
2587 /* Part of this argument is passed in registers and part
2588 is passed on the stack. Ask the prologue code to extend
2589 the stack part so that we can recreate the full value.
2591 PRETEND_BYTES is the size of the registers we need to store.
2592 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2593 stack space that the prologue should allocate.
2595 Internally, gcc assumes that the argument pointer is aligned
2596 to STACK_BOUNDARY bits. This is used both for alignment
2597 optimizations (see init_emit) and to locate arguments that are
2598 aligned to more than PARM_BOUNDARY bits. We must preserve this
2599 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2600 a stack boundary. */
2602 /* We assume at most one partial arg, and it must be the first
2603 argument on the stack. */
2604 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2606 pretend_bytes = partial;
2607 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2609 /* We want to align relative to the actual stack pointer, so
2610 don't include this in the stack size until later. */
2611 all->extra_pretend_bytes = all->pretend_args_size;
2615 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2616 all->reg_parm_stack_space,
2617 entry_parm ? data->partial : 0, current_function_decl,
2618 &all->stack_args_size, &data->locate);
2620 /* Update parm_stack_boundary if this parameter is passed in the
2621 stack. */
2622 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2623 crtl->parm_stack_boundary = data->locate.boundary;
2625 /* Adjust offsets to include the pretend args. */
2626 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2627 data->locate.slot_offset.constant += pretend_bytes;
2628 data->locate.offset.constant += pretend_bytes;
2630 data->entry_parm = entry_parm;
2633 /* A subroutine of assign_parms. If there is actually space on the stack
2634 for this parm, count it in stack_args_size and return true. */
2636 static bool
2637 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2638 struct assign_parm_data_one *data)
2640 /* Bounds are never passed on the stack to keep compatibility
2641 with not instrumented code. */
2642 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2643 return false;
2644 /* Trivially true if we've no incoming register. */
2645 else if (data->entry_parm == NULL)
2647 /* Also true if we're partially in registers and partially not,
2648 since we've arranged to drop the entire argument on the stack. */
2649 else if (data->partial != 0)
2651 /* Also true if the target says that it's passed in both registers
2652 and on the stack. */
2653 else if (GET_CODE (data->entry_parm) == PARALLEL
2654 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2656 /* Also true if the target says that there's stack allocated for
2657 all register parameters. */
2658 else if (all->reg_parm_stack_space > 0)
2660 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2661 else
2662 return false;
2664 all->stack_args_size.constant += data->locate.size.constant;
2665 if (data->locate.size.var)
2666 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2668 return true;
2671 /* A subroutine of assign_parms. Given that this parameter is allocated
2672 stack space by the ABI, find it. */
2674 static void
2675 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2677 rtx offset_rtx, stack_parm;
2678 unsigned int align, boundary;
2680 /* If we're passing this arg using a reg, make its stack home the
2681 aligned stack slot. */
2682 if (data->entry_parm)
2683 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2684 else
2685 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2687 stack_parm = crtl->args.internal_arg_pointer;
2688 if (offset_rtx != const0_rtx)
2689 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2690 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2692 if (!data->passed_pointer)
2694 set_mem_attributes (stack_parm, parm, 1);
2695 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2696 while promoted mode's size is needed. */
2697 if (data->promoted_mode != BLKmode
2698 && data->promoted_mode != DECL_MODE (parm))
2700 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2701 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2703 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2704 data->promoted_mode);
2705 if (offset)
2706 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2711 boundary = data->locate.boundary;
2712 align = BITS_PER_UNIT;
2714 /* If we're padding upward, we know that the alignment of the slot
2715 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2716 intentionally forcing upward padding. Otherwise we have to come
2717 up with a guess at the alignment based on OFFSET_RTX. */
2718 if (data->locate.where_pad != downward || data->entry_parm)
2719 align = boundary;
2720 else if (CONST_INT_P (offset_rtx))
2722 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2723 align = align & -align;
2725 set_mem_align (stack_parm, align);
2727 if (data->entry_parm)
2728 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2730 data->stack_parm = stack_parm;
2733 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2734 always valid and contiguous. */
2736 static void
2737 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2739 rtx entry_parm = data->entry_parm;
2740 rtx stack_parm = data->stack_parm;
2742 /* If this parm was passed part in regs and part in memory, pretend it
2743 arrived entirely in memory by pushing the register-part onto the stack.
2744 In the special case of a DImode or DFmode that is split, we could put
2745 it together in a pseudoreg directly, but for now that's not worth
2746 bothering with. */
2747 if (data->partial != 0)
2749 /* Handle calls that pass values in multiple non-contiguous
2750 locations. The Irix 6 ABI has examples of this. */
2751 if (GET_CODE (entry_parm) == PARALLEL)
2752 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2753 data->passed_type,
2754 int_size_in_bytes (data->passed_type));
2755 else
2757 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2758 move_block_from_reg (REGNO (entry_parm),
2759 validize_mem (copy_rtx (stack_parm)),
2760 data->partial / UNITS_PER_WORD);
2763 entry_parm = stack_parm;
2766 /* If we didn't decide this parm came in a register, by default it came
2767 on the stack. */
2768 else if (entry_parm == NULL)
2769 entry_parm = stack_parm;
2771 /* When an argument is passed in multiple locations, we can't make use
2772 of this information, but we can save some copying if the whole argument
2773 is passed in a single register. */
2774 else if (GET_CODE (entry_parm) == PARALLEL
2775 && data->nominal_mode != BLKmode
2776 && data->passed_mode != BLKmode)
2778 size_t i, len = XVECLEN (entry_parm, 0);
2780 for (i = 0; i < len; i++)
2781 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2782 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2783 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2784 == data->passed_mode)
2785 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2787 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2788 break;
2792 data->entry_parm = entry_parm;
2795 /* A subroutine of assign_parms. Reconstitute any values which were
2796 passed in multiple registers and would fit in a single register. */
2798 static void
2799 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2801 rtx entry_parm = data->entry_parm;
2803 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2804 This can be done with register operations rather than on the
2805 stack, even if we will store the reconstituted parameter on the
2806 stack later. */
2807 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2809 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2810 emit_group_store (parmreg, entry_parm, data->passed_type,
2811 GET_MODE_SIZE (GET_MODE (entry_parm)));
2812 entry_parm = parmreg;
2815 data->entry_parm = entry_parm;
2818 /* Wrapper for use_register_for_decl, that special-cases the
2819 .result_ptr as the function's RESULT_DECL when the RESULT_DECL is
2820 passed by reference. */
2822 static bool
2823 use_register_for_parm_decl (struct assign_parm_data_all *all, tree parm)
2825 if (parm == all->function_result_decl)
2827 tree result = DECL_RESULT (current_function_decl);
2829 if (DECL_BY_REFERENCE (result))
2830 parm = result;
2833 return use_register_for_decl (parm);
2836 /* Wrapper for get_rtl_for_parm_ssa_default_def, that special-cases
2837 the .result_ptr as the function's RESULT_DECL when the RESULT_DECL
2838 is passed by reference. */
2840 static rtx
2841 rtl_for_parm (struct assign_parm_data_all *all, tree parm)
2843 if (parm == all->function_result_decl)
2845 tree result = DECL_RESULT (current_function_decl);
2847 if (!DECL_BY_REFERENCE (result))
2848 return NULL_RTX;
2850 parm = result;
2853 return get_rtl_for_parm_ssa_default_def (parm);
2856 /* Reset the location of PARM_DECLs and RESULT_DECLs that had
2857 SSA_NAMEs in multiple partitions, so that assign_parms will choose
2858 the default def, if it exists, or create new RTL to hold the unused
2859 entry value. If we are coalescing across variables, we want to
2860 reset the location too, because a parm without a default def
2861 (incoming value unused) might be coalesced with one with a default
2862 def, and then assign_parms would copy both incoming values to the
2863 same location, which might cause the wrong value to survive. */
2864 static void
2865 maybe_reset_rtl_for_parm (tree parm)
2867 gcc_assert (TREE_CODE (parm) == PARM_DECL
2868 || TREE_CODE (parm) == RESULT_DECL);
2870 /* This is a split complex parameter, and its context was set to its
2871 original PARM_DECL in split_complex_args so that we could
2872 recognize it here and not reset its RTL. */
2873 if (DECL_CONTEXT (parm) && TREE_CODE (DECL_CONTEXT (parm)) == PARM_DECL)
2875 DECL_CONTEXT (parm) = DECL_CONTEXT (DECL_CONTEXT (parm));
2876 return;
2879 if ((flag_tree_coalesce_vars
2880 || (DECL_RTL_SET_P (parm) && DECL_RTL (parm) == pc_rtx))
2881 && is_gimple_reg (parm))
2882 SET_DECL_RTL (parm, NULL_RTX);
2885 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2886 always valid and properly aligned. */
2888 static void
2889 assign_parm_adjust_stack_rtl (struct assign_parm_data_all *all, tree parm,
2890 struct assign_parm_data_one *data)
2892 rtx stack_parm = data->stack_parm;
2894 /* If out-of-SSA assigned RTL to the parm default def, make sure we
2895 don't use what we might have computed before. */
2896 rtx ssa_assigned = rtl_for_parm (all, parm);
2897 if (ssa_assigned)
2898 stack_parm = NULL;
2900 /* If we can't trust the parm stack slot to be aligned enough for its
2901 ultimate type, don't use that slot after entry. We'll make another
2902 stack slot, if we need one. */
2903 else if (stack_parm
2904 && ((STRICT_ALIGNMENT
2905 && (GET_MODE_ALIGNMENT (data->nominal_mode)
2906 > MEM_ALIGN (stack_parm)))
2907 || (data->nominal_type
2908 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2909 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2910 stack_parm = NULL;
2912 /* If parm was passed in memory, and we need to convert it on entry,
2913 don't store it back in that same slot. */
2914 else if (data->entry_parm == stack_parm
2915 && data->nominal_mode != BLKmode
2916 && data->nominal_mode != data->passed_mode)
2917 stack_parm = NULL;
2919 /* If stack protection is in effect for this function, don't leave any
2920 pointers in their passed stack slots. */
2921 else if (crtl->stack_protect_guard
2922 && (flag_stack_protect == 2
2923 || data->passed_pointer
2924 || POINTER_TYPE_P (data->nominal_type)))
2925 stack_parm = NULL;
2927 data->stack_parm = stack_parm;
2930 /* A subroutine of assign_parms. Return true if the current parameter
2931 should be stored as a BLKmode in the current frame. */
2933 static bool
2934 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2936 if (data->nominal_mode == BLKmode)
2937 return true;
2938 if (GET_MODE (data->entry_parm) == BLKmode)
2939 return true;
2941 #ifdef BLOCK_REG_PADDING
2942 /* Only assign_parm_setup_block knows how to deal with register arguments
2943 that are padded at the least significant end. */
2944 if (REG_P (data->entry_parm)
2945 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2946 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2947 == (BYTES_BIG_ENDIAN ? upward : downward)))
2948 return true;
2949 #endif
2951 return false;
2954 /* Return true if FROM_EXPAND is a MEM with an address to be filled in
2955 by assign_params. This should be the case if, and only if,
2956 parm_in_stack_slot_p holds for the parm DECL that expanded to
2957 FROM_EXPAND, so we check that, too. */
2959 static bool
2960 parm_in_unassigned_mem_p (tree decl, rtx from_expand)
2962 bool result = MEM_P (from_expand) && !XEXP (from_expand, 0);
2964 gcc_assert (result == parm_in_stack_slot_p (decl)
2965 /* Maybe it was already assigned. That's ok, especially
2966 for split complex args. */
2967 || (!result && MEM_P (from_expand)
2968 && (XEXP (from_expand, 0) == virtual_stack_vars_rtx
2969 || (GET_CODE (XEXP (from_expand, 0)) == PLUS
2970 && XEXP (XEXP (from_expand, 0), 0) == virtual_stack_vars_rtx))));
2972 return result;
2975 /* A subroutine of assign_parms. Arrange for the parameter to be
2976 present and valid in DATA->STACK_RTL. */
2978 static void
2979 assign_parm_setup_block (struct assign_parm_data_all *all,
2980 tree parm, struct assign_parm_data_one *data)
2982 rtx entry_parm = data->entry_parm;
2983 rtx stack_parm = data->stack_parm;
2984 HOST_WIDE_INT size;
2985 HOST_WIDE_INT size_stored;
2987 if (GET_CODE (entry_parm) == PARALLEL)
2988 entry_parm = emit_group_move_into_temps (entry_parm);
2990 size = int_size_in_bytes (data->passed_type);
2991 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2993 if (stack_parm == 0)
2995 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2996 rtx from_expand = rtl_for_parm (all, parm);
2997 if (from_expand && !parm_in_unassigned_mem_p (parm, from_expand))
2998 stack_parm = copy_rtx (from_expand);
2999 else
3001 stack_parm = assign_stack_local (BLKmode, size_stored,
3002 DECL_ALIGN (parm));
3003 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
3004 PUT_MODE (stack_parm, GET_MODE (entry_parm));
3005 if (from_expand)
3007 gcc_assert (GET_CODE (stack_parm) == MEM);
3008 gcc_assert (parm_in_unassigned_mem_p (parm, from_expand));
3009 XEXP (from_expand, 0) = XEXP (stack_parm, 0);
3010 PUT_MODE (from_expand, GET_MODE (stack_parm));
3011 stack_parm = copy_rtx (from_expand);
3013 else
3014 set_mem_attributes (stack_parm, parm, 1);
3018 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
3019 calls that pass values in multiple non-contiguous locations. */
3020 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
3022 rtx mem;
3024 /* Note that we will be storing an integral number of words.
3025 So we have to be careful to ensure that we allocate an
3026 integral number of words. We do this above when we call
3027 assign_stack_local if space was not allocated in the argument
3028 list. If it was, this will not work if PARM_BOUNDARY is not
3029 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3030 if it becomes a problem. Exception is when BLKmode arrives
3031 with arguments not conforming to word_mode. */
3033 if (data->stack_parm == 0)
3035 else if (GET_CODE (entry_parm) == PARALLEL)
3037 else
3038 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
3040 mem = validize_mem (copy_rtx (stack_parm));
3042 /* Handle values in multiple non-contiguous locations. */
3043 if (GET_CODE (entry_parm) == PARALLEL)
3045 push_to_sequence2 (all->first_conversion_insn,
3046 all->last_conversion_insn);
3047 emit_group_store (mem, entry_parm, data->passed_type, size);
3048 all->first_conversion_insn = get_insns ();
3049 all->last_conversion_insn = get_last_insn ();
3050 end_sequence ();
3053 else if (size == 0)
3056 /* MEM may be a REG if coalescing assigns the param's partition
3057 to a pseudo. */
3058 else if (REG_P (mem))
3059 emit_move_insn (mem, entry_parm);
3061 /* If SIZE is that of a mode no bigger than a word, just use
3062 that mode's store operation. */
3063 else if (size <= UNITS_PER_WORD)
3065 machine_mode mode
3066 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
3068 if (mode != BLKmode
3069 #ifdef BLOCK_REG_PADDING
3070 && (size == UNITS_PER_WORD
3071 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3072 != (BYTES_BIG_ENDIAN ? upward : downward)))
3073 #endif
3076 rtx reg;
3078 /* We are really truncating a word_mode value containing
3079 SIZE bytes into a value of mode MODE. If such an
3080 operation requires no actual instructions, we can refer
3081 to the value directly in mode MODE, otherwise we must
3082 start with the register in word_mode and explicitly
3083 convert it. */
3084 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
3085 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3086 else
3088 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3089 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3091 emit_move_insn (change_address (mem, mode, 0), reg);
3094 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3095 machine must be aligned to the left before storing
3096 to memory. Note that the previous test doesn't
3097 handle all cases (e.g. SIZE == 3). */
3098 else if (size != UNITS_PER_WORD
3099 #ifdef BLOCK_REG_PADDING
3100 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3101 == downward)
3102 #else
3103 && BYTES_BIG_ENDIAN
3104 #endif
3107 rtx tem, x;
3108 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3109 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3111 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3112 tem = change_address (mem, word_mode, 0);
3113 emit_move_insn (tem, x);
3115 else
3116 move_block_from_reg (REGNO (entry_parm), mem,
3117 size_stored / UNITS_PER_WORD);
3119 else
3120 move_block_from_reg (REGNO (entry_parm), mem,
3121 size_stored / UNITS_PER_WORD);
3123 else if (data->stack_parm == 0)
3125 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3126 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3127 BLOCK_OP_NORMAL);
3128 all->first_conversion_insn = get_insns ();
3129 all->last_conversion_insn = get_last_insn ();
3130 end_sequence ();
3133 data->stack_parm = stack_parm;
3134 SET_DECL_RTL (parm, stack_parm);
3137 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3138 parameter. Get it there. Perform all ABI specified conversions. */
3140 static void
3141 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3142 struct assign_parm_data_one *data)
3144 rtx parmreg, validated_mem;
3145 rtx equiv_stack_parm;
3146 machine_mode promoted_nominal_mode;
3147 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3148 bool did_conversion = false;
3149 bool need_conversion, moved;
3151 /* Store the parm in a pseudoregister during the function, but we may
3152 need to do it in a wider mode. Using 2 here makes the result
3153 consistent with promote_decl_mode and thus expand_expr_real_1. */
3154 promoted_nominal_mode
3155 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3156 TREE_TYPE (current_function_decl), 2);
3158 rtx from_expand = parmreg = rtl_for_parm (all, parm);
3160 if (from_expand && !data->passed_pointer)
3162 if (GET_MODE (parmreg) != promoted_nominal_mode)
3163 parmreg = gen_lowpart (promoted_nominal_mode, parmreg);
3165 else if (!from_expand || parm_in_unassigned_mem_p (parm, from_expand))
3167 parmreg = gen_reg_rtx (promoted_nominal_mode);
3168 if (!DECL_ARTIFICIAL (parm))
3169 mark_user_reg (parmreg);
3171 if (from_expand)
3173 gcc_assert (data->passed_pointer);
3174 gcc_assert (GET_CODE (from_expand) == MEM
3175 && XEXP (from_expand, 0) == NULL_RTX);
3176 XEXP (from_expand, 0) = parmreg;
3180 /* If this was an item that we received a pointer to,
3181 set DECL_RTL appropriately. */
3182 if (from_expand)
3183 SET_DECL_RTL (parm, from_expand);
3184 else if (data->passed_pointer)
3186 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3187 set_mem_attributes (x, parm, 1);
3188 SET_DECL_RTL (parm, x);
3190 else
3191 SET_DECL_RTL (parm, parmreg);
3193 assign_parm_remove_parallels (data);
3195 /* Copy the value into the register, thus bridging between
3196 assign_parm_find_data_types and expand_expr_real_1. */
3198 equiv_stack_parm = data->stack_parm;
3199 if (!equiv_stack_parm)
3200 equiv_stack_parm = data->entry_parm;
3201 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3203 need_conversion = (data->nominal_mode != data->passed_mode
3204 || promoted_nominal_mode != data->promoted_mode);
3205 gcc_assert (!(need_conversion && data->passed_pointer && from_expand));
3206 moved = false;
3208 if (need_conversion
3209 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3210 && data->nominal_mode == data->passed_mode
3211 && data->nominal_mode == GET_MODE (data->entry_parm))
3213 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3214 mode, by the caller. We now have to convert it to
3215 NOMINAL_MODE, if different. However, PARMREG may be in
3216 a different mode than NOMINAL_MODE if it is being stored
3217 promoted.
3219 If ENTRY_PARM is a hard register, it might be in a register
3220 not valid for operating in its mode (e.g., an odd-numbered
3221 register for a DFmode). In that case, moves are the only
3222 thing valid, so we can't do a convert from there. This
3223 occurs when the calling sequence allow such misaligned
3224 usages.
3226 In addition, the conversion may involve a call, which could
3227 clobber parameters which haven't been copied to pseudo
3228 registers yet.
3230 First, we try to emit an insn which performs the necessary
3231 conversion. We verify that this insn does not clobber any
3232 hard registers. */
3234 enum insn_code icode;
3235 rtx op0, op1;
3237 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3238 unsignedp);
3240 op0 = parmreg;
3241 op1 = validated_mem;
3242 if (icode != CODE_FOR_nothing
3243 && insn_operand_matches (icode, 0, op0)
3244 && insn_operand_matches (icode, 1, op1))
3246 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3247 rtx_insn *insn, *insns;
3248 rtx t = op1;
3249 HARD_REG_SET hardregs;
3251 start_sequence ();
3252 /* If op1 is a hard register that is likely spilled, first
3253 force it into a pseudo, otherwise combiner might extend
3254 its lifetime too much. */
3255 if (GET_CODE (t) == SUBREG)
3256 t = SUBREG_REG (t);
3257 if (REG_P (t)
3258 && HARD_REGISTER_P (t)
3259 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3260 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3262 t = gen_reg_rtx (GET_MODE (op1));
3263 emit_move_insn (t, op1);
3265 else
3266 t = op1;
3267 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3268 data->passed_mode, unsignedp);
3269 emit_insn (pat);
3270 insns = get_insns ();
3272 moved = true;
3273 CLEAR_HARD_REG_SET (hardregs);
3274 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3276 if (INSN_P (insn))
3277 note_stores (PATTERN (insn), record_hard_reg_sets,
3278 &hardregs);
3279 if (!hard_reg_set_empty_p (hardregs))
3280 moved = false;
3283 end_sequence ();
3285 if (moved)
3287 emit_insn (insns);
3288 if (equiv_stack_parm != NULL_RTX)
3289 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3290 equiv_stack_parm);
3295 if (moved)
3296 /* Nothing to do. */
3298 else if (need_conversion)
3300 /* We did not have an insn to convert directly, or the sequence
3301 generated appeared unsafe. We must first copy the parm to a
3302 pseudo reg, and save the conversion until after all
3303 parameters have been moved. */
3305 int save_tree_used;
3306 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3308 emit_move_insn (tempreg, validated_mem);
3310 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3311 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3313 if (GET_CODE (tempreg) == SUBREG
3314 && GET_MODE (tempreg) == data->nominal_mode
3315 && REG_P (SUBREG_REG (tempreg))
3316 && data->nominal_mode == data->passed_mode
3317 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3318 && GET_MODE_SIZE (GET_MODE (tempreg))
3319 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3321 /* The argument is already sign/zero extended, so note it
3322 into the subreg. */
3323 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3324 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3327 /* TREE_USED gets set erroneously during expand_assignment. */
3328 save_tree_used = TREE_USED (parm);
3329 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3330 TREE_USED (parm) = save_tree_used;
3331 all->first_conversion_insn = get_insns ();
3332 all->last_conversion_insn = get_last_insn ();
3333 end_sequence ();
3335 did_conversion = true;
3337 /* We don't want to copy the incoming pointer to a parmreg expected
3338 to hold the value rather than the pointer. */
3339 else if (!data->passed_pointer || parmreg != from_expand)
3340 emit_move_insn (parmreg, validated_mem);
3342 /* If we were passed a pointer but the actual value can safely live
3343 in a register, retrieve it and use it directly. */
3344 if (data->passed_pointer
3345 && (from_expand || TYPE_MODE (TREE_TYPE (parm)) != BLKmode))
3347 rtx src = DECL_RTL (parm);
3349 /* We can't use nominal_mode, because it will have been set to
3350 Pmode above. We must use the actual mode of the parm. */
3351 if (from_expand)
3353 parmreg = from_expand;
3354 gcc_assert (GET_MODE (parmreg) == TYPE_MODE (TREE_TYPE (parm)));
3355 src = gen_rtx_MEM (GET_MODE (parmreg), validated_mem);
3356 set_mem_attributes (src, parm, 1);
3358 else if (use_register_for_decl (parm))
3360 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3361 mark_user_reg (parmreg);
3363 else
3365 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3366 TYPE_MODE (TREE_TYPE (parm)),
3367 TYPE_ALIGN (TREE_TYPE (parm)));
3368 parmreg
3369 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3370 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3371 align);
3372 set_mem_attributes (parmreg, parm, 1);
3375 if (GET_MODE (parmreg) != GET_MODE (src))
3377 rtx tempreg = gen_reg_rtx (GET_MODE (src));
3378 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3380 push_to_sequence2 (all->first_conversion_insn,
3381 all->last_conversion_insn);
3382 emit_move_insn (tempreg, src);
3383 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3384 emit_move_insn (parmreg, tempreg);
3385 all->first_conversion_insn = get_insns ();
3386 all->last_conversion_insn = get_last_insn ();
3387 end_sequence ();
3389 did_conversion = true;
3391 else if (GET_MODE (parmreg) == BLKmode)
3392 gcc_assert (parm_in_stack_slot_p (parm));
3393 else
3394 emit_move_insn (parmreg, src);
3396 SET_DECL_RTL (parm, parmreg);
3398 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3399 now the parm. */
3400 data->stack_parm = equiv_stack_parm = NULL;
3403 /* Mark the register as eliminable if we did no conversion and it was
3404 copied from memory at a fixed offset, and the arg pointer was not
3405 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3406 offset formed an invalid address, such memory-equivalences as we
3407 make here would screw up life analysis for it. */
3408 if (data->nominal_mode == data->passed_mode
3409 && !did_conversion
3410 && equiv_stack_parm != 0
3411 && MEM_P (equiv_stack_parm)
3412 && data->locate.offset.var == 0
3413 && reg_mentioned_p (virtual_incoming_args_rtx,
3414 XEXP (equiv_stack_parm, 0)))
3416 rtx_insn *linsn = get_last_insn ();
3417 rtx_insn *sinsn;
3418 rtx set;
3420 /* Mark complex types separately. */
3421 if (GET_CODE (parmreg) == CONCAT)
3423 machine_mode submode
3424 = GET_MODE_INNER (GET_MODE (parmreg));
3425 int regnor = REGNO (XEXP (parmreg, 0));
3426 int regnoi = REGNO (XEXP (parmreg, 1));
3427 rtx stackr = adjust_address_nv (equiv_stack_parm, submode, 0);
3428 rtx stacki = adjust_address_nv (equiv_stack_parm, submode,
3429 GET_MODE_SIZE (submode));
3431 /* Scan backwards for the set of the real and
3432 imaginary parts. */
3433 for (sinsn = linsn; sinsn != 0;
3434 sinsn = prev_nonnote_insn (sinsn))
3436 set = single_set (sinsn);
3437 if (set == 0)
3438 continue;
3440 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3441 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3442 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3443 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3446 else
3447 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3450 /* For pointer data type, suggest pointer register. */
3451 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3452 mark_reg_pointer (parmreg,
3453 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3456 /* A subroutine of assign_parms. Allocate stack space to hold the current
3457 parameter. Get it there. Perform all ABI specified conversions. */
3459 static void
3460 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3461 struct assign_parm_data_one *data)
3463 /* Value must be stored in the stack slot STACK_PARM during function
3464 execution. */
3465 bool to_conversion = false;
3467 assign_parm_remove_parallels (data);
3469 if (data->promoted_mode != data->nominal_mode)
3471 /* Conversion is required. */
3472 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3474 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3476 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3477 to_conversion = true;
3479 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3480 TYPE_UNSIGNED (TREE_TYPE (parm)));
3482 if (data->stack_parm)
3484 int offset = subreg_lowpart_offset (data->nominal_mode,
3485 GET_MODE (data->stack_parm));
3486 /* ??? This may need a big-endian conversion on sparc64. */
3487 data->stack_parm
3488 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3489 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3490 set_mem_offset (data->stack_parm,
3491 MEM_OFFSET (data->stack_parm) + offset);
3495 if (data->entry_parm != data->stack_parm)
3497 rtx src, dest;
3498 rtx from_expand = NULL_RTX;
3500 if (data->stack_parm == 0)
3502 from_expand = rtl_for_parm (all, parm);
3503 if (from_expand)
3504 gcc_assert (GET_MODE (from_expand) == GET_MODE (data->entry_parm));
3505 if (from_expand && !parm_in_unassigned_mem_p (parm, from_expand))
3506 data->stack_parm = from_expand;
3509 if (data->stack_parm == 0)
3511 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3512 GET_MODE (data->entry_parm),
3513 TYPE_ALIGN (data->passed_type));
3514 data->stack_parm
3515 = assign_stack_local (GET_MODE (data->entry_parm),
3516 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3517 align);
3518 if (!from_expand)
3519 set_mem_attributes (data->stack_parm, parm, 1);
3520 else
3522 gcc_assert (GET_CODE (data->stack_parm) == MEM);
3523 gcc_assert (parm_in_unassigned_mem_p (parm, from_expand));
3524 XEXP (from_expand, 0) = XEXP (data->stack_parm, 0);
3525 PUT_MODE (from_expand, GET_MODE (data->stack_parm));
3526 data->stack_parm = copy_rtx (from_expand);
3530 dest = validize_mem (copy_rtx (data->stack_parm));
3531 src = validize_mem (copy_rtx (data->entry_parm));
3533 if (MEM_P (src))
3535 /* Use a block move to handle potentially misaligned entry_parm. */
3536 if (!to_conversion)
3537 push_to_sequence2 (all->first_conversion_insn,
3538 all->last_conversion_insn);
3539 to_conversion = true;
3541 emit_block_move (dest, src,
3542 GEN_INT (int_size_in_bytes (data->passed_type)),
3543 BLOCK_OP_NORMAL);
3545 else
3546 emit_move_insn (dest, src);
3549 if (to_conversion)
3551 all->first_conversion_insn = get_insns ();
3552 all->last_conversion_insn = get_last_insn ();
3553 end_sequence ();
3556 SET_DECL_RTL (parm, data->stack_parm);
3559 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3560 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3562 static void
3563 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3564 vec<tree> fnargs)
3566 tree parm;
3567 tree orig_fnargs = all->orig_fnargs;
3568 unsigned i = 0;
3570 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3572 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3573 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3575 rtx tmp, real, imag;
3576 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3578 real = DECL_RTL (fnargs[i]);
3579 imag = DECL_RTL (fnargs[i + 1]);
3580 if (inner != GET_MODE (real))
3582 real = simplify_gen_subreg (inner, real, GET_MODE (real),
3583 subreg_lowpart_offset
3584 (inner, GET_MODE (real)));
3585 imag = simplify_gen_subreg (inner, imag, GET_MODE (imag),
3586 subreg_lowpart_offset
3587 (inner, GET_MODE (imag)));
3590 if ((tmp = rtl_for_parm (all, parm)) != NULL_RTX
3591 && rtx_equal_p (real,
3592 read_complex_part (tmp, false))
3593 && rtx_equal_p (imag,
3594 read_complex_part (tmp, true)))
3595 ; /* We now have the right rtl in tmp. */
3596 else if (TREE_ADDRESSABLE (parm))
3598 rtx rmem, imem;
3599 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3600 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3601 DECL_MODE (parm),
3602 TYPE_ALIGN (TREE_TYPE (parm)));
3604 /* split_complex_arg put the real and imag parts in
3605 pseudos. Move them to memory. */
3606 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3607 set_mem_attributes (tmp, parm, 1);
3608 rmem = adjust_address_nv (tmp, inner, 0);
3609 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3610 push_to_sequence2 (all->first_conversion_insn,
3611 all->last_conversion_insn);
3612 emit_move_insn (rmem, real);
3613 emit_move_insn (imem, imag);
3614 all->first_conversion_insn = get_insns ();
3615 all->last_conversion_insn = get_last_insn ();
3616 end_sequence ();
3618 else
3619 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3620 SET_DECL_RTL (parm, tmp);
3622 real = DECL_INCOMING_RTL (fnargs[i]);
3623 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3624 if (inner != GET_MODE (real))
3626 real = gen_lowpart_SUBREG (inner, real);
3627 imag = gen_lowpart_SUBREG (inner, imag);
3629 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3630 set_decl_incoming_rtl (parm, tmp, false);
3631 i++;
3636 /* Load bounds of PARM from bounds table. */
3637 static void
3638 assign_parm_load_bounds (struct assign_parm_data_one *data,
3639 tree parm,
3640 rtx entry,
3641 unsigned bound_no)
3643 bitmap_iterator bi;
3644 unsigned i, offs = 0;
3645 int bnd_no = -1;
3646 rtx slot = NULL, ptr = NULL;
3648 if (parm)
3650 bitmap slots;
3651 bitmap_obstack_initialize (NULL);
3652 slots = BITMAP_ALLOC (NULL);
3653 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3654 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3656 if (bound_no)
3657 bound_no--;
3658 else
3660 bnd_no = i;
3661 break;
3664 BITMAP_FREE (slots);
3665 bitmap_obstack_release (NULL);
3668 /* We may have bounds not associated with any pointer. */
3669 if (bnd_no != -1)
3670 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3672 /* Find associated pointer. */
3673 if (bnd_no == -1)
3675 /* If bounds are not associated with any bounds,
3676 then it is passed in a register or special slot. */
3677 gcc_assert (data->entry_parm);
3678 ptr = const0_rtx;
3680 else if (MEM_P (entry))
3681 slot = adjust_address (entry, Pmode, offs);
3682 else if (REG_P (entry))
3683 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3684 else if (GET_CODE (entry) == PARALLEL)
3685 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3686 else
3687 gcc_unreachable ();
3688 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3689 data->entry_parm);
3692 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3694 static void
3695 assign_bounds (vec<bounds_parm_data> &bndargs,
3696 struct assign_parm_data_all &all,
3697 bool assign_regs, bool assign_special,
3698 bool assign_bt)
3700 unsigned i, pass;
3701 bounds_parm_data *pbdata;
3703 if (!bndargs.exists ())
3704 return;
3706 /* We make few passes to store input bounds. Firstly handle bounds
3707 passed in registers. After that we load bounds passed in special
3708 slots. Finally we load bounds from Bounds Table. */
3709 for (pass = 0; pass < 3; pass++)
3710 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3712 /* Pass 0 => regs only. */
3713 if (pass == 0
3714 && (!assign_regs
3715 ||(!pbdata->parm_data.entry_parm
3716 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3717 continue;
3718 /* Pass 1 => slots only. */
3719 else if (pass == 1
3720 && (!assign_special
3721 || (!pbdata->parm_data.entry_parm
3722 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3723 continue;
3724 /* Pass 2 => BT only. */
3725 else if (pass == 2
3726 && (!assign_bt
3727 || pbdata->parm_data.entry_parm))
3728 continue;
3730 if (!pbdata->parm_data.entry_parm
3731 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3732 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3733 pbdata->ptr_entry, pbdata->bound_no);
3735 set_decl_incoming_rtl (pbdata->bounds_parm,
3736 pbdata->parm_data.entry_parm, false);
3738 if (assign_parm_setup_block_p (&pbdata->parm_data))
3739 assign_parm_setup_block (&all, pbdata->bounds_parm,
3740 &pbdata->parm_data);
3741 else if (pbdata->parm_data.passed_pointer
3742 || use_register_for_parm_decl (&all, pbdata->bounds_parm))
3743 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3744 &pbdata->parm_data);
3745 else
3746 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3747 &pbdata->parm_data);
3751 /* Assign RTL expressions to the function's parameters. This may involve
3752 copying them into registers and using those registers as the DECL_RTL. */
3754 static void
3755 assign_parms (tree fndecl)
3757 struct assign_parm_data_all all;
3758 tree parm;
3759 vec<tree> fnargs;
3760 unsigned i, bound_no = 0;
3761 tree last_arg = NULL;
3762 rtx last_arg_entry = NULL;
3763 vec<bounds_parm_data> bndargs = vNULL;
3764 bounds_parm_data bdata;
3766 crtl->args.internal_arg_pointer
3767 = targetm.calls.internal_arg_pointer ();
3769 assign_parms_initialize_all (&all);
3770 fnargs = assign_parms_augmented_arg_list (&all);
3772 FOR_EACH_VEC_ELT (fnargs, i, parm)
3774 struct assign_parm_data_one data;
3776 /* Extract the type of PARM; adjust it according to ABI. */
3777 assign_parm_find_data_types (&all, parm, &data);
3779 /* Early out for errors and void parameters. */
3780 if (data.passed_mode == VOIDmode)
3782 SET_DECL_RTL (parm, const0_rtx);
3783 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3784 continue;
3786 else
3787 maybe_reset_rtl_for_parm (parm);
3789 /* Estimate stack alignment from parameter alignment. */
3790 if (SUPPORTS_STACK_ALIGNMENT)
3792 unsigned int align
3793 = targetm.calls.function_arg_boundary (data.promoted_mode,
3794 data.passed_type);
3795 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3796 align);
3797 if (TYPE_ALIGN (data.nominal_type) > align)
3798 align = MINIMUM_ALIGNMENT (data.nominal_type,
3799 TYPE_MODE (data.nominal_type),
3800 TYPE_ALIGN (data.nominal_type));
3801 if (crtl->stack_alignment_estimated < align)
3803 gcc_assert (!crtl->stack_realign_processed);
3804 crtl->stack_alignment_estimated = align;
3808 /* Find out where the parameter arrives in this function. */
3809 assign_parm_find_entry_rtl (&all, &data);
3811 /* Find out where stack space for this parameter might be. */
3812 if (assign_parm_is_stack_parm (&all, &data))
3814 assign_parm_find_stack_rtl (parm, &data);
3815 assign_parm_adjust_entry_rtl (&data);
3817 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3819 /* Remember where last non bounds arg was passed in case
3820 we have to load associated bounds for it from Bounds
3821 Table. */
3822 last_arg = parm;
3823 last_arg_entry = data.entry_parm;
3824 bound_no = 0;
3826 /* Record permanently how this parm was passed. */
3827 if (data.passed_pointer)
3829 rtx incoming_rtl
3830 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3831 data.entry_parm);
3832 set_decl_incoming_rtl (parm, incoming_rtl, true);
3834 else
3835 set_decl_incoming_rtl (parm, data.entry_parm, false);
3837 assign_parm_adjust_stack_rtl (&all, parm, &data);
3839 /* Bounds should be loaded in the particular order to
3840 have registers allocated correctly. Collect info about
3841 input bounds and load them later. */
3842 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3844 /* Expect bounds in instrumented functions only. */
3845 gcc_assert (chkp_function_instrumented_p (fndecl));
3847 bdata.parm_data = data;
3848 bdata.bounds_parm = parm;
3849 bdata.ptr_parm = last_arg;
3850 bdata.ptr_entry = last_arg_entry;
3851 bdata.bound_no = bound_no;
3852 bndargs.safe_push (bdata);
3854 else
3856 if (assign_parm_setup_block_p (&data))
3857 assign_parm_setup_block (&all, parm, &data);
3858 else if (data.passed_pointer
3859 || use_register_for_parm_decl (&all, parm))
3860 assign_parm_setup_reg (&all, parm, &data);
3861 else
3862 assign_parm_setup_stack (&all, parm, &data);
3865 if (cfun->stdarg && !DECL_CHAIN (parm))
3867 int pretend_bytes = 0;
3869 assign_parms_setup_varargs (&all, &data, false);
3871 if (chkp_function_instrumented_p (fndecl))
3873 /* We expect this is the last parm. Otherwise it is wrong
3874 to assign bounds right now. */
3875 gcc_assert (i == (fnargs.length () - 1));
3876 assign_bounds (bndargs, all, true, false, false);
3877 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3878 data.promoted_mode,
3879 data.passed_type,
3880 &pretend_bytes,
3881 false);
3882 assign_bounds (bndargs, all, false, true, true);
3883 bndargs.release ();
3887 /* Update info on where next arg arrives in registers. */
3888 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3889 data.passed_type, data.named_arg);
3891 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3892 bound_no++;
3895 assign_bounds (bndargs, all, true, true, true);
3896 bndargs.release ();
3898 if (targetm.calls.split_complex_arg)
3899 assign_parms_unsplit_complex (&all, fnargs);
3901 fnargs.release ();
3903 /* Output all parameter conversion instructions (possibly including calls)
3904 now that all parameters have been copied out of hard registers. */
3905 emit_insn (all.first_conversion_insn);
3907 /* Estimate reload stack alignment from scalar return mode. */
3908 if (SUPPORTS_STACK_ALIGNMENT)
3910 if (DECL_RESULT (fndecl))
3912 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3913 machine_mode mode = TYPE_MODE (type);
3915 if (mode != BLKmode
3916 && mode != VOIDmode
3917 && !AGGREGATE_TYPE_P (type))
3919 unsigned int align = GET_MODE_ALIGNMENT (mode);
3920 if (crtl->stack_alignment_estimated < align)
3922 gcc_assert (!crtl->stack_realign_processed);
3923 crtl->stack_alignment_estimated = align;
3929 /* If we are receiving a struct value address as the first argument, set up
3930 the RTL for the function result. As this might require code to convert
3931 the transmitted address to Pmode, we do this here to ensure that possible
3932 preliminary conversions of the address have been emitted already. */
3933 if (all.function_result_decl)
3935 tree result = DECL_RESULT (current_function_decl);
3936 rtx addr = DECL_RTL (all.function_result_decl);
3937 rtx x;
3939 if (DECL_BY_REFERENCE (result))
3941 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3942 x = addr;
3944 else
3946 SET_DECL_VALUE_EXPR (result,
3947 build1 (INDIRECT_REF, TREE_TYPE (result),
3948 all.function_result_decl));
3949 addr = convert_memory_address (Pmode, addr);
3950 x = gen_rtx_MEM (DECL_MODE (result), addr);
3951 set_mem_attributes (x, result, 1);
3954 DECL_HAS_VALUE_EXPR_P (result) = 1;
3956 SET_DECL_RTL (result, x);
3959 /* We have aligned all the args, so add space for the pretend args. */
3960 crtl->args.pretend_args_size = all.pretend_args_size;
3961 all.stack_args_size.constant += all.extra_pretend_bytes;
3962 crtl->args.size = all.stack_args_size.constant;
3964 /* Adjust function incoming argument size for alignment and
3965 minimum length. */
3967 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3968 crtl->args.size = CEIL_ROUND (crtl->args.size,
3969 PARM_BOUNDARY / BITS_PER_UNIT);
3971 if (ARGS_GROW_DOWNWARD)
3973 crtl->args.arg_offset_rtx
3974 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3975 : expand_expr (size_diffop (all.stack_args_size.var,
3976 size_int (-all.stack_args_size.constant)),
3977 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3979 else
3980 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3982 /* See how many bytes, if any, of its args a function should try to pop
3983 on return. */
3985 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3986 TREE_TYPE (fndecl),
3987 crtl->args.size);
3989 /* For stdarg.h function, save info about
3990 regs and stack space used by the named args. */
3992 crtl->args.info = all.args_so_far_v;
3994 /* Set the rtx used for the function return value. Put this in its
3995 own variable so any optimizers that need this information don't have
3996 to include tree.h. Do this here so it gets done when an inlined
3997 function gets output. */
3999 crtl->return_rtx
4000 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
4001 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
4003 /* If scalar return value was computed in a pseudo-reg, or was a named
4004 return value that got dumped to the stack, copy that to the hard
4005 return register. */
4006 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
4008 tree decl_result = DECL_RESULT (fndecl);
4009 rtx decl_rtl = DECL_RTL (decl_result);
4011 if (REG_P (decl_rtl)
4012 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4013 : DECL_REGISTER (decl_result))
4015 rtx real_decl_rtl;
4017 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
4018 fndecl, true);
4019 if (chkp_function_instrumented_p (fndecl))
4020 crtl->return_bnd
4021 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
4022 fndecl, true);
4023 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
4024 /* The delay slot scheduler assumes that crtl->return_rtx
4025 holds the hard register containing the return value, not a
4026 temporary pseudo. */
4027 crtl->return_rtx = real_decl_rtl;
4032 /* A subroutine of gimplify_parameters, invoked via walk_tree.
4033 For all seen types, gimplify their sizes. */
4035 static tree
4036 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
4038 tree t = *tp;
4040 *walk_subtrees = 0;
4041 if (TYPE_P (t))
4043 if (POINTER_TYPE_P (t))
4044 *walk_subtrees = 1;
4045 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
4046 && !TYPE_SIZES_GIMPLIFIED (t))
4048 gimplify_type_sizes (t, (gimple_seq *) data);
4049 *walk_subtrees = 1;
4053 return NULL;
4056 /* Gimplify the parameter list for current_function_decl. This involves
4057 evaluating SAVE_EXPRs of variable sized parameters and generating code
4058 to implement callee-copies reference parameters. Returns a sequence of
4059 statements to add to the beginning of the function. */
4061 gimple_seq
4062 gimplify_parameters (void)
4064 struct assign_parm_data_all all;
4065 tree parm;
4066 gimple_seq stmts = NULL;
4067 vec<tree> fnargs;
4068 unsigned i;
4070 assign_parms_initialize_all (&all);
4071 fnargs = assign_parms_augmented_arg_list (&all);
4073 FOR_EACH_VEC_ELT (fnargs, i, parm)
4075 struct assign_parm_data_one data;
4077 /* Extract the type of PARM; adjust it according to ABI. */
4078 assign_parm_find_data_types (&all, parm, &data);
4080 /* Early out for errors and void parameters. */
4081 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
4082 continue;
4084 /* Update info on where next arg arrives in registers. */
4085 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
4086 data.passed_type, data.named_arg);
4088 /* ??? Once upon a time variable_size stuffed parameter list
4089 SAVE_EXPRs (amongst others) onto a pending sizes list. This
4090 turned out to be less than manageable in the gimple world.
4091 Now we have to hunt them down ourselves. */
4092 walk_tree_without_duplicates (&data.passed_type,
4093 gimplify_parm_type, &stmts);
4095 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4097 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4098 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4101 if (data.passed_pointer)
4103 tree type = TREE_TYPE (data.passed_type);
4104 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4105 type, data.named_arg))
4107 tree local, t;
4109 /* For constant-sized objects, this is trivial; for
4110 variable-sized objects, we have to play games. */
4111 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4112 && !(flag_stack_check == GENERIC_STACK_CHECK
4113 && compare_tree_int (DECL_SIZE_UNIT (parm),
4114 STACK_CHECK_MAX_VAR_SIZE) > 0))
4116 local = create_tmp_var (type, get_name (parm));
4117 DECL_IGNORED_P (local) = 0;
4118 /* If PARM was addressable, move that flag over
4119 to the local copy, as its address will be taken,
4120 not the PARMs. Keep the parms address taken
4121 as we'll query that flag during gimplification. */
4122 if (TREE_ADDRESSABLE (parm))
4123 TREE_ADDRESSABLE (local) = 1;
4124 else if (TREE_CODE (type) == COMPLEX_TYPE
4125 || TREE_CODE (type) == VECTOR_TYPE)
4126 DECL_GIMPLE_REG_P (local) = 1;
4128 else
4130 tree ptr_type, addr;
4132 ptr_type = build_pointer_type (type);
4133 addr = create_tmp_reg (ptr_type, get_name (parm));
4134 DECL_IGNORED_P (addr) = 0;
4135 local = build_fold_indirect_ref (addr);
4137 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4138 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
4139 size_int (DECL_ALIGN (parm)));
4141 /* The call has been built for a variable-sized object. */
4142 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4143 t = fold_convert (ptr_type, t);
4144 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4145 gimplify_and_add (t, &stmts);
4148 gimplify_assign (local, parm, &stmts);
4150 SET_DECL_VALUE_EXPR (parm, local);
4151 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4156 fnargs.release ();
4158 return stmts;
4161 /* Compute the size and offset from the start of the stacked arguments for a
4162 parm passed in mode PASSED_MODE and with type TYPE.
4164 INITIAL_OFFSET_PTR points to the current offset into the stacked
4165 arguments.
4167 The starting offset and size for this parm are returned in
4168 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
4169 nonzero, the offset is that of stack slot, which is returned in
4170 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4171 padding required from the initial offset ptr to the stack slot.
4173 IN_REGS is nonzero if the argument will be passed in registers. It will
4174 never be set if REG_PARM_STACK_SPACE is not defined.
4176 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4177 for arguments which are passed in registers.
4179 FNDECL is the function in which the argument was defined.
4181 There are two types of rounding that are done. The first, controlled by
4182 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4183 argument list to be aligned to the specific boundary (in bits). This
4184 rounding affects the initial and starting offsets, but not the argument
4185 size.
4187 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4188 optionally rounds the size of the parm to PARM_BOUNDARY. The
4189 initial offset is not affected by this rounding, while the size always
4190 is and the starting offset may be. */
4192 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4193 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4194 callers pass in the total size of args so far as
4195 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4197 void
4198 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4199 int reg_parm_stack_space, int partial,
4200 tree fndecl ATTRIBUTE_UNUSED,
4201 struct args_size *initial_offset_ptr,
4202 struct locate_and_pad_arg_data *locate)
4204 tree sizetree;
4205 enum direction where_pad;
4206 unsigned int boundary, round_boundary;
4207 int part_size_in_regs;
4209 /* If we have found a stack parm before we reach the end of the
4210 area reserved for registers, skip that area. */
4211 if (! in_regs)
4213 if (reg_parm_stack_space > 0)
4215 if (initial_offset_ptr->var)
4217 initial_offset_ptr->var
4218 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4219 ssize_int (reg_parm_stack_space));
4220 initial_offset_ptr->constant = 0;
4222 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4223 initial_offset_ptr->constant = reg_parm_stack_space;
4227 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4229 sizetree
4230 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4231 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4232 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4233 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4234 type);
4235 locate->where_pad = where_pad;
4237 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4238 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4239 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4241 locate->boundary = boundary;
4243 if (SUPPORTS_STACK_ALIGNMENT)
4245 /* stack_alignment_estimated can't change after stack has been
4246 realigned. */
4247 if (crtl->stack_alignment_estimated < boundary)
4249 if (!crtl->stack_realign_processed)
4250 crtl->stack_alignment_estimated = boundary;
4251 else
4253 /* If stack is realigned and stack alignment value
4254 hasn't been finalized, it is OK not to increase
4255 stack_alignment_estimated. The bigger alignment
4256 requirement is recorded in stack_alignment_needed
4257 below. */
4258 gcc_assert (!crtl->stack_realign_finalized
4259 && crtl->stack_realign_needed);
4264 /* Remember if the outgoing parameter requires extra alignment on the
4265 calling function side. */
4266 if (crtl->stack_alignment_needed < boundary)
4267 crtl->stack_alignment_needed = boundary;
4268 if (crtl->preferred_stack_boundary < boundary)
4269 crtl->preferred_stack_boundary = boundary;
4271 if (ARGS_GROW_DOWNWARD)
4273 locate->slot_offset.constant = -initial_offset_ptr->constant;
4274 if (initial_offset_ptr->var)
4275 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4276 initial_offset_ptr->var);
4279 tree s2 = sizetree;
4280 if (where_pad != none
4281 && (!tree_fits_uhwi_p (sizetree)
4282 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4283 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4284 SUB_PARM_SIZE (locate->slot_offset, s2);
4287 locate->slot_offset.constant += part_size_in_regs;
4289 if (!in_regs || reg_parm_stack_space > 0)
4290 pad_to_arg_alignment (&locate->slot_offset, boundary,
4291 &locate->alignment_pad);
4293 locate->size.constant = (-initial_offset_ptr->constant
4294 - locate->slot_offset.constant);
4295 if (initial_offset_ptr->var)
4296 locate->size.var = size_binop (MINUS_EXPR,
4297 size_binop (MINUS_EXPR,
4298 ssize_int (0),
4299 initial_offset_ptr->var),
4300 locate->slot_offset.var);
4302 /* Pad_below needs the pre-rounded size to know how much to pad
4303 below. */
4304 locate->offset = locate->slot_offset;
4305 if (where_pad == downward)
4306 pad_below (&locate->offset, passed_mode, sizetree);
4309 else
4311 if (!in_regs || reg_parm_stack_space > 0)
4312 pad_to_arg_alignment (initial_offset_ptr, boundary,
4313 &locate->alignment_pad);
4314 locate->slot_offset = *initial_offset_ptr;
4316 #ifdef PUSH_ROUNDING
4317 if (passed_mode != BLKmode)
4318 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4319 #endif
4321 /* Pad_below needs the pre-rounded size to know how much to pad below
4322 so this must be done before rounding up. */
4323 locate->offset = locate->slot_offset;
4324 if (where_pad == downward)
4325 pad_below (&locate->offset, passed_mode, sizetree);
4327 if (where_pad != none
4328 && (!tree_fits_uhwi_p (sizetree)
4329 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4330 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4332 ADD_PARM_SIZE (locate->size, sizetree);
4334 locate->size.constant -= part_size_in_regs;
4337 #ifdef FUNCTION_ARG_OFFSET
4338 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4339 #endif
4342 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4343 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4345 static void
4346 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4347 struct args_size *alignment_pad)
4349 tree save_var = NULL_TREE;
4350 HOST_WIDE_INT save_constant = 0;
4351 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4352 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4354 #ifdef SPARC_STACK_BOUNDARY_HACK
4355 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4356 the real alignment of %sp. However, when it does this, the
4357 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4358 if (SPARC_STACK_BOUNDARY_HACK)
4359 sp_offset = 0;
4360 #endif
4362 if (boundary > PARM_BOUNDARY)
4364 save_var = offset_ptr->var;
4365 save_constant = offset_ptr->constant;
4368 alignment_pad->var = NULL_TREE;
4369 alignment_pad->constant = 0;
4371 if (boundary > BITS_PER_UNIT)
4373 if (offset_ptr->var)
4375 tree sp_offset_tree = ssize_int (sp_offset);
4376 tree offset = size_binop (PLUS_EXPR,
4377 ARGS_SIZE_TREE (*offset_ptr),
4378 sp_offset_tree);
4379 tree rounded;
4380 if (ARGS_GROW_DOWNWARD)
4381 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4382 else
4383 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4385 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4386 /* ARGS_SIZE_TREE includes constant term. */
4387 offset_ptr->constant = 0;
4388 if (boundary > PARM_BOUNDARY)
4389 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4390 save_var);
4392 else
4394 offset_ptr->constant = -sp_offset +
4395 (ARGS_GROW_DOWNWARD
4396 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4397 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4399 if (boundary > PARM_BOUNDARY)
4400 alignment_pad->constant = offset_ptr->constant - save_constant;
4405 static void
4406 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4408 if (passed_mode != BLKmode)
4410 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4411 offset_ptr->constant
4412 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4413 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4414 - GET_MODE_SIZE (passed_mode));
4416 else
4418 if (TREE_CODE (sizetree) != INTEGER_CST
4419 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4421 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4422 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4423 /* Add it in. */
4424 ADD_PARM_SIZE (*offset_ptr, s2);
4425 SUB_PARM_SIZE (*offset_ptr, sizetree);
4431 /* True if register REGNO was alive at a place where `setjmp' was
4432 called and was set more than once or is an argument. Such regs may
4433 be clobbered by `longjmp'. */
4435 static bool
4436 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4438 /* There appear to be cases where some local vars never reach the
4439 backend but have bogus regnos. */
4440 if (regno >= max_reg_num ())
4441 return false;
4443 return ((REG_N_SETS (regno) > 1
4444 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4445 regno))
4446 && REGNO_REG_SET_P (setjmp_crosses, regno));
4449 /* Walk the tree of blocks describing the binding levels within a
4450 function and warn about variables the might be killed by setjmp or
4451 vfork. This is done after calling flow_analysis before register
4452 allocation since that will clobber the pseudo-regs to hard
4453 regs. */
4455 static void
4456 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4458 tree decl, sub;
4460 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4462 if (TREE_CODE (decl) == VAR_DECL
4463 && DECL_RTL_SET_P (decl)
4464 && REG_P (DECL_RTL (decl))
4465 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4466 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4467 " %<longjmp%> or %<vfork%>", decl);
4470 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4471 setjmp_vars_warning (setjmp_crosses, sub);
4474 /* Do the appropriate part of setjmp_vars_warning
4475 but for arguments instead of local variables. */
4477 static void
4478 setjmp_args_warning (bitmap setjmp_crosses)
4480 tree decl;
4481 for (decl = DECL_ARGUMENTS (current_function_decl);
4482 decl; decl = DECL_CHAIN (decl))
4483 if (DECL_RTL (decl) != 0
4484 && REG_P (DECL_RTL (decl))
4485 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4486 warning (OPT_Wclobbered,
4487 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4488 decl);
4491 /* Generate warning messages for variables live across setjmp. */
4493 void
4494 generate_setjmp_warnings (void)
4496 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4498 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4499 || bitmap_empty_p (setjmp_crosses))
4500 return;
4502 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4503 setjmp_args_warning (setjmp_crosses);
4507 /* Reverse the order of elements in the fragment chain T of blocks,
4508 and return the new head of the chain (old last element).
4509 In addition to that clear BLOCK_SAME_RANGE flags when needed
4510 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4511 its super fragment origin. */
4513 static tree
4514 block_fragments_nreverse (tree t)
4516 tree prev = 0, block, next, prev_super = 0;
4517 tree super = BLOCK_SUPERCONTEXT (t);
4518 if (BLOCK_FRAGMENT_ORIGIN (super))
4519 super = BLOCK_FRAGMENT_ORIGIN (super);
4520 for (block = t; block; block = next)
4522 next = BLOCK_FRAGMENT_CHAIN (block);
4523 BLOCK_FRAGMENT_CHAIN (block) = prev;
4524 if ((prev && !BLOCK_SAME_RANGE (prev))
4525 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4526 != prev_super))
4527 BLOCK_SAME_RANGE (block) = 0;
4528 prev_super = BLOCK_SUPERCONTEXT (block);
4529 BLOCK_SUPERCONTEXT (block) = super;
4530 prev = block;
4532 t = BLOCK_FRAGMENT_ORIGIN (t);
4533 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4534 != prev_super)
4535 BLOCK_SAME_RANGE (t) = 0;
4536 BLOCK_SUPERCONTEXT (t) = super;
4537 return prev;
4540 /* Reverse the order of elements in the chain T of blocks,
4541 and return the new head of the chain (old last element).
4542 Also do the same on subblocks and reverse the order of elements
4543 in BLOCK_FRAGMENT_CHAIN as well. */
4545 static tree
4546 blocks_nreverse_all (tree t)
4548 tree prev = 0, block, next;
4549 for (block = t; block; block = next)
4551 next = BLOCK_CHAIN (block);
4552 BLOCK_CHAIN (block) = prev;
4553 if (BLOCK_FRAGMENT_CHAIN (block)
4554 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4556 BLOCK_FRAGMENT_CHAIN (block)
4557 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4558 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4559 BLOCK_SAME_RANGE (block) = 0;
4561 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4562 prev = block;
4564 return prev;
4568 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4569 and create duplicate blocks. */
4570 /* ??? Need an option to either create block fragments or to create
4571 abstract origin duplicates of a source block. It really depends
4572 on what optimization has been performed. */
4574 void
4575 reorder_blocks (void)
4577 tree block = DECL_INITIAL (current_function_decl);
4579 if (block == NULL_TREE)
4580 return;
4582 auto_vec<tree, 10> block_stack;
4584 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4585 clear_block_marks (block);
4587 /* Prune the old trees away, so that they don't get in the way. */
4588 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4589 BLOCK_CHAIN (block) = NULL_TREE;
4591 /* Recreate the block tree from the note nesting. */
4592 reorder_blocks_1 (get_insns (), block, &block_stack);
4593 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4596 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4598 void
4599 clear_block_marks (tree block)
4601 while (block)
4603 TREE_ASM_WRITTEN (block) = 0;
4604 clear_block_marks (BLOCK_SUBBLOCKS (block));
4605 block = BLOCK_CHAIN (block);
4609 static void
4610 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4611 vec<tree> *p_block_stack)
4613 rtx_insn *insn;
4614 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4616 for (insn = insns; insn; insn = NEXT_INSN (insn))
4618 if (NOTE_P (insn))
4620 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4622 tree block = NOTE_BLOCK (insn);
4623 tree origin;
4625 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4626 origin = block;
4628 if (prev_end)
4629 BLOCK_SAME_RANGE (prev_end) = 0;
4630 prev_end = NULL_TREE;
4632 /* If we have seen this block before, that means it now
4633 spans multiple address regions. Create a new fragment. */
4634 if (TREE_ASM_WRITTEN (block))
4636 tree new_block = copy_node (block);
4638 BLOCK_SAME_RANGE (new_block) = 0;
4639 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4640 BLOCK_FRAGMENT_CHAIN (new_block)
4641 = BLOCK_FRAGMENT_CHAIN (origin);
4642 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4644 NOTE_BLOCK (insn) = new_block;
4645 block = new_block;
4648 if (prev_beg == current_block && prev_beg)
4649 BLOCK_SAME_RANGE (block) = 1;
4651 prev_beg = origin;
4653 BLOCK_SUBBLOCKS (block) = 0;
4654 TREE_ASM_WRITTEN (block) = 1;
4655 /* When there's only one block for the entire function,
4656 current_block == block and we mustn't do this, it
4657 will cause infinite recursion. */
4658 if (block != current_block)
4660 tree super;
4661 if (block != origin)
4662 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4663 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4664 (origin))
4665 == current_block);
4666 if (p_block_stack->is_empty ())
4667 super = current_block;
4668 else
4670 super = p_block_stack->last ();
4671 gcc_assert (super == current_block
4672 || BLOCK_FRAGMENT_ORIGIN (super)
4673 == current_block);
4675 BLOCK_SUPERCONTEXT (block) = super;
4676 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4677 BLOCK_SUBBLOCKS (current_block) = block;
4678 current_block = origin;
4680 p_block_stack->safe_push (block);
4682 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4684 NOTE_BLOCK (insn) = p_block_stack->pop ();
4685 current_block = BLOCK_SUPERCONTEXT (current_block);
4686 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4687 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4688 prev_beg = NULL_TREE;
4689 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4690 ? NOTE_BLOCK (insn) : NULL_TREE;
4693 else
4695 prev_beg = NULL_TREE;
4696 if (prev_end)
4697 BLOCK_SAME_RANGE (prev_end) = 0;
4698 prev_end = NULL_TREE;
4703 /* Reverse the order of elements in the chain T of blocks,
4704 and return the new head of the chain (old last element). */
4706 tree
4707 blocks_nreverse (tree t)
4709 tree prev = 0, block, next;
4710 for (block = t; block; block = next)
4712 next = BLOCK_CHAIN (block);
4713 BLOCK_CHAIN (block) = prev;
4714 prev = block;
4716 return prev;
4719 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4720 by modifying the last node in chain 1 to point to chain 2. */
4722 tree
4723 block_chainon (tree op1, tree op2)
4725 tree t1;
4727 if (!op1)
4728 return op2;
4729 if (!op2)
4730 return op1;
4732 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4733 continue;
4734 BLOCK_CHAIN (t1) = op2;
4736 #ifdef ENABLE_TREE_CHECKING
4738 tree t2;
4739 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4740 gcc_assert (t2 != t1);
4742 #endif
4744 return op1;
4747 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4748 non-NULL, list them all into VECTOR, in a depth-first preorder
4749 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4750 blocks. */
4752 static int
4753 all_blocks (tree block, tree *vector)
4755 int n_blocks = 0;
4757 while (block)
4759 TREE_ASM_WRITTEN (block) = 0;
4761 /* Record this block. */
4762 if (vector)
4763 vector[n_blocks] = block;
4765 ++n_blocks;
4767 /* Record the subblocks, and their subblocks... */
4768 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4769 vector ? vector + n_blocks : 0);
4770 block = BLOCK_CHAIN (block);
4773 return n_blocks;
4776 /* Return a vector containing all the blocks rooted at BLOCK. The
4777 number of elements in the vector is stored in N_BLOCKS_P. The
4778 vector is dynamically allocated; it is the caller's responsibility
4779 to call `free' on the pointer returned. */
4781 static tree *
4782 get_block_vector (tree block, int *n_blocks_p)
4784 tree *block_vector;
4786 *n_blocks_p = all_blocks (block, NULL);
4787 block_vector = XNEWVEC (tree, *n_blocks_p);
4788 all_blocks (block, block_vector);
4790 return block_vector;
4793 static GTY(()) int next_block_index = 2;
4795 /* Set BLOCK_NUMBER for all the blocks in FN. */
4797 void
4798 number_blocks (tree fn)
4800 int i;
4801 int n_blocks;
4802 tree *block_vector;
4804 /* For SDB and XCOFF debugging output, we start numbering the blocks
4805 from 1 within each function, rather than keeping a running
4806 count. */
4807 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4808 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4809 next_block_index = 1;
4810 #endif
4812 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4814 /* The top-level BLOCK isn't numbered at all. */
4815 for (i = 1; i < n_blocks; ++i)
4816 /* We number the blocks from two. */
4817 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4819 free (block_vector);
4821 return;
4824 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4826 DEBUG_FUNCTION tree
4827 debug_find_var_in_block_tree (tree var, tree block)
4829 tree t;
4831 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4832 if (t == var)
4833 return block;
4835 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4837 tree ret = debug_find_var_in_block_tree (var, t);
4838 if (ret)
4839 return ret;
4842 return NULL_TREE;
4845 /* Keep track of whether we're in a dummy function context. If we are,
4846 we don't want to invoke the set_current_function hook, because we'll
4847 get into trouble if the hook calls target_reinit () recursively or
4848 when the initial initialization is not yet complete. */
4850 static bool in_dummy_function;
4852 /* Invoke the target hook when setting cfun. Update the optimization options
4853 if the function uses different options than the default. */
4855 static void
4856 invoke_set_current_function_hook (tree fndecl)
4858 if (!in_dummy_function)
4860 tree opts = ((fndecl)
4861 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4862 : optimization_default_node);
4864 if (!opts)
4865 opts = optimization_default_node;
4867 /* Change optimization options if needed. */
4868 if (optimization_current_node != opts)
4870 optimization_current_node = opts;
4871 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4874 targetm.set_current_function (fndecl);
4875 this_fn_optabs = this_target_optabs;
4877 if (opts != optimization_default_node)
4879 init_tree_optimization_optabs (opts);
4880 if (TREE_OPTIMIZATION_OPTABS (opts))
4881 this_fn_optabs = (struct target_optabs *)
4882 TREE_OPTIMIZATION_OPTABS (opts);
4887 /* cfun should never be set directly; use this function. */
4889 void
4890 set_cfun (struct function *new_cfun)
4892 if (cfun != new_cfun)
4894 cfun = new_cfun;
4895 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4899 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4901 static vec<function_p> cfun_stack;
4903 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4904 current_function_decl accordingly. */
4906 void
4907 push_cfun (struct function *new_cfun)
4909 gcc_assert ((!cfun && !current_function_decl)
4910 || (cfun && current_function_decl == cfun->decl));
4911 cfun_stack.safe_push (cfun);
4912 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4913 set_cfun (new_cfun);
4916 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4918 void
4919 pop_cfun (void)
4921 struct function *new_cfun = cfun_stack.pop ();
4922 /* When in_dummy_function, we do have a cfun but current_function_decl is
4923 NULL. We also allow pushing NULL cfun and subsequently changing
4924 current_function_decl to something else and have both restored by
4925 pop_cfun. */
4926 gcc_checking_assert (in_dummy_function
4927 || !cfun
4928 || current_function_decl == cfun->decl);
4929 set_cfun (new_cfun);
4930 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4933 /* Return value of funcdef and increase it. */
4935 get_next_funcdef_no (void)
4937 return funcdef_no++;
4940 /* Return value of funcdef. */
4942 get_last_funcdef_no (void)
4944 return funcdef_no;
4947 /* Allocate a function structure for FNDECL and set its contents
4948 to the defaults. Set cfun to the newly-allocated object.
4949 Some of the helper functions invoked during initialization assume
4950 that cfun has already been set. Therefore, assign the new object
4951 directly into cfun and invoke the back end hook explicitly at the
4952 very end, rather than initializing a temporary and calling set_cfun
4953 on it.
4955 ABSTRACT_P is true if this is a function that will never be seen by
4956 the middle-end. Such functions are front-end concepts (like C++
4957 function templates) that do not correspond directly to functions
4958 placed in object files. */
4960 void
4961 allocate_struct_function (tree fndecl, bool abstract_p)
4963 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4965 cfun = ggc_cleared_alloc<function> ();
4967 init_eh_for_function ();
4969 if (init_machine_status)
4970 cfun->machine = (*init_machine_status) ();
4972 #ifdef OVERRIDE_ABI_FORMAT
4973 OVERRIDE_ABI_FORMAT (fndecl);
4974 #endif
4976 if (fndecl != NULL_TREE)
4978 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4979 cfun->decl = fndecl;
4980 current_function_funcdef_no = get_next_funcdef_no ();
4983 invoke_set_current_function_hook (fndecl);
4985 if (fndecl != NULL_TREE)
4987 tree result = DECL_RESULT (fndecl);
4988 if (!abstract_p && aggregate_value_p (result, fndecl))
4990 #ifdef PCC_STATIC_STRUCT_RETURN
4991 cfun->returns_pcc_struct = 1;
4992 #endif
4993 cfun->returns_struct = 1;
4996 cfun->stdarg = stdarg_p (fntype);
4998 /* Assume all registers in stdarg functions need to be saved. */
4999 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
5000 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
5002 /* ??? This could be set on a per-function basis by the front-end
5003 but is this worth the hassle? */
5004 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
5005 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
5007 if (!profile_flag && !flag_instrument_function_entry_exit)
5008 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
5012 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
5013 instead of just setting it. */
5015 void
5016 push_struct_function (tree fndecl)
5018 /* When in_dummy_function we might be in the middle of a pop_cfun and
5019 current_function_decl and cfun may not match. */
5020 gcc_assert (in_dummy_function
5021 || (!cfun && !current_function_decl)
5022 || (cfun && current_function_decl == cfun->decl));
5023 cfun_stack.safe_push (cfun);
5024 current_function_decl = fndecl;
5025 allocate_struct_function (fndecl, false);
5028 /* Reset crtl and other non-struct-function variables to defaults as
5029 appropriate for emitting rtl at the start of a function. */
5031 static void
5032 prepare_function_start (void)
5034 gcc_assert (!get_last_insn ());
5035 init_temp_slots ();
5036 init_emit ();
5037 init_varasm_status ();
5038 init_expr ();
5039 default_rtl_profile ();
5041 if (flag_stack_usage_info)
5043 cfun->su = ggc_cleared_alloc<stack_usage> ();
5044 cfun->su->static_stack_size = -1;
5047 cse_not_expected = ! optimize;
5049 /* Caller save not needed yet. */
5050 caller_save_needed = 0;
5052 /* We haven't done register allocation yet. */
5053 reg_renumber = 0;
5055 /* Indicate that we have not instantiated virtual registers yet. */
5056 virtuals_instantiated = 0;
5058 /* Indicate that we want CONCATs now. */
5059 generating_concat_p = 1;
5061 /* Indicate we have no need of a frame pointer yet. */
5062 frame_pointer_needed = 0;
5065 void
5066 push_dummy_function (bool with_decl)
5068 tree fn_decl, fn_type, fn_result_decl;
5070 gcc_assert (!in_dummy_function);
5071 in_dummy_function = true;
5073 if (with_decl)
5075 fn_type = build_function_type_list (void_type_node, NULL_TREE);
5076 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5077 fn_type);
5078 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5079 NULL_TREE, void_type_node);
5080 DECL_RESULT (fn_decl) = fn_result_decl;
5082 else
5083 fn_decl = NULL_TREE;
5085 push_struct_function (fn_decl);
5088 /* Initialize the rtl expansion mechanism so that we can do simple things
5089 like generate sequences. This is used to provide a context during global
5090 initialization of some passes. You must call expand_dummy_function_end
5091 to exit this context. */
5093 void
5094 init_dummy_function_start (void)
5096 push_dummy_function (false);
5097 prepare_function_start ();
5100 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5101 and initialize static variables for generating RTL for the statements
5102 of the function. */
5104 void
5105 init_function_start (tree subr)
5107 if (subr && DECL_STRUCT_FUNCTION (subr))
5108 set_cfun (DECL_STRUCT_FUNCTION (subr));
5109 else
5110 allocate_struct_function (subr, false);
5112 /* Initialize backend, if needed. */
5113 initialize_rtl ();
5115 prepare_function_start ();
5116 decide_function_section (subr);
5118 /* Warn if this value is an aggregate type,
5119 regardless of which calling convention we are using for it. */
5120 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5121 warning (OPT_Waggregate_return, "function returns an aggregate");
5124 /* Expand code to verify the stack_protect_guard. This is invoked at
5125 the end of a function to be protected. */
5127 void
5128 stack_protect_epilogue (void)
5130 tree guard_decl = targetm.stack_protect_guard ();
5131 rtx_code_label *label = gen_label_rtx ();
5132 rtx x, y, tmp;
5133 rtx_insn *seq;
5135 x = expand_normal (crtl->stack_protect_guard);
5136 y = expand_normal (guard_decl);
5138 /* Allow the target to compare Y with X without leaking either into
5139 a register. */
5140 if (targetm.have_stack_protect_test ()
5141 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5142 emit_insn (seq);
5143 else
5144 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5146 /* The noreturn predictor has been moved to the tree level. The rtl-level
5147 predictors estimate this branch about 20%, which isn't enough to get
5148 things moved out of line. Since this is the only extant case of adding
5149 a noreturn function at the rtl level, it doesn't seem worth doing ought
5150 except adding the prediction by hand. */
5151 tmp = get_last_insn ();
5152 if (JUMP_P (tmp))
5153 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
5155 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5156 free_temp_slots ();
5157 emit_label (label);
5160 /* Start the RTL for a new function, and set variables used for
5161 emitting RTL.
5162 SUBR is the FUNCTION_DECL node.
5163 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5164 the function's parameters, which must be run at any return statement. */
5166 void
5167 expand_function_start (tree subr)
5169 /* Make sure volatile mem refs aren't considered
5170 valid operands of arithmetic insns. */
5171 init_recog_no_volatile ();
5173 crtl->profile
5174 = (profile_flag
5175 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5177 crtl->limit_stack
5178 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5180 /* Make the label for return statements to jump to. Do not special
5181 case machines with special return instructions -- they will be
5182 handled later during jump, ifcvt, or epilogue creation. */
5183 return_label = gen_label_rtx ();
5185 /* Initialize rtx used to return the value. */
5186 /* Do this before assign_parms so that we copy the struct value address
5187 before any library calls that assign parms might generate. */
5189 /* Decide whether to return the value in memory or in a register. */
5190 tree res = DECL_RESULT (subr);
5191 maybe_reset_rtl_for_parm (res);
5192 if (aggregate_value_p (res, subr))
5194 /* Returning something that won't go in a register. */
5195 rtx value_address = 0;
5197 #ifdef PCC_STATIC_STRUCT_RETURN
5198 if (cfun->returns_pcc_struct)
5200 int size = int_size_in_bytes (TREE_TYPE (res));
5201 value_address = assemble_static_space (size);
5203 else
5204 #endif
5206 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5207 /* Expect to be passed the address of a place to store the value.
5208 If it is passed as an argument, assign_parms will take care of
5209 it. */
5210 if (sv)
5212 if (DECL_BY_REFERENCE (res))
5213 value_address = get_rtl_for_parm_ssa_default_def (res);
5214 if (!value_address)
5215 value_address = gen_reg_rtx (Pmode);
5216 emit_move_insn (value_address, sv);
5219 if (value_address)
5221 rtx x = value_address;
5222 if (!DECL_BY_REFERENCE (res))
5224 x = get_rtl_for_parm_ssa_default_def (res);
5225 if (!x)
5227 x = gen_rtx_MEM (DECL_MODE (res), value_address);
5228 set_mem_attributes (x, res, 1);
5231 SET_DECL_RTL (res, x);
5234 else if (DECL_MODE (res) == VOIDmode)
5235 /* If return mode is void, this decl rtl should not be used. */
5236 SET_DECL_RTL (res, NULL_RTX);
5237 else
5239 /* Compute the return values into a pseudo reg, which we will copy
5240 into the true return register after the cleanups are done. */
5241 tree return_type = TREE_TYPE (res);
5242 rtx x = get_rtl_for_parm_ssa_default_def (res);
5243 if (x)
5244 /* Use it. */;
5245 else if (TYPE_MODE (return_type) != BLKmode
5246 && targetm.calls.return_in_msb (return_type))
5247 /* expand_function_end will insert the appropriate padding in
5248 this case. Use the return value's natural (unpadded) mode
5249 within the function proper. */
5250 x = gen_reg_rtx (TYPE_MODE (return_type));
5251 else
5253 /* In order to figure out what mode to use for the pseudo, we
5254 figure out what the mode of the eventual return register will
5255 actually be, and use that. */
5256 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5258 /* Structures that are returned in registers are not
5259 aggregate_value_p, so we may see a PARALLEL or a REG. */
5260 if (REG_P (hard_reg))
5261 x = gen_reg_rtx (GET_MODE (hard_reg));
5262 else
5264 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5265 x = gen_group_rtx (hard_reg);
5269 SET_DECL_RTL (res, x);
5271 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5272 result to the real return register(s). */
5273 DECL_REGISTER (res) = 1;
5275 if (chkp_function_instrumented_p (current_function_decl))
5277 tree return_type = TREE_TYPE (res);
5278 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5279 subr, 1);
5280 SET_DECL_BOUNDS_RTL (res, bounds);
5284 /* Initialize rtx for parameters and local variables.
5285 In some cases this requires emitting insns. */
5286 assign_parms (subr);
5288 /* If function gets a static chain arg, store it. */
5289 if (cfun->static_chain_decl)
5291 tree parm = cfun->static_chain_decl;
5292 rtx local, chain;
5293 rtx_insn *insn;
5295 local = get_rtl_for_parm_ssa_default_def (parm);
5296 if (!local)
5297 local = gen_reg_rtx (Pmode);
5298 chain = targetm.calls.static_chain (current_function_decl, true);
5300 set_decl_incoming_rtl (parm, chain, false);
5301 SET_DECL_RTL (parm, local);
5302 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5304 if (GET_MODE (local) != Pmode)
5305 local = convert_to_mode (Pmode, local,
5306 TYPE_UNSIGNED (TREE_TYPE (parm)));
5308 insn = emit_move_insn (local, chain);
5310 /* Mark the register as eliminable, similar to parameters. */
5311 if (MEM_P (chain)
5312 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5313 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5315 /* If we aren't optimizing, save the static chain onto the stack. */
5316 if (!optimize)
5318 tree saved_static_chain_decl
5319 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5320 DECL_NAME (parm), TREE_TYPE (parm));
5321 rtx saved_static_chain_rtx
5322 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5323 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5324 emit_move_insn (saved_static_chain_rtx, chain);
5325 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5326 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5330 /* If the function receives a non-local goto, then store the
5331 bits we need to restore the frame pointer. */
5332 if (cfun->nonlocal_goto_save_area)
5334 tree t_save;
5335 rtx r_save;
5337 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5338 gcc_assert (DECL_RTL_SET_P (var));
5340 t_save = build4 (ARRAY_REF,
5341 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5342 cfun->nonlocal_goto_save_area,
5343 integer_zero_node, NULL_TREE, NULL_TREE);
5344 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5345 gcc_assert (GET_MODE (r_save) == Pmode);
5347 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5348 update_nonlocal_goto_save_area ();
5351 /* The following was moved from init_function_start.
5352 The move is supposed to make sdb output more accurate. */
5353 /* Indicate the beginning of the function body,
5354 as opposed to parm setup. */
5355 emit_note (NOTE_INSN_FUNCTION_BEG);
5357 gcc_assert (NOTE_P (get_last_insn ()));
5359 parm_birth_insn = get_last_insn ();
5361 if (crtl->profile)
5363 #ifdef PROFILE_HOOK
5364 PROFILE_HOOK (current_function_funcdef_no);
5365 #endif
5368 /* If we are doing generic stack checking, the probe should go here. */
5369 if (flag_stack_check == GENERIC_STACK_CHECK)
5370 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5373 void
5374 pop_dummy_function (void)
5376 pop_cfun ();
5377 in_dummy_function = false;
5380 /* Undo the effects of init_dummy_function_start. */
5381 void
5382 expand_dummy_function_end (void)
5384 gcc_assert (in_dummy_function);
5386 /* End any sequences that failed to be closed due to syntax errors. */
5387 while (in_sequence_p ())
5388 end_sequence ();
5390 /* Outside function body, can't compute type's actual size
5391 until next function's body starts. */
5393 free_after_parsing (cfun);
5394 free_after_compilation (cfun);
5395 pop_dummy_function ();
5398 /* Helper for diddle_return_value. */
5400 void
5401 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5403 if (! outgoing)
5404 return;
5406 if (REG_P (outgoing))
5407 (*doit) (outgoing, arg);
5408 else if (GET_CODE (outgoing) == PARALLEL)
5410 int i;
5412 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5414 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5416 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5417 (*doit) (x, arg);
5422 /* Call DOIT for each hard register used as a return value from
5423 the current function. */
5425 void
5426 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5428 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5429 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5432 static void
5433 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5435 emit_clobber (reg);
5438 void
5439 clobber_return_register (void)
5441 diddle_return_value (do_clobber_return_reg, NULL);
5443 /* In case we do use pseudo to return value, clobber it too. */
5444 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5446 tree decl_result = DECL_RESULT (current_function_decl);
5447 rtx decl_rtl = DECL_RTL (decl_result);
5448 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5450 do_clobber_return_reg (decl_rtl, NULL);
5455 static void
5456 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5458 emit_use (reg);
5461 static void
5462 use_return_register (void)
5464 diddle_return_value (do_use_return_reg, NULL);
5467 /* Set the location of the insn chain starting at INSN to LOC. */
5469 static void
5470 set_insn_locations (rtx_insn *insn, int loc)
5472 while (insn != NULL)
5474 if (INSN_P (insn))
5475 INSN_LOCATION (insn) = loc;
5476 insn = NEXT_INSN (insn);
5480 /* Generate RTL for the end of the current function. */
5482 void
5483 expand_function_end (void)
5485 /* If arg_pointer_save_area was referenced only from a nested
5486 function, we will not have initialized it yet. Do that now. */
5487 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5488 get_arg_pointer_save_area ();
5490 /* If we are doing generic stack checking and this function makes calls,
5491 do a stack probe at the start of the function to ensure we have enough
5492 space for another stack frame. */
5493 if (flag_stack_check == GENERIC_STACK_CHECK)
5495 rtx_insn *insn, *seq;
5497 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5498 if (CALL_P (insn))
5500 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5501 start_sequence ();
5502 if (STACK_CHECK_MOVING_SP)
5503 anti_adjust_stack_and_probe (max_frame_size, true);
5504 else
5505 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5506 seq = get_insns ();
5507 end_sequence ();
5508 set_insn_locations (seq, prologue_location);
5509 emit_insn_before (seq, stack_check_probe_note);
5510 break;
5514 /* End any sequences that failed to be closed due to syntax errors. */
5515 while (in_sequence_p ())
5516 end_sequence ();
5518 clear_pending_stack_adjust ();
5519 do_pending_stack_adjust ();
5521 /* Output a linenumber for the end of the function.
5522 SDB depends on this. */
5523 set_curr_insn_location (input_location);
5525 /* Before the return label (if any), clobber the return
5526 registers so that they are not propagated live to the rest of
5527 the function. This can only happen with functions that drop
5528 through; if there had been a return statement, there would
5529 have either been a return rtx, or a jump to the return label.
5531 We delay actual code generation after the current_function_value_rtx
5532 is computed. */
5533 rtx_insn *clobber_after = get_last_insn ();
5535 /* Output the label for the actual return from the function. */
5536 emit_label (return_label);
5538 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5540 /* Let except.c know where it should emit the call to unregister
5541 the function context for sjlj exceptions. */
5542 if (flag_exceptions)
5543 sjlj_emit_function_exit_after (get_last_insn ());
5545 else
5547 /* We want to ensure that instructions that may trap are not
5548 moved into the epilogue by scheduling, because we don't
5549 always emit unwind information for the epilogue. */
5550 if (cfun->can_throw_non_call_exceptions)
5551 emit_insn (gen_blockage ());
5554 /* If this is an implementation of throw, do what's necessary to
5555 communicate between __builtin_eh_return and the epilogue. */
5556 expand_eh_return ();
5558 /* If scalar return value was computed in a pseudo-reg, or was a named
5559 return value that got dumped to the stack, copy that to the hard
5560 return register. */
5561 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5563 tree decl_result = DECL_RESULT (current_function_decl);
5564 rtx decl_rtl = DECL_RTL (decl_result);
5566 if (REG_P (decl_rtl)
5567 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5568 : DECL_REGISTER (decl_result))
5570 rtx real_decl_rtl = crtl->return_rtx;
5572 /* This should be set in assign_parms. */
5573 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5575 /* If this is a BLKmode structure being returned in registers,
5576 then use the mode computed in expand_return. Note that if
5577 decl_rtl is memory, then its mode may have been changed,
5578 but that crtl->return_rtx has not. */
5579 if (GET_MODE (real_decl_rtl) == BLKmode)
5580 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5582 /* If a non-BLKmode return value should be padded at the least
5583 significant end of the register, shift it left by the appropriate
5584 amount. BLKmode results are handled using the group load/store
5585 machinery. */
5586 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5587 && REG_P (real_decl_rtl)
5588 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5590 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5591 REGNO (real_decl_rtl)),
5592 decl_rtl);
5593 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5595 /* If a named return value dumped decl_return to memory, then
5596 we may need to re-do the PROMOTE_MODE signed/unsigned
5597 extension. */
5598 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5600 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5601 promote_function_mode (TREE_TYPE (decl_result),
5602 GET_MODE (decl_rtl), &unsignedp,
5603 TREE_TYPE (current_function_decl), 1);
5605 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5607 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5609 /* If expand_function_start has created a PARALLEL for decl_rtl,
5610 move the result to the real return registers. Otherwise, do
5611 a group load from decl_rtl for a named return. */
5612 if (GET_CODE (decl_rtl) == PARALLEL)
5613 emit_group_move (real_decl_rtl, decl_rtl);
5614 else
5615 emit_group_load (real_decl_rtl, decl_rtl,
5616 TREE_TYPE (decl_result),
5617 int_size_in_bytes (TREE_TYPE (decl_result)));
5619 /* In the case of complex integer modes smaller than a word, we'll
5620 need to generate some non-trivial bitfield insertions. Do that
5621 on a pseudo and not the hard register. */
5622 else if (GET_CODE (decl_rtl) == CONCAT
5623 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5624 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5626 int old_generating_concat_p;
5627 rtx tmp;
5629 old_generating_concat_p = generating_concat_p;
5630 generating_concat_p = 0;
5631 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5632 generating_concat_p = old_generating_concat_p;
5634 emit_move_insn (tmp, decl_rtl);
5635 emit_move_insn (real_decl_rtl, tmp);
5637 else
5638 emit_move_insn (real_decl_rtl, decl_rtl);
5642 /* If returning a structure, arrange to return the address of the value
5643 in a place where debuggers expect to find it.
5645 If returning a structure PCC style,
5646 the caller also depends on this value.
5647 And cfun->returns_pcc_struct is not necessarily set. */
5648 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5649 && !targetm.calls.omit_struct_return_reg)
5651 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5652 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5653 rtx outgoing;
5655 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5656 type = TREE_TYPE (type);
5657 else
5658 value_address = XEXP (value_address, 0);
5660 outgoing = targetm.calls.function_value (build_pointer_type (type),
5661 current_function_decl, true);
5663 /* Mark this as a function return value so integrate will delete the
5664 assignment and USE below when inlining this function. */
5665 REG_FUNCTION_VALUE_P (outgoing) = 1;
5667 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5668 value_address = convert_memory_address (GET_MODE (outgoing),
5669 value_address);
5671 emit_move_insn (outgoing, value_address);
5673 /* Show return register used to hold result (in this case the address
5674 of the result. */
5675 crtl->return_rtx = outgoing;
5678 /* Emit the actual code to clobber return register. Don't emit
5679 it if clobber_after is a barrier, then the previous basic block
5680 certainly doesn't fall thru into the exit block. */
5681 if (!BARRIER_P (clobber_after))
5683 start_sequence ();
5684 clobber_return_register ();
5685 rtx_insn *seq = get_insns ();
5686 end_sequence ();
5688 emit_insn_after (seq, clobber_after);
5691 /* Output the label for the naked return from the function. */
5692 if (naked_return_label)
5693 emit_label (naked_return_label);
5695 /* @@@ This is a kludge. We want to ensure that instructions that
5696 may trap are not moved into the epilogue by scheduling, because
5697 we don't always emit unwind information for the epilogue. */
5698 if (cfun->can_throw_non_call_exceptions
5699 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5700 emit_insn (gen_blockage ());
5702 /* If stack protection is enabled for this function, check the guard. */
5703 if (crtl->stack_protect_guard)
5704 stack_protect_epilogue ();
5706 /* If we had calls to alloca, and this machine needs
5707 an accurate stack pointer to exit the function,
5708 insert some code to save and restore the stack pointer. */
5709 if (! EXIT_IGNORE_STACK
5710 && cfun->calls_alloca)
5712 rtx tem = 0;
5714 start_sequence ();
5715 emit_stack_save (SAVE_FUNCTION, &tem);
5716 rtx_insn *seq = get_insns ();
5717 end_sequence ();
5718 emit_insn_before (seq, parm_birth_insn);
5720 emit_stack_restore (SAVE_FUNCTION, tem);
5723 /* ??? This should no longer be necessary since stupid is no longer with
5724 us, but there are some parts of the compiler (eg reload_combine, and
5725 sh mach_dep_reorg) that still try and compute their own lifetime info
5726 instead of using the general framework. */
5727 use_return_register ();
5731 get_arg_pointer_save_area (void)
5733 rtx ret = arg_pointer_save_area;
5735 if (! ret)
5737 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5738 arg_pointer_save_area = ret;
5741 if (! crtl->arg_pointer_save_area_init)
5743 /* Save the arg pointer at the beginning of the function. The
5744 generated stack slot may not be a valid memory address, so we
5745 have to check it and fix it if necessary. */
5746 start_sequence ();
5747 emit_move_insn (validize_mem (copy_rtx (ret)),
5748 crtl->args.internal_arg_pointer);
5749 rtx_insn *seq = get_insns ();
5750 end_sequence ();
5752 push_topmost_sequence ();
5753 emit_insn_after (seq, entry_of_function ());
5754 pop_topmost_sequence ();
5756 crtl->arg_pointer_save_area_init = true;
5759 return ret;
5762 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5763 for the first time. */
5765 static void
5766 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5768 rtx_insn *tmp;
5769 hash_table<insn_cache_hasher> *hash = *hashp;
5771 if (hash == NULL)
5772 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5774 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5776 rtx *slot = hash->find_slot (tmp, INSERT);
5777 gcc_assert (*slot == NULL);
5778 *slot = tmp;
5782 /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5783 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5784 insn, then record COPY as well. */
5786 void
5787 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5789 hash_table<insn_cache_hasher> *hash;
5790 rtx *slot;
5792 hash = epilogue_insn_hash;
5793 if (!hash || !hash->find (insn))
5795 hash = prologue_insn_hash;
5796 if (!hash || !hash->find (insn))
5797 return;
5800 slot = hash->find_slot (copy, INSERT);
5801 gcc_assert (*slot == NULL);
5802 *slot = copy;
5805 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5806 we can be running after reorg, SEQUENCE rtl is possible. */
5808 static bool
5809 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5811 if (hash == NULL)
5812 return false;
5814 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5816 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5817 int i;
5818 for (i = seq->len () - 1; i >= 0; i--)
5819 if (hash->find (seq->element (i)))
5820 return true;
5821 return false;
5824 return hash->find (const_cast<rtx> (insn)) != NULL;
5828 prologue_epilogue_contains (const_rtx insn)
5830 if (contains (insn, prologue_insn_hash))
5831 return 1;
5832 if (contains (insn, epilogue_insn_hash))
5833 return 1;
5834 return 0;
5837 /* Insert use of return register before the end of BB. */
5839 static void
5840 emit_use_return_register_into_block (basic_block bb)
5842 start_sequence ();
5843 use_return_register ();
5844 rtx_insn *seq = get_insns ();
5845 end_sequence ();
5846 rtx_insn *insn = BB_END (bb);
5847 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5848 insn = prev_cc0_setter (insn);
5850 emit_insn_before (seq, insn);
5854 /* Create a return pattern, either simple_return or return, depending on
5855 simple_p. */
5857 static rtx_insn *
5858 gen_return_pattern (bool simple_p)
5860 return (simple_p
5861 ? targetm.gen_simple_return ()
5862 : targetm.gen_return ());
5865 /* Insert an appropriate return pattern at the end of block BB. This
5866 also means updating block_for_insn appropriately. SIMPLE_P is
5867 the same as in gen_return_pattern and passed to it. */
5869 void
5870 emit_return_into_block (bool simple_p, basic_block bb)
5872 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5873 BB_END (bb));
5874 rtx pat = PATTERN (jump);
5875 if (GET_CODE (pat) == PARALLEL)
5876 pat = XVECEXP (pat, 0, 0);
5877 gcc_assert (ANY_RETURN_P (pat));
5878 JUMP_LABEL (jump) = pat;
5881 /* Set JUMP_LABEL for a return insn. */
5883 void
5884 set_return_jump_label (rtx_insn *returnjump)
5886 rtx pat = PATTERN (returnjump);
5887 if (GET_CODE (pat) == PARALLEL)
5888 pat = XVECEXP (pat, 0, 0);
5889 if (ANY_RETURN_P (pat))
5890 JUMP_LABEL (returnjump) = pat;
5891 else
5892 JUMP_LABEL (returnjump) = ret_rtx;
5895 /* Return true if there are any active insns between HEAD and TAIL. */
5896 bool
5897 active_insn_between (rtx_insn *head, rtx_insn *tail)
5899 while (tail)
5901 if (active_insn_p (tail))
5902 return true;
5903 if (tail == head)
5904 return false;
5905 tail = PREV_INSN (tail);
5907 return false;
5910 /* LAST_BB is a block that exits, and empty of active instructions.
5911 Examine its predecessors for jumps that can be converted to
5912 (conditional) returns. */
5913 vec<edge>
5914 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5915 vec<edge> unconverted ATTRIBUTE_UNUSED)
5917 int i;
5918 basic_block bb;
5919 edge_iterator ei;
5920 edge e;
5921 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5923 FOR_EACH_EDGE (e, ei, last_bb->preds)
5924 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5925 src_bbs.quick_push (e->src);
5927 rtx_insn *label = BB_HEAD (last_bb);
5929 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5931 rtx_insn *jump = BB_END (bb);
5933 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5934 continue;
5936 e = find_edge (bb, last_bb);
5938 /* If we have an unconditional jump, we can replace that
5939 with a simple return instruction. */
5940 if (simplejump_p (jump))
5942 /* The use of the return register might be present in the exit
5943 fallthru block. Either:
5944 - removing the use is safe, and we should remove the use in
5945 the exit fallthru block, or
5946 - removing the use is not safe, and we should add it here.
5947 For now, we conservatively choose the latter. Either of the
5948 2 helps in crossjumping. */
5949 emit_use_return_register_into_block (bb);
5951 emit_return_into_block (simple_p, bb);
5952 delete_insn (jump);
5955 /* If we have a conditional jump branching to the last
5956 block, we can try to replace that with a conditional
5957 return instruction. */
5958 else if (condjump_p (jump))
5960 rtx dest;
5962 if (simple_p)
5963 dest = simple_return_rtx;
5964 else
5965 dest = ret_rtx;
5966 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5968 if (targetm.have_simple_return () && simple_p)
5970 if (dump_file)
5971 fprintf (dump_file,
5972 "Failed to redirect bb %d branch.\n", bb->index);
5973 unconverted.safe_push (e);
5975 continue;
5978 /* See comment in simplejump_p case above. */
5979 emit_use_return_register_into_block (bb);
5981 /* If this block has only one successor, it both jumps
5982 and falls through to the fallthru block, so we can't
5983 delete the edge. */
5984 if (single_succ_p (bb))
5985 continue;
5987 else
5989 if (targetm.have_simple_return () && simple_p)
5991 if (dump_file)
5992 fprintf (dump_file,
5993 "Failed to redirect bb %d branch.\n", bb->index);
5994 unconverted.safe_push (e);
5996 continue;
5999 /* Fix up the CFG for the successful change we just made. */
6000 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
6001 e->flags &= ~EDGE_CROSSING;
6003 src_bbs.release ();
6004 return unconverted;
6007 /* Emit a return insn for the exit fallthru block. */
6008 basic_block
6009 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
6011 basic_block last_bb = exit_fallthru_edge->src;
6013 if (JUMP_P (BB_END (last_bb)))
6015 last_bb = split_edge (exit_fallthru_edge);
6016 exit_fallthru_edge = single_succ_edge (last_bb);
6018 emit_barrier_after (BB_END (last_bb));
6019 emit_return_into_block (simple_p, last_bb);
6020 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
6021 return last_bb;
6025 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6026 this into place with notes indicating where the prologue ends and where
6027 the epilogue begins. Update the basic block information when possible.
6029 Notes on epilogue placement:
6030 There are several kinds of edges to the exit block:
6031 * a single fallthru edge from LAST_BB
6032 * possibly, edges from blocks containing sibcalls
6033 * possibly, fake edges from infinite loops
6035 The epilogue is always emitted on the fallthru edge from the last basic
6036 block in the function, LAST_BB, into the exit block.
6038 If LAST_BB is empty except for a label, it is the target of every
6039 other basic block in the function that ends in a return. If a
6040 target has a return or simple_return pattern (possibly with
6041 conditional variants), these basic blocks can be changed so that a
6042 return insn is emitted into them, and their target is adjusted to
6043 the real exit block.
6045 Notes on shrink wrapping: We implement a fairly conservative
6046 version of shrink-wrapping rather than the textbook one. We only
6047 generate a single prologue and a single epilogue. This is
6048 sufficient to catch a number of interesting cases involving early
6049 exits.
6051 First, we identify the blocks that require the prologue to occur before
6052 them. These are the ones that modify a call-saved register, or reference
6053 any of the stack or frame pointer registers. To simplify things, we then
6054 mark everything reachable from these blocks as also requiring a prologue.
6055 This takes care of loops automatically, and avoids the need to examine
6056 whether MEMs reference the frame, since it is sufficient to check for
6057 occurrences of the stack or frame pointer.
6059 We then compute the set of blocks for which the need for a prologue
6060 is anticipatable (borrowing terminology from the shrink-wrapping
6061 description in Muchnick's book). These are the blocks which either
6062 require a prologue themselves, or those that have only successors
6063 where the prologue is anticipatable. The prologue needs to be
6064 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6065 is not. For the moment, we ensure that only one such edge exists.
6067 The epilogue is placed as described above, but we make a
6068 distinction between inserting return and simple_return patterns
6069 when modifying other blocks that end in a return. Blocks that end
6070 in a sibcall omit the sibcall_epilogue if the block is not in
6071 ANTIC. */
6073 void
6074 thread_prologue_and_epilogue_insns (void)
6076 bool inserted;
6077 vec<edge> unconverted_simple_returns = vNULL;
6078 bitmap_head bb_flags;
6079 rtx_insn *returnjump;
6080 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
6081 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
6082 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
6083 edge_iterator ei;
6085 df_analyze ();
6087 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6089 inserted = false;
6090 epilogue_end = NULL;
6091 returnjump = NULL;
6093 /* Can't deal with multiple successors of the entry block at the
6094 moment. Function should always have at least one entry
6095 point. */
6096 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6097 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6098 orig_entry_edge = entry_edge;
6100 split_prologue_seq = NULL;
6101 if (flag_split_stack
6102 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
6103 == NULL))
6105 start_sequence ();
6106 emit_insn (targetm.gen_split_stack_prologue ());
6107 split_prologue_seq = get_insns ();
6108 end_sequence ();
6110 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
6111 set_insn_locations (split_prologue_seq, prologue_location);
6114 prologue_seq = NULL;
6115 if (targetm.have_prologue ())
6117 start_sequence ();
6118 rtx_insn *seq = targetm.gen_prologue ();
6119 emit_insn (seq);
6121 /* Insert an explicit USE for the frame pointer
6122 if the profiling is on and the frame pointer is required. */
6123 if (crtl->profile && frame_pointer_needed)
6124 emit_use (hard_frame_pointer_rtx);
6126 /* Retain a map of the prologue insns. */
6127 record_insns (seq, NULL, &prologue_insn_hash);
6128 emit_note (NOTE_INSN_PROLOGUE_END);
6130 /* Ensure that instructions are not moved into the prologue when
6131 profiling is on. The call to the profiling routine can be
6132 emitted within the live range of a call-clobbered register. */
6133 if (!targetm.profile_before_prologue () && crtl->profile)
6134 emit_insn (gen_blockage ());
6136 prologue_seq = get_insns ();
6137 end_sequence ();
6138 set_insn_locations (prologue_seq, prologue_location);
6141 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
6143 /* Try to perform a kind of shrink-wrapping, making sure the
6144 prologue/epilogue is emitted only around those parts of the
6145 function that require it. */
6147 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
6149 if (split_prologue_seq != NULL_RTX)
6151 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6152 inserted = true;
6154 if (prologue_seq != NULL_RTX)
6156 insert_insn_on_edge (prologue_seq, entry_edge);
6157 inserted = true;
6160 /* If the exit block has no non-fake predecessors, we don't need
6161 an epilogue. */
6162 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6163 if ((e->flags & EDGE_FAKE) == 0)
6164 break;
6165 if (e == NULL)
6166 goto epilogue_done;
6168 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6170 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6172 if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
6173 exit_fallthru_edge
6174 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
6175 &unconverted_simple_returns,
6176 &returnjump);
6177 if (targetm.have_return ())
6179 if (exit_fallthru_edge == NULL)
6180 goto epilogue_done;
6182 if (optimize)
6184 basic_block last_bb = exit_fallthru_edge->src;
6186 if (LABEL_P (BB_HEAD (last_bb))
6187 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6188 convert_jumps_to_returns (last_bb, false, vNULL);
6190 if (EDGE_COUNT (last_bb->preds) != 0
6191 && single_succ_p (last_bb))
6193 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6194 epilogue_end = returnjump = BB_END (last_bb);
6196 /* Emitting the return may add a basic block.
6197 Fix bb_flags for the added block. */
6198 if (targetm.have_simple_return ()
6199 && last_bb != exit_fallthru_edge->src)
6200 bitmap_set_bit (&bb_flags, last_bb->index);
6202 goto epilogue_done;
6207 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6208 this marker for the splits of EH_RETURN patterns, and nothing else
6209 uses the flag in the meantime. */
6210 epilogue_completed = 1;
6212 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6213 some targets, these get split to a special version of the epilogue
6214 code. In order to be able to properly annotate these with unwind
6215 info, try to split them now. If we get a valid split, drop an
6216 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6217 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6219 rtx_insn *prev, *last, *trial;
6221 if (e->flags & EDGE_FALLTHRU)
6222 continue;
6223 last = BB_END (e->src);
6224 if (!eh_returnjump_p (last))
6225 continue;
6227 prev = PREV_INSN (last);
6228 trial = try_split (PATTERN (last), last, 1);
6229 if (trial == last)
6230 continue;
6232 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6233 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6236 /* If nothing falls through into the exit block, we don't need an
6237 epilogue. */
6239 if (exit_fallthru_edge == NULL)
6240 goto epilogue_done;
6242 if (targetm.have_epilogue ())
6244 start_sequence ();
6245 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6246 rtx_insn *seq = targetm.gen_epilogue ();
6247 if (seq)
6248 emit_jump_insn (seq);
6250 /* Retain a map of the epilogue insns. */
6251 record_insns (seq, NULL, &epilogue_insn_hash);
6252 set_insn_locations (seq, epilogue_location);
6254 seq = get_insns ();
6255 returnjump = get_last_insn ();
6256 end_sequence ();
6258 insert_insn_on_edge (seq, exit_fallthru_edge);
6259 inserted = true;
6261 if (JUMP_P (returnjump))
6262 set_return_jump_label (returnjump);
6264 else
6266 basic_block cur_bb;
6268 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6269 goto epilogue_done;
6270 /* We have a fall-through edge to the exit block, the source is not
6271 at the end of the function, and there will be an assembler epilogue
6272 at the end of the function.
6273 We can't use force_nonfallthru here, because that would try to
6274 use return. Inserting a jump 'by hand' is extremely messy, so
6275 we take advantage of cfg_layout_finalize using
6276 fixup_fallthru_exit_predecessor. */
6277 cfg_layout_initialize (0);
6278 FOR_EACH_BB_FN (cur_bb, cfun)
6279 if (cur_bb->index >= NUM_FIXED_BLOCKS
6280 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6281 cur_bb->aux = cur_bb->next_bb;
6282 cfg_layout_finalize ();
6285 epilogue_done:
6287 default_rtl_profile ();
6289 if (inserted)
6291 sbitmap blocks;
6293 commit_edge_insertions ();
6295 /* Look for basic blocks within the prologue insns. */
6296 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6297 bitmap_clear (blocks);
6298 bitmap_set_bit (blocks, entry_edge->dest->index);
6299 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6300 find_many_sub_basic_blocks (blocks);
6301 sbitmap_free (blocks);
6303 /* The epilogue insns we inserted may cause the exit edge to no longer
6304 be fallthru. */
6305 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6307 if (((e->flags & EDGE_FALLTHRU) != 0)
6308 && returnjump_p (BB_END (e->src)))
6309 e->flags &= ~EDGE_FALLTHRU;
6313 if (targetm.have_simple_return ())
6314 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6315 returnjump, unconverted_simple_returns);
6317 /* Emit sibling epilogues before any sibling call sites. */
6318 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6319 ei_safe_edge (ei));
6322 basic_block bb = e->src;
6323 rtx_insn *insn = BB_END (bb);
6325 if (!CALL_P (insn)
6326 || ! SIBLING_CALL_P (insn)
6327 || (targetm.have_simple_return ()
6328 && entry_edge != orig_entry_edge
6329 && !bitmap_bit_p (&bb_flags, bb->index)))
6331 ei_next (&ei);
6332 continue;
6335 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6337 start_sequence ();
6338 emit_note (NOTE_INSN_EPILOGUE_BEG);
6339 emit_insn (ep_seq);
6340 rtx_insn *seq = get_insns ();
6341 end_sequence ();
6343 /* Retain a map of the epilogue insns. Used in life analysis to
6344 avoid getting rid of sibcall epilogue insns. Do this before we
6345 actually emit the sequence. */
6346 record_insns (seq, NULL, &epilogue_insn_hash);
6347 set_insn_locations (seq, epilogue_location);
6349 emit_insn_before (seq, insn);
6351 ei_next (&ei);
6354 if (epilogue_end)
6356 rtx_insn *insn, *next;
6358 /* Similarly, move any line notes that appear after the epilogue.
6359 There is no need, however, to be quite so anal about the existence
6360 of such a note. Also possibly move
6361 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6362 info generation. */
6363 for (insn = epilogue_end; insn; insn = next)
6365 next = NEXT_INSN (insn);
6366 if (NOTE_P (insn)
6367 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6368 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6372 bitmap_clear (&bb_flags);
6374 /* Threading the prologue and epilogue changes the artificial refs
6375 in the entry and exit blocks. */
6376 epilogue_completed = 1;
6377 df_update_entry_exit_and_calls ();
6380 /* Reposition the prologue-end and epilogue-begin notes after
6381 instruction scheduling. */
6383 void
6384 reposition_prologue_and_epilogue_notes (void)
6386 if (!targetm.have_prologue ()
6387 && !targetm.have_epilogue ()
6388 && !targetm.have_sibcall_epilogue ())
6389 return;
6391 /* Since the hash table is created on demand, the fact that it is
6392 non-null is a signal that it is non-empty. */
6393 if (prologue_insn_hash != NULL)
6395 size_t len = prologue_insn_hash->elements ();
6396 rtx_insn *insn, *last = NULL, *note = NULL;
6398 /* Scan from the beginning until we reach the last prologue insn. */
6399 /* ??? While we do have the CFG intact, there are two problems:
6400 (1) The prologue can contain loops (typically probing the stack),
6401 which means that the end of the prologue isn't in the first bb.
6402 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6403 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6405 if (NOTE_P (insn))
6407 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6408 note = insn;
6410 else if (contains (insn, prologue_insn_hash))
6412 last = insn;
6413 if (--len == 0)
6414 break;
6418 if (last)
6420 if (note == NULL)
6422 /* Scan forward looking for the PROLOGUE_END note. It should
6423 be right at the beginning of the block, possibly with other
6424 insn notes that got moved there. */
6425 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6427 if (NOTE_P (note)
6428 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6429 break;
6433 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6434 if (LABEL_P (last))
6435 last = NEXT_INSN (last);
6436 reorder_insns (note, note, last);
6440 if (epilogue_insn_hash != NULL)
6442 edge_iterator ei;
6443 edge e;
6445 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6447 rtx_insn *insn, *first = NULL, *note = NULL;
6448 basic_block bb = e->src;
6450 /* Scan from the beginning until we reach the first epilogue insn. */
6451 FOR_BB_INSNS (bb, insn)
6453 if (NOTE_P (insn))
6455 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6457 note = insn;
6458 if (first != NULL)
6459 break;
6462 else if (first == NULL && contains (insn, epilogue_insn_hash))
6464 first = insn;
6465 if (note != NULL)
6466 break;
6470 if (note)
6472 /* If the function has a single basic block, and no real
6473 epilogue insns (e.g. sibcall with no cleanup), the
6474 epilogue note can get scheduled before the prologue
6475 note. If we have frame related prologue insns, having
6476 them scanned during the epilogue will result in a crash.
6477 In this case re-order the epilogue note to just before
6478 the last insn in the block. */
6479 if (first == NULL)
6480 first = BB_END (bb);
6482 if (PREV_INSN (first) != note)
6483 reorder_insns (note, note, PREV_INSN (first));
6489 /* Returns the name of function declared by FNDECL. */
6490 const char *
6491 fndecl_name (tree fndecl)
6493 if (fndecl == NULL)
6494 return "(nofn)";
6495 return lang_hooks.decl_printable_name (fndecl, 2);
6498 /* Returns the name of function FN. */
6499 const char *
6500 function_name (struct function *fn)
6502 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6503 return fndecl_name (fndecl);
6506 /* Returns the name of the current function. */
6507 const char *
6508 current_function_name (void)
6510 return function_name (cfun);
6514 static unsigned int
6515 rest_of_handle_check_leaf_regs (void)
6517 #ifdef LEAF_REGISTERS
6518 crtl->uses_only_leaf_regs
6519 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6520 #endif
6521 return 0;
6524 /* Insert a TYPE into the used types hash table of CFUN. */
6526 static void
6527 used_types_insert_helper (tree type, struct function *func)
6529 if (type != NULL && func != NULL)
6531 if (func->used_types_hash == NULL)
6532 func->used_types_hash = hash_set<tree>::create_ggc (37);
6534 func->used_types_hash->add (type);
6538 /* Given a type, insert it into the used hash table in cfun. */
6539 void
6540 used_types_insert (tree t)
6542 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6543 if (TYPE_NAME (t))
6544 break;
6545 else
6546 t = TREE_TYPE (t);
6547 if (TREE_CODE (t) == ERROR_MARK)
6548 return;
6549 if (TYPE_NAME (t) == NULL_TREE
6550 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6551 t = TYPE_MAIN_VARIANT (t);
6552 if (debug_info_level > DINFO_LEVEL_NONE)
6554 if (cfun)
6555 used_types_insert_helper (t, cfun);
6556 else
6558 /* So this might be a type referenced by a global variable.
6559 Record that type so that we can later decide to emit its
6560 debug information. */
6561 vec_safe_push (types_used_by_cur_var_decl, t);
6566 /* Helper to Hash a struct types_used_by_vars_entry. */
6568 static hashval_t
6569 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6571 gcc_assert (entry && entry->var_decl && entry->type);
6573 return iterative_hash_object (entry->type,
6574 iterative_hash_object (entry->var_decl, 0));
6577 /* Hash function of the types_used_by_vars_entry hash table. */
6579 hashval_t
6580 used_type_hasher::hash (types_used_by_vars_entry *entry)
6582 return hash_types_used_by_vars_entry (entry);
6585 /*Equality function of the types_used_by_vars_entry hash table. */
6587 bool
6588 used_type_hasher::equal (types_used_by_vars_entry *e1,
6589 types_used_by_vars_entry *e2)
6591 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6594 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6596 void
6597 types_used_by_var_decl_insert (tree type, tree var_decl)
6599 if (type != NULL && var_decl != NULL)
6601 types_used_by_vars_entry **slot;
6602 struct types_used_by_vars_entry e;
6603 e.var_decl = var_decl;
6604 e.type = type;
6605 if (types_used_by_vars_hash == NULL)
6606 types_used_by_vars_hash
6607 = hash_table<used_type_hasher>::create_ggc (37);
6609 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6610 if (*slot == NULL)
6612 struct types_used_by_vars_entry *entry;
6613 entry = ggc_alloc<types_used_by_vars_entry> ();
6614 entry->type = type;
6615 entry->var_decl = var_decl;
6616 *slot = entry;
6621 namespace {
6623 const pass_data pass_data_leaf_regs =
6625 RTL_PASS, /* type */
6626 "*leaf_regs", /* name */
6627 OPTGROUP_NONE, /* optinfo_flags */
6628 TV_NONE, /* tv_id */
6629 0, /* properties_required */
6630 0, /* properties_provided */
6631 0, /* properties_destroyed */
6632 0, /* todo_flags_start */
6633 0, /* todo_flags_finish */
6636 class pass_leaf_regs : public rtl_opt_pass
6638 public:
6639 pass_leaf_regs (gcc::context *ctxt)
6640 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6643 /* opt_pass methods: */
6644 virtual unsigned int execute (function *)
6646 return rest_of_handle_check_leaf_regs ();
6649 }; // class pass_leaf_regs
6651 } // anon namespace
6653 rtl_opt_pass *
6654 make_pass_leaf_regs (gcc::context *ctxt)
6656 return new pass_leaf_regs (ctxt);
6659 static unsigned int
6660 rest_of_handle_thread_prologue_and_epilogue (void)
6662 if (optimize)
6663 cleanup_cfg (CLEANUP_EXPENSIVE);
6665 /* On some machines, the prologue and epilogue code, or parts thereof,
6666 can be represented as RTL. Doing so lets us schedule insns between
6667 it and the rest of the code and also allows delayed branch
6668 scheduling to operate in the epilogue. */
6669 thread_prologue_and_epilogue_insns ();
6671 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6672 see PR57320. */
6673 cleanup_cfg (0);
6675 /* The stack usage info is finalized during prologue expansion. */
6676 if (flag_stack_usage_info)
6677 output_stack_usage ();
6679 return 0;
6682 namespace {
6684 const pass_data pass_data_thread_prologue_and_epilogue =
6686 RTL_PASS, /* type */
6687 "pro_and_epilogue", /* name */
6688 OPTGROUP_NONE, /* optinfo_flags */
6689 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6690 0, /* properties_required */
6691 0, /* properties_provided */
6692 0, /* properties_destroyed */
6693 0, /* todo_flags_start */
6694 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6697 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6699 public:
6700 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6701 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6704 /* opt_pass methods: */
6705 virtual unsigned int execute (function *)
6707 return rest_of_handle_thread_prologue_and_epilogue ();
6710 }; // class pass_thread_prologue_and_epilogue
6712 } // anon namespace
6714 rtl_opt_pass *
6715 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6717 return new pass_thread_prologue_and_epilogue (ctxt);
6721 /* This mini-pass fixes fall-out from SSA in asm statements that have
6722 in-out constraints. Say you start with
6724 orig = inout;
6725 asm ("": "+mr" (inout));
6726 use (orig);
6728 which is transformed very early to use explicit output and match operands:
6730 orig = inout;
6731 asm ("": "=mr" (inout) : "0" (inout));
6732 use (orig);
6734 Or, after SSA and copyprop,
6736 asm ("": "=mr" (inout_2) : "0" (inout_1));
6737 use (inout_1);
6739 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6740 they represent two separate values, so they will get different pseudo
6741 registers during expansion. Then, since the two operands need to match
6742 per the constraints, but use different pseudo registers, reload can
6743 only register a reload for these operands. But reloads can only be
6744 satisfied by hardregs, not by memory, so we need a register for this
6745 reload, just because we are presented with non-matching operands.
6746 So, even though we allow memory for this operand, no memory can be
6747 used for it, just because the two operands don't match. This can
6748 cause reload failures on register-starved targets.
6750 So it's a symptom of reload not being able to use memory for reloads
6751 or, alternatively it's also a symptom of both operands not coming into
6752 reload as matching (in which case the pseudo could go to memory just
6753 fine, as the alternative allows it, and no reload would be necessary).
6754 We fix the latter problem here, by transforming
6756 asm ("": "=mr" (inout_2) : "0" (inout_1));
6758 back to
6760 inout_2 = inout_1;
6761 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6763 static void
6764 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6766 int i;
6767 bool changed = false;
6768 rtx op = SET_SRC (p_sets[0]);
6769 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6770 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6771 bool *output_matched = XALLOCAVEC (bool, noutputs);
6773 memset (output_matched, 0, noutputs * sizeof (bool));
6774 for (i = 0; i < ninputs; i++)
6776 rtx input, output;
6777 rtx_insn *insns;
6778 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6779 char *end;
6780 int match, j;
6782 if (*constraint == '%')
6783 constraint++;
6785 match = strtoul (constraint, &end, 10);
6786 if (end == constraint)
6787 continue;
6789 gcc_assert (match < noutputs);
6790 output = SET_DEST (p_sets[match]);
6791 input = RTVEC_ELT (inputs, i);
6792 /* Only do the transformation for pseudos. */
6793 if (! REG_P (output)
6794 || rtx_equal_p (output, input)
6795 || (GET_MODE (input) != VOIDmode
6796 && GET_MODE (input) != GET_MODE (output)))
6797 continue;
6799 /* We can't do anything if the output is also used as input,
6800 as we're going to overwrite it. */
6801 for (j = 0; j < ninputs; j++)
6802 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6803 break;
6804 if (j != ninputs)
6805 continue;
6807 /* Avoid changing the same input several times. For
6808 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6809 only change in once (to out1), rather than changing it
6810 first to out1 and afterwards to out2. */
6811 if (i > 0)
6813 for (j = 0; j < noutputs; j++)
6814 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6815 break;
6816 if (j != noutputs)
6817 continue;
6819 output_matched[match] = true;
6821 start_sequence ();
6822 emit_move_insn (output, input);
6823 insns = get_insns ();
6824 end_sequence ();
6825 emit_insn_before (insns, insn);
6827 /* Now replace all mentions of the input with output. We can't
6828 just replace the occurrence in inputs[i], as the register might
6829 also be used in some other input (or even in an address of an
6830 output), which would mean possibly increasing the number of
6831 inputs by one (namely 'output' in addition), which might pose
6832 a too complicated problem for reload to solve. E.g. this situation:
6834 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6836 Here 'input' is used in two occurrences as input (once for the
6837 input operand, once for the address in the second output operand).
6838 If we would replace only the occurrence of the input operand (to
6839 make the matching) we would be left with this:
6841 output = input
6842 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6844 Now we suddenly have two different input values (containing the same
6845 value, but different pseudos) where we formerly had only one.
6846 With more complicated asms this might lead to reload failures
6847 which wouldn't have happen without this pass. So, iterate over
6848 all operands and replace all occurrences of the register used. */
6849 for (j = 0; j < noutputs; j++)
6850 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6851 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6852 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6853 input, output);
6854 for (j = 0; j < ninputs; j++)
6855 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6856 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6857 input, output);
6859 changed = true;
6862 if (changed)
6863 df_insn_rescan (insn);
6866 /* Add the decl D to the local_decls list of FUN. */
6868 void
6869 add_local_decl (struct function *fun, tree d)
6871 gcc_assert (TREE_CODE (d) == VAR_DECL);
6872 vec_safe_push (fun->local_decls, d);
6875 namespace {
6877 const pass_data pass_data_match_asm_constraints =
6879 RTL_PASS, /* type */
6880 "asmcons", /* name */
6881 OPTGROUP_NONE, /* optinfo_flags */
6882 TV_NONE, /* tv_id */
6883 0, /* properties_required */
6884 0, /* properties_provided */
6885 0, /* properties_destroyed */
6886 0, /* todo_flags_start */
6887 0, /* todo_flags_finish */
6890 class pass_match_asm_constraints : public rtl_opt_pass
6892 public:
6893 pass_match_asm_constraints (gcc::context *ctxt)
6894 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6897 /* opt_pass methods: */
6898 virtual unsigned int execute (function *);
6900 }; // class pass_match_asm_constraints
6902 unsigned
6903 pass_match_asm_constraints::execute (function *fun)
6905 basic_block bb;
6906 rtx_insn *insn;
6907 rtx pat, *p_sets;
6908 int noutputs;
6910 if (!crtl->has_asm_statement)
6911 return 0;
6913 df_set_flags (DF_DEFER_INSN_RESCAN);
6914 FOR_EACH_BB_FN (bb, fun)
6916 FOR_BB_INSNS (bb, insn)
6918 if (!INSN_P (insn))
6919 continue;
6921 pat = PATTERN (insn);
6922 if (GET_CODE (pat) == PARALLEL)
6923 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6924 else if (GET_CODE (pat) == SET)
6925 p_sets = &PATTERN (insn), noutputs = 1;
6926 else
6927 continue;
6929 if (GET_CODE (*p_sets) == SET
6930 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6931 match_asm_constraints_1 (insn, p_sets, noutputs);
6935 return TODO_df_finish;
6938 } // anon namespace
6940 rtl_opt_pass *
6941 make_pass_match_asm_constraints (gcc::context *ctxt)
6943 return new pass_match_asm_constraints (ctxt);
6947 #include "gt-function.h"