1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "cfghooks.h"
39 #include "tree.h"
40 #include "rtl.h"
41 #include "df.h"
42 #include "rtl-error.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "stor-layout.h"
46 #include "varasm.h"
47 #include "stringpool.h"
48 #include "flags.h"
49 #include "except.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "insn-codes.h"
59 #include "optabs-tree.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "regs.h"
63 #include "recog.h"
64 #include "output.h"
65 #include "tm_p.h"
66 #include "langhooks.h"
67 #include "target.h"
68 #include "common/common-target.h"
69 #include "gimple-expr.h"
70 #include "gimplify.h"
71 #include "tree-pass.h"
72 #include "cfgrtl.h"
73 #include "cfganal.h"
74 #include "cfgbuild.h"
75 #include "cfgcleanup.h"
76 #include "cfgexpand.h"
77 #include "basic-block.h"
78 #include "df.h"
79 #include "params.h"
80 #include "bb-reorder.h"
81 #include "shrink-wrap.h"
82 #include "toplev.h"
83 #include "rtl-iter.h"
84 #include "tree-chkp.h"
85 #include "rtl-chkp.h"
87 /* So we can assign to cfun in this file. */
88 #undef cfun
90 #ifndef STACK_ALIGNMENT_NEEDED
91 #define STACK_ALIGNMENT_NEEDED 1
92 #endif
94 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
 96 /* Round a value down to the largest multiple of the required alignment
 97    that does not exceed it.  Avoid using division in case the value is
 98    negative.  Assume the alignment is a power of two. */
 99 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
 101 /* Similar, but round up to the smallest multiple of the alignment that
 102    is not less than the value. */
103 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
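/* Editorial example, not part of the original source: with ALIGN == 16,

     FLOOR_ROUND (37, 16)  == 32      CEIL_ROUND (37, 16)  == 48
     FLOOR_ROUND (-37, 16) == -48     CEIL_ROUND (-37, 16) == -32

   Because the macros mask rather than divide, they round consistently
   toward -infinity and +infinity respectively, even for the negative
   offsets that arise when FRAME_GROWS_DOWNWARD.  */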
105 /* Nonzero once virtual register instantiation has been done.
106 assign_stack_local uses frame_pointer_rtx when this is nonzero.
107 calls.c:emit_library_call_value_1 uses it to set up
108 post-instantiation libcalls. */
109 int virtuals_instantiated;
111 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
112 static GTY(()) int funcdef_no;
114 /* These variables hold pointers to functions to create and destroy
115 target specific, per-function data structures. */
116 struct machine_function * (*init_machine_status) (void);
118 /* The currently compiled function. */
119 struct function *cfun = 0;
121 /* These hashes record the prologue and epilogue insns. */
123 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
125 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
126 static bool equal (rtx a, rtx b) { return a == b; }
129 static GTY((cache))
130 hash_table<insn_cache_hasher> *prologue_insn_hash;
131 static GTY((cache))
132 hash_table<insn_cache_hasher> *epilogue_insn_hash;
135 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
136 vec<tree, va_gc> *types_used_by_cur_var_decl;
138 /* Forward declarations. */
140 static struct temp_slot *find_temp_slot_from_address (rtx);
141 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
142 static void pad_below (struct args_size *, machine_mode, tree);
143 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
144 static int all_blocks (tree, tree *);
145 static tree *get_block_vector (tree, int *);
146 extern tree debug_find_var_in_block_tree (tree, tree);
147 /* We always define `record_insns' even if it's not used so that we
148 can always export `prologue_epilogue_contains'. */
149 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
150 ATTRIBUTE_UNUSED;
151 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
152 static void prepare_function_start (void);
153 static void do_clobber_return_reg (rtx, void *);
154 static void do_use_return_reg (rtx, void *);
155 static rtx rtl_for_parm (struct assign_parm_data_all *, tree);
156 static void maybe_reset_rtl_for_parm (tree);
157 static bool parm_in_unassigned_mem_p (tree, rtx);
160 /* Stack of nested functions. */
161 /* Keep track of the cfun stack. */
163 typedef struct function *function_p;
165 static vec<function_p> function_context_stack;
167 /* Save the current context for compilation of a nested function.
168 This is called from language-specific code. */
170 void
171 push_function_context (void)
173 if (cfun == 0)
174 allocate_struct_function (NULL, false);
176 function_context_stack.safe_push (cfun);
177 set_cfun (NULL);
180 /* Restore the last saved context, at the end of a nested function.
181 This function is called from language-specific code. */
183 void
184 pop_function_context (void)
186 struct function *p = function_context_stack.pop ();
187 set_cfun (p);
188 current_function_decl = p->decl;
190 /* Reset variables that have known state during rtx generation. */
191 virtuals_instantiated = 0;
192 generating_concat_p = 1;
195 /* Clear out all parts of the state in F that can safely be discarded
196 after the function has been parsed, but not compiled, to let
197 garbage collection reclaim the memory. */
199 void
200 free_after_parsing (struct function *f)
202 f->language = 0;
205 /* Clear out all parts of the state in F that can safely be discarded
206 after the function has been compiled, to let garbage collection
207 reclaim the memory. */
209 void
210 free_after_compilation (struct function *f)
212 prologue_insn_hash = NULL;
213 epilogue_insn_hash = NULL;
215 free (crtl->emit.regno_pointer_align);
217 memset (crtl, 0, sizeof (struct rtl_data));
218 f->eh = NULL;
219 f->machine = NULL;
220 f->cfg = NULL;
221 f->curr_properties &= ~PROP_cfg;
223 regno_reg_rtx = NULL;
226 /* Return size needed for stack frame based on slots so far allocated.
227 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
228 the caller may have to do that. */
230 HOST_WIDE_INT
231 get_frame_size (void)
233 if (FRAME_GROWS_DOWNWARD)
234 return -frame_offset;
235 else
236 return frame_offset;
239 /* Issue an error message and return TRUE if frame OFFSET overflows in
240 the signed target pointer arithmetics for function FUNC. Otherwise
241 return FALSE. */
243 bool
244 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
246 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
248 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
249 /* Leave room for the fixed part of the frame. */
250 - 64 * UNITS_PER_WORD)
252 error_at (DECL_SOURCE_LOCATION (func),
253 "total size of local objects too large");
254 return TRUE;
257 return FALSE;
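/* Editorial worked example of the limit checked above (the target
   parameters are assumptions): on a 32-bit target with
   UNITS_PER_WORD == 4, the check rejects a frame whose local objects
   exceed (1 << 31) - 64 * 4 = 2147483392 bytes, keeping 64 words of
   headroom for the fixed part of the frame.  */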
260 /* Return stack slot alignment in bits for TYPE and MODE. */
262 static unsigned int
263 get_stack_local_alignment (tree type, machine_mode mode)
265 unsigned int alignment;
267 if (mode == BLKmode)
268 alignment = BIGGEST_ALIGNMENT;
269 else
270 alignment = GET_MODE_ALIGNMENT (mode);
 272 /* Allow the front end to (possibly) increase the alignment of this
 273    stack slot. */
274 if (! type)
275 type = lang_hooks.types.type_for_mode (mode, 0);
277 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
280 /* Determine whether it is possible to fit a stack slot of size SIZE and
281 alignment ALIGNMENT into an area in the stack frame that starts at
282 frame offset START and has a length of LENGTH. If so, store the frame
283 offset to be used for the stack slot in *POFFSET and return true;
284 return false otherwise. This function will extend the frame size when
285 given a start/length pair that lies at the end of the frame. */
287 static bool
288 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
289 HOST_WIDE_INT size, unsigned int alignment,
290 HOST_WIDE_INT *poffset)
292 HOST_WIDE_INT this_frame_offset;
293 int frame_off, frame_alignment, frame_phase;
295 /* Calculate how many bytes the start of local variables is off from
296 stack alignment. */
297 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
298 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
299 frame_phase = frame_off ? frame_alignment - frame_off : 0;
301 /* Round the frame offset to the specified alignment. */
303 /* We must be careful here, since FRAME_OFFSET might be negative and
304 division with a negative dividend isn't as well defined as we might
305 like. So we instead assume that ALIGNMENT is a power of two and
306 use logical operations which are unambiguous. */
307 if (FRAME_GROWS_DOWNWARD)
308 this_frame_offset
309 = (FLOOR_ROUND (start + length - size - frame_phase,
310 (unsigned HOST_WIDE_INT) alignment)
311 + frame_phase);
312 else
313 this_frame_offset
314 = (CEIL_ROUND (start - frame_phase,
315 (unsigned HOST_WIDE_INT) alignment)
316 + frame_phase);
318 /* See if it fits. If this space is at the edge of the frame,
319 consider extending the frame to make it fit. Our caller relies on
320 this when allocating a new slot. */
321 if (frame_offset == start && this_frame_offset < frame_offset)
322 frame_offset = this_frame_offset;
323 else if (this_frame_offset < start)
324 return false;
325 else if (start + length == frame_offset
326 && this_frame_offset + size > start + length)
327 frame_offset = this_frame_offset + size;
328 else if (this_frame_offset + size > start + length)
329 return false;
331 *poffset = this_frame_offset;
332 return true;
335 /* Create a new frame_space structure describing free space in the stack
336 frame beginning at START and ending at END, and chain it into the
337 function's frame_space_list. */
339 static void
340 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
342 struct frame_space *space = ggc_alloc<frame_space> ();
343 space->next = crtl->frame_space_list;
344 crtl->frame_space_list = space;
345 space->start = start;
346 space->length = end - start;
349 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
350 with machine mode MODE.
352 ALIGN controls the amount of alignment for the address of the slot:
353 0 means according to MODE,
354 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
355 -2 means use BITS_PER_UNIT,
356 positive specifies alignment boundary in bits.
358 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
359 alignment and ASLK_RECORD_PAD bit set if we should remember
360 extra space we allocated for alignment purposes. When we are
361 called from assign_stack_temp_for_type, it is not set so we don't
362 track the same stack slot in two independent lists.
364 We do not round to stack_boundary here. */
367 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
368 int align, int kind)
370 rtx x, addr;
371 int bigend_correction = 0;
372 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
373 unsigned int alignment, alignment_in_bits;
375 if (align == 0)
377 alignment = get_stack_local_alignment (NULL, mode);
378 alignment /= BITS_PER_UNIT;
380 else if (align == -1)
382 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
383 size = CEIL_ROUND (size, alignment);
385 else if (align == -2)
386 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
387 else
388 alignment = align / BITS_PER_UNIT;
390 alignment_in_bits = alignment * BITS_PER_UNIT;
392 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
393 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
395 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
396 alignment = alignment_in_bits / BITS_PER_UNIT;
399 if (SUPPORTS_STACK_ALIGNMENT)
401 if (crtl->stack_alignment_estimated < alignment_in_bits)
403 if (!crtl->stack_realign_processed)
404 crtl->stack_alignment_estimated = alignment_in_bits;
405 else
407 /* If stack is realigned and stack alignment value
408 hasn't been finalized, it is OK not to increase
409 stack_alignment_estimated. The bigger alignment
410 requirement is recorded in stack_alignment_needed
411 below. */
412 gcc_assert (!crtl->stack_realign_finalized);
413 if (!crtl->stack_realign_needed)
415 /* It is OK to reduce the alignment as long as the
416 requested size is 0 or the estimated stack
417 alignment >= mode alignment. */
418 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
419 || size == 0
420 || (crtl->stack_alignment_estimated
421 >= GET_MODE_ALIGNMENT (mode)));
422 alignment_in_bits = crtl->stack_alignment_estimated;
423 alignment = alignment_in_bits / BITS_PER_UNIT;
429 if (crtl->stack_alignment_needed < alignment_in_bits)
430 crtl->stack_alignment_needed = alignment_in_bits;
431 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
432 crtl->max_used_stack_slot_alignment = alignment_in_bits;
434 if (mode != BLKmode || size != 0)
436 if (kind & ASLK_RECORD_PAD)
438 struct frame_space **psp;
440 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
442 struct frame_space *space = *psp;
443 if (!try_fit_stack_local (space->start, space->length, size,
444 alignment, &slot_offset))
445 continue;
446 *psp = space->next;
447 if (slot_offset > space->start)
448 add_frame_space (space->start, slot_offset);
449 if (slot_offset + size < space->start + space->length)
450 add_frame_space (slot_offset + size,
451 space->start + space->length);
452 goto found_space;
456 else if (!STACK_ALIGNMENT_NEEDED)
458 slot_offset = frame_offset;
459 goto found_space;
462 old_frame_offset = frame_offset;
464 if (FRAME_GROWS_DOWNWARD)
466 frame_offset -= size;
467 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
469 if (kind & ASLK_RECORD_PAD)
471 if (slot_offset > frame_offset)
472 add_frame_space (frame_offset, slot_offset);
473 if (slot_offset + size < old_frame_offset)
474 add_frame_space (slot_offset + size, old_frame_offset);
477 else
479 frame_offset += size;
480 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
482 if (kind & ASLK_RECORD_PAD)
484 if (slot_offset > old_frame_offset)
485 add_frame_space (old_frame_offset, slot_offset);
486 if (slot_offset + size < frame_offset)
487 add_frame_space (slot_offset + size, frame_offset);
491 found_space:
492 /* On a big-endian machine, if we are allocating more space than we will use,
493 use the least significant bytes of those that are allocated. */
494 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
495 bigend_correction = size - GET_MODE_SIZE (mode);
497 /* If we have already instantiated virtual registers, return the actual
498 address relative to the frame pointer. */
499 if (virtuals_instantiated)
500 addr = plus_constant (Pmode, frame_pointer_rtx,
501 trunc_int_for_mode
502 (slot_offset + bigend_correction
503 + STARTING_FRAME_OFFSET, Pmode));
504 else
505 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
506 trunc_int_for_mode
507 (slot_offset + bigend_correction,
508 Pmode));
510 x = gen_rtx_MEM (mode, addr);
511 set_mem_align (x, alignment_in_bits);
512 MEM_NOTRAP_P (x) = 1;
514 stack_slot_list
515 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
517 if (frame_offset_overflow (frame_offset, current_function_decl))
518 frame_offset = 0;
520 return x;
 523 /* Wrapper around assign_stack_local_1; pass ASLK_RECORD_PAD for KIND. */
526 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
528 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
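/* Editorial usage sketch of the ALIGN convention documented above; this
   is illustrative code added by the editor, not part of the original
   file, and assumes a context that is allowed to allocate stack slots:

     rtx a = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
        (alignment taken from DImode)
     rtx b = assign_stack_local (BLKmode, 32, -1);
        (size rounded and aligned to BIGGEST_ALIGNMENT)
     rtx c = assign_stack_local (BLKmode, 7, -2);
        (byte alignment only)
     rtx d = assign_stack_local (SImode, 4, 64);
        (explicit 64-bit alignment boundary)  */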
531 /* In order to evaluate some expressions, such as function calls returning
532 structures in memory, we need to temporarily allocate stack locations.
533 We record each allocated temporary in the following structure.
535 Associated with each temporary slot is a nesting level. When we pop up
536 one level, all temporaries associated with the previous level are freed.
537 Normally, all temporaries are freed after the execution of the statement
538 in which they were created. However, if we are inside a ({...}) grouping,
539 the result may be in a temporary and hence must be preserved. If the
540 result could be in a temporary, we preserve it if we can determine which
541 one it is in. If we cannot determine which temporary may contain the
542 result, all temporaries are preserved. A temporary is preserved by
543 pretending it was allocated at the previous nesting level. */
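/* Editorial sketch of the nesting protocol described above (illustrative
   only; TYPE and X stand for a tree type and an rtx produced while
   expanding the statement):

     push_temp_slots ();
     rtx mem = assign_stack_temp_for_type (BLKmode, 32, type);
     ... expand the statement, possibly using MEM ...
     preserve_temp_slots (x);    keep alive the slot X may point into
     pop_temp_slots ();          frees the remaining slots of this level  */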
545 struct GTY(()) temp_slot {
546 /* Points to next temporary slot. */
547 struct temp_slot *next;
548 /* Points to previous temporary slot. */
549 struct temp_slot *prev;
 550 /* The rtx used to reference the slot. */
551 rtx slot;
552 /* The size, in units, of the slot. */
553 HOST_WIDE_INT size;
554 /* The type of the object in the slot, or zero if it doesn't correspond
555 to a type. We use this to determine whether a slot can be reused.
556 It can be reused if objects of the type of the new slot will always
557 conflict with objects of the type of the old slot. */
558 tree type;
559 /* The alignment (in bits) of the slot. */
560 unsigned int align;
561 /* Nonzero if this temporary is currently in use. */
562 char in_use;
563 /* Nesting level at which this slot is being used. */
564 int level;
565 /* The offset of the slot from the frame_pointer, including extra space
566 for alignment. This info is for combine_temp_slots. */
567 HOST_WIDE_INT base_offset;
568 /* The size of the slot, including extra space for alignment. This
569 info is for combine_temp_slots. */
570 HOST_WIDE_INT full_size;
573 /* Entry for the below hash table. */
574 struct GTY((for_user)) temp_slot_address_entry {
575 hashval_t hash;
576 rtx address;
577 struct temp_slot *temp_slot;
580 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
582 static hashval_t hash (temp_slot_address_entry *);
583 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
586 /* A table of addresses that represent a stack slot. The table is a mapping
587 from address RTXen to a temp slot. */
588 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
589 static size_t n_temp_slots_in_use;
591 /* Removes temporary slot TEMP from LIST. */
593 static void
594 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
596 if (temp->next)
597 temp->next->prev = temp->prev;
598 if (temp->prev)
599 temp->prev->next = temp->next;
600 else
601 *list = temp->next;
603 temp->prev = temp->next = NULL;
606 /* Inserts temporary slot TEMP to LIST. */
608 static void
609 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
611 temp->next = *list;
612 if (*list)
613 (*list)->prev = temp;
614 temp->prev = NULL;
615 *list = temp;
618 /* Returns the list of used temp slots at LEVEL. */
620 static struct temp_slot **
621 temp_slots_at_level (int level)
623 if (level >= (int) vec_safe_length (used_temp_slots))
624 vec_safe_grow_cleared (used_temp_slots, level + 1);
626 return &(*used_temp_slots)[level];
629 /* Returns the maximal temporary slot level. */
631 static int
632 max_slot_level (void)
634 if (!used_temp_slots)
635 return -1;
637 return used_temp_slots->length () - 1;
640 /* Moves temporary slot TEMP to LEVEL. */
642 static void
643 move_slot_to_level (struct temp_slot *temp, int level)
645 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
646 insert_slot_to_list (temp, temp_slots_at_level (level));
647 temp->level = level;
650 /* Make temporary slot TEMP available. */
652 static void
653 make_slot_available (struct temp_slot *temp)
655 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
656 insert_slot_to_list (temp, &avail_temp_slots);
657 temp->in_use = 0;
658 temp->level = -1;
659 n_temp_slots_in_use--;
662 /* Compute the hash value for an address -> temp slot mapping.
663 The value is cached on the mapping entry. */
664 static hashval_t
665 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
667 int do_not_record = 0;
668 return hash_rtx (t->address, GET_MODE (t->address),
669 &do_not_record, NULL, false);
672 /* Return the hash value for an address -> temp slot mapping. */
673 hashval_t
674 temp_address_hasher::hash (temp_slot_address_entry *t)
676 return t->hash;
679 /* Compare two address -> temp slot mapping entries. */
680 bool
681 temp_address_hasher::equal (temp_slot_address_entry *t1,
682 temp_slot_address_entry *t2)
684 return exp_equiv_p (t1->address, t2->address, 0, true);
 687 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
688 static void
689 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
691 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
692 t->address = address;
693 t->temp_slot = temp_slot;
694 t->hash = temp_slot_address_compute_hash (t);
695 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
698 /* Remove an address -> temp slot mapping entry if the temp slot is
699 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
701 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
703 const struct temp_slot_address_entry *t = *slot;
704 if (! t->temp_slot->in_use)
705 temp_slot_address_table->clear_slot (slot);
706 return 1;
709 /* Remove all mappings of addresses to unused temp slots. */
710 static void
711 remove_unused_temp_slot_addresses (void)
713 /* Use quicker clearing if there aren't any active temp slots. */
714 if (n_temp_slots_in_use)
715 temp_slot_address_table->traverse
716 <void *, remove_unused_temp_slot_addresses_1> (NULL);
717 else
718 temp_slot_address_table->empty ();
721 /* Find the temp slot corresponding to the object at address X. */
723 static struct temp_slot *
724 find_temp_slot_from_address (rtx x)
726 struct temp_slot *p;
727 struct temp_slot_address_entry tmp, *t;
729 /* First try the easy way:
730 See if X exists in the address -> temp slot mapping. */
731 tmp.address = x;
732 tmp.temp_slot = NULL;
733 tmp.hash = temp_slot_address_compute_hash (&tmp);
734 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
735 if (t)
736 return t->temp_slot;
738 /* If we have a sum involving a register, see if it points to a temp
739 slot. */
740 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
741 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
742 return p;
743 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
744 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
745 return p;
747 /* Last resort: Address is a virtual stack var address. */
748 if (GET_CODE (x) == PLUS
749 && XEXP (x, 0) == virtual_stack_vars_rtx
750 && CONST_INT_P (XEXP (x, 1)))
752 int i;
753 for (i = max_slot_level (); i >= 0; i--)
754 for (p = *temp_slots_at_level (i); p; p = p->next)
756 if (INTVAL (XEXP (x, 1)) >= p->base_offset
757 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
758 return p;
762 return NULL;
765 /* Allocate a temporary stack slot and record it for possible later
766 reuse.
768 MODE is the machine mode to be given to the returned rtx.
770 SIZE is the size in units of the space required. We do no rounding here
771 since assign_stack_local will do any required rounding.
773 TYPE is the type that will be used for the stack slot. */
776 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
777 tree type)
779 unsigned int align;
780 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
781 rtx slot;
783 /* If SIZE is -1 it means that somebody tried to allocate a temporary
784 of a variable size. */
785 gcc_assert (size != -1);
787 align = get_stack_local_alignment (type, mode);
789 /* Try to find an available, already-allocated temporary of the proper
790 mode which meets the size and alignment requirements. Choose the
791 smallest one with the closest alignment.
793 If assign_stack_temp is called outside of the tree->rtl expansion,
794 we cannot reuse the stack slots (that may still refer to
795 VIRTUAL_STACK_VARS_REGNUM). */
796 if (!virtuals_instantiated)
798 for (p = avail_temp_slots; p; p = p->next)
800 if (p->align >= align && p->size >= size
801 && GET_MODE (p->slot) == mode
802 && objects_must_conflict_p (p->type, type)
803 && (best_p == 0 || best_p->size > p->size
804 || (best_p->size == p->size && best_p->align > p->align)))
806 if (p->align == align && p->size == size)
808 selected = p;
809 cut_slot_from_list (selected, &avail_temp_slots);
810 best_p = 0;
811 break;
813 best_p = p;
818 /* Make our best, if any, the one to use. */
819 if (best_p)
821 selected = best_p;
822 cut_slot_from_list (selected, &avail_temp_slots);
824 /* If there are enough aligned bytes left over, make them into a new
825 temp_slot so that the extra bytes don't get wasted. Do this only
826 for BLKmode slots, so that we can be sure of the alignment. */
827 if (GET_MODE (best_p->slot) == BLKmode)
829 int alignment = best_p->align / BITS_PER_UNIT;
830 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
832 if (best_p->size - rounded_size >= alignment)
834 p = ggc_alloc<temp_slot> ();
835 p->in_use = 0;
836 p->size = best_p->size - rounded_size;
837 p->base_offset = best_p->base_offset + rounded_size;
838 p->full_size = best_p->full_size - rounded_size;
839 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
840 p->align = best_p->align;
841 p->type = best_p->type;
842 insert_slot_to_list (p, &avail_temp_slots);
844 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
845 stack_slot_list);
847 best_p->size = rounded_size;
848 best_p->full_size = rounded_size;
853 /* If we still didn't find one, make a new temporary. */
854 if (selected == 0)
856 HOST_WIDE_INT frame_offset_old = frame_offset;
858 p = ggc_alloc<temp_slot> ();
860 /* We are passing an explicit alignment request to assign_stack_local.
861 One side effect of that is assign_stack_local will not round SIZE
862 to ensure the frame offset remains suitably aligned.
864 So for requests which depended on the rounding of SIZE, we go ahead
865 and round it now. We also make sure ALIGNMENT is at least
866 BIGGEST_ALIGNMENT. */
867 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
868 p->slot = assign_stack_local_1 (mode,
869 (mode == BLKmode
870 ? CEIL_ROUND (size,
871 (int) align
872 / BITS_PER_UNIT)
873 : size),
874 align, 0);
876 p->align = align;
878 /* The following slot size computation is necessary because we don't
879 know the actual size of the temporary slot until assign_stack_local
880 has performed all the frame alignment and size rounding for the
881 requested temporary. Note that extra space added for alignment
882 can be either above or below this stack slot depending on which
883 way the frame grows. We include the extra space if and only if it
884 is above this slot. */
885 if (FRAME_GROWS_DOWNWARD)
886 p->size = frame_offset_old - frame_offset;
887 else
888 p->size = size;
890 /* Now define the fields used by combine_temp_slots. */
891 if (FRAME_GROWS_DOWNWARD)
893 p->base_offset = frame_offset;
894 p->full_size = frame_offset_old - frame_offset;
896 else
898 p->base_offset = frame_offset_old;
899 p->full_size = frame_offset - frame_offset_old;
902 selected = p;
905 p = selected;
906 p->in_use = 1;
907 p->type = type;
908 p->level = temp_slot_level;
909 n_temp_slots_in_use++;
911 pp = temp_slots_at_level (p->level);
912 insert_slot_to_list (p, pp);
913 insert_temp_slot_address (XEXP (p->slot, 0), p);
915 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
916 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
917 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
919 /* If we know the alias set for the memory that will be used, use
920 it. If there's no TYPE, then we don't know anything about the
921 alias set for the memory. */
922 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
923 set_mem_align (slot, align);
925 /* If a type is specified, set the relevant flags. */
926 if (type != 0)
927 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
928 MEM_NOTRAP_P (slot) = 1;
930 return slot;
933 /* Allocate a temporary stack slot and record it for possible later
 934    reuse.  The first two arguments are the same as in the preceding function. */
937 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
939 return assign_stack_temp_for_type (mode, size, NULL_TREE);
 942 /* Assign a temporary.
 943    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
 944    so the decl should be used in error messages.  In either case, we
 945    allocate an object of the given type.
 946    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
 947    it is 0 if a register is OK.
 948    DONT_PROMOTE is 1 if we should not promote values in registers
 949    to wider modes. */
952 assign_temp (tree type_or_decl, int memory_required,
953 int dont_promote ATTRIBUTE_UNUSED)
955 tree type, decl;
956 machine_mode mode;
957 #ifdef PROMOTE_MODE
958 int unsignedp;
959 #endif
961 if (DECL_P (type_or_decl))
962 decl = type_or_decl, type = TREE_TYPE (decl);
963 else
964 decl = NULL, type = type_or_decl;
966 mode = TYPE_MODE (type);
967 #ifdef PROMOTE_MODE
968 unsignedp = TYPE_UNSIGNED (type);
969 #endif
971 if (mode == BLKmode || memory_required)
973 HOST_WIDE_INT size = int_size_in_bytes (type);
974 rtx tmp;
 976 /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
 977    problems with allocating the stack space. */
978 if (size == 0)
979 size = 1;
981 /* Unfortunately, we don't yet know how to allocate variable-sized
982 temporaries. However, sometimes we can find a fixed upper limit on
983 the size, so try that instead. */
984 else if (size == -1)
985 size = max_int_size_in_bytes (type);
987 /* The size of the temporary may be too large to fit into an integer. */
988 /* ??? Not sure this should happen except for user silliness, so limit
989 this to things that aren't compiler-generated temporaries. The
990 rest of the time we'll die in assign_stack_temp_for_type. */
991 if (decl && size == -1
992 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
994 error ("size of variable %q+D is too large", decl);
995 size = 1;
998 tmp = assign_stack_temp_for_type (mode, size, type);
999 return tmp;
1002 #ifdef PROMOTE_MODE
1003 if (! dont_promote)
1004 mode = promote_mode (type, mode, &unsignedp);
1005 #endif
1007 return gen_reg_rtx (mode);
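/* Editorial usage sketch (illustrative, not original code; TYPE is a
   placeholder tree):

     rtx t1 = assign_temp (type, 1, 0);   always addressable stack memory
     rtx t2 = assign_temp (type, 0, 0);   may be a (promoted) pseudo register

   With MEMORY_REQUIRED == 0 and a non-BLKmode type this simply returns
   gen_reg_rtx of the possibly PROMOTE_MODE-widened mode.  */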
1010 /* Combine temporary stack slots which are adjacent on the stack.
1012 This allows for better use of already allocated stack space. This is only
1013 done for BLKmode slots because we can be sure that we won't have alignment
1014 problems in this case. */
1016 static void
1017 combine_temp_slots (void)
1019 struct temp_slot *p, *q, *next, *next_q;
1020 int num_slots;
1022 /* We can't combine slots, because the information about which slot
1023 is in which alias set will be lost. */
1024 if (flag_strict_aliasing)
1025 return;
 1027 /* If there are a lot of temp slots, don't do anything unless
 1028    high levels of optimization are enabled. */
1029 if (! flag_expensive_optimizations)
1030 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1031 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1032 return;
1034 for (p = avail_temp_slots; p; p = next)
1036 int delete_p = 0;
1038 next = p->next;
1040 if (GET_MODE (p->slot) != BLKmode)
1041 continue;
1043 for (q = p->next; q; q = next_q)
1045 int delete_q = 0;
1047 next_q = q->next;
1049 if (GET_MODE (q->slot) != BLKmode)
1050 continue;
1052 if (p->base_offset + p->full_size == q->base_offset)
1054 /* Q comes after P; combine Q into P. */
1055 p->size += q->size;
1056 p->full_size += q->full_size;
1057 delete_q = 1;
1059 else if (q->base_offset + q->full_size == p->base_offset)
1061 /* P comes after Q; combine P into Q. */
1062 q->size += p->size;
1063 q->full_size += p->full_size;
1064 delete_p = 1;
1065 break;
1067 if (delete_q)
1068 cut_slot_from_list (q, &avail_temp_slots);
1071 /* Either delete P or advance past it. */
1072 if (delete_p)
1073 cut_slot_from_list (p, &avail_temp_slots);
1077 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1078 slot that previously was known by OLD_RTX. */
1080 void
1081 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1083 struct temp_slot *p;
1085 if (rtx_equal_p (old_rtx, new_rtx))
1086 return;
1088 p = find_temp_slot_from_address (old_rtx);
 1090 /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
 1091    NEW_RTX is a register, see if one operand of the PLUS is a
 1092    temporary location.  If so, NEW_RTX points into it.  Otherwise,
 1093    if both OLD_RTX and NEW_RTX are a PLUS and they have a register
 1094    in common between them, try a recursive call on the two
 1095    remaining operands. */
1096 if (p == 0)
1098 if (GET_CODE (old_rtx) != PLUS)
1099 return;
1101 if (REG_P (new_rtx))
1103 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1104 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1105 return;
1107 else if (GET_CODE (new_rtx) != PLUS)
1108 return;
1110 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1111 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1112 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1113 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1114 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1115 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1116 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1117 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1119 return;
1122 /* Otherwise add an alias for the temp's address. */
1123 insert_temp_slot_address (new_rtx, p);
1126 /* If X could be a reference to a temporary slot, mark that slot as
 1127    belonging to one level higher than the current level.  If X
1128 matched one of our slots, just mark that one. Otherwise, we can't
1129 easily predict which it is, so upgrade all of them.
1131 This is called when an ({...}) construct occurs and a statement
1132 returns a value in memory. */
1134 void
1135 preserve_temp_slots (rtx x)
1137 struct temp_slot *p = 0, *next;
1139 if (x == 0)
1140 return;
1142 /* If X is a register that is being used as a pointer, see if we have
1143 a temporary slot we know it points to. */
1144 if (REG_P (x) && REG_POINTER (x))
1145 p = find_temp_slot_from_address (x);
1147 /* If X is not in memory or is at a constant address, it cannot be in
1148 a temporary slot. */
1149 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1150 return;
1152 /* First see if we can find a match. */
1153 if (p == 0)
1154 p = find_temp_slot_from_address (XEXP (x, 0));
1156 if (p != 0)
1158 if (p->level == temp_slot_level)
1159 move_slot_to_level (p, temp_slot_level - 1);
1160 return;
1163 /* Otherwise, preserve all non-kept slots at this level. */
1164 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1166 next = p->next;
1167 move_slot_to_level (p, temp_slot_level - 1);
1171 /* Free all temporaries used so far. This is normally called at the
1172 end of generating code for a statement. */
1174 void
1175 free_temp_slots (void)
1177 struct temp_slot *p, *next;
1178 bool some_available = false;
1180 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1182 next = p->next;
1183 make_slot_available (p);
1184 some_available = true;
1187 if (some_available)
1189 remove_unused_temp_slot_addresses ();
1190 combine_temp_slots ();
1194 /* Push deeper into the nesting level for stack temporaries. */
1196 void
1197 push_temp_slots (void)
1199 temp_slot_level++;
1202 /* Pop a temporary nesting level. All slots in use in the current level
1203 are freed. */
1205 void
1206 pop_temp_slots (void)
1208 free_temp_slots ();
1209 temp_slot_level--;
1212 /* Initialize temporary slots. */
1214 void
1215 init_temp_slots (void)
1217 /* We have not allocated any temporaries yet. */
1218 avail_temp_slots = 0;
1219 vec_alloc (used_temp_slots, 0);
1220 temp_slot_level = 0;
1221 n_temp_slots_in_use = 0;
1223 /* Set up the table to map addresses to temp slots. */
1224 if (! temp_slot_address_table)
1225 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1226 else
1227 temp_slot_address_table->empty ();
1230 /* Functions and data structures to keep track of the values hard regs
1231 had at the start of the function. */
1233 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
 1234    and has_hard_reg_initial_val. */
1235 struct GTY(()) initial_value_pair {
1236 rtx hard_reg;
1237 rtx pseudo;
1239 /* ??? This could be a VEC but there is currently no way to define an
1240 opaque VEC type. This could be worked around by defining struct
1241 initial_value_pair in function.h. */
1242 struct GTY(()) initial_value_struct {
1243 int num_entries;
1244 int max_entries;
1245 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1248 /* If a pseudo represents an initial hard reg (or expression), return
1249 it, else return NULL_RTX. */
1252 get_hard_reg_initial_reg (rtx reg)
1254 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1255 int i;
1257 if (ivs == 0)
1258 return NULL_RTX;
1260 for (i = 0; i < ivs->num_entries; i++)
1261 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1262 return ivs->entries[i].hard_reg;
1264 return NULL_RTX;
1267 /* Make sure that there's a pseudo register of mode MODE that stores the
1268 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1271 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1273 struct initial_value_struct *ivs;
1274 rtx rv;
1276 rv = has_hard_reg_initial_val (mode, regno);
1277 if (rv)
1278 return rv;
1280 ivs = crtl->hard_reg_initial_vals;
1281 if (ivs == 0)
1283 ivs = ggc_alloc<initial_value_struct> ();
1284 ivs->num_entries = 0;
1285 ivs->max_entries = 5;
1286 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1287 crtl->hard_reg_initial_vals = ivs;
1290 if (ivs->num_entries >= ivs->max_entries)
1292 ivs->max_entries += 5;
1293 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1294 ivs->max_entries);
1297 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1298 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1300 return ivs->entries[ivs->num_entries++].pseudo;
1303 /* See if get_hard_reg_initial_val has been used to create a pseudo
1304 for the initial value of hard register REGNO in mode MODE. Return
1305 the associated pseudo if so, otherwise return NULL. */
1308 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1310 struct initial_value_struct *ivs;
1311 int i;
1313 ivs = crtl->hard_reg_initial_vals;
1314 if (ivs != 0)
1315 for (i = 0; i < ivs->num_entries; i++)
1316 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1317 && REGNO (ivs->entries[i].hard_reg) == regno)
1318 return ivs->entries[i].pseudo;
1320 return NULL_RTX;
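/* Editorial sketch of how a back end typically uses this API; the
   register number RETURN_ADDR_REGNUM below is a hypothetical name:

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   The first call allocates the pseudo; emit_initial_value_sets later
   emits the copies from the hard registers at the function entry.  */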
1323 unsigned int
1324 emit_initial_value_sets (void)
1326 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1327 int i;
1328 rtx_insn *seq;
1330 if (ivs == 0)
1331 return 0;
1333 start_sequence ();
1334 for (i = 0; i < ivs->num_entries; i++)
1335 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1336 seq = get_insns ();
1337 end_sequence ();
1339 emit_insn_at_entry (seq);
1340 return 0;
 1343 /* Store the hard reg / pseudo reg initial values pair at entry I in
 1344    *HREG and *PREG; return TRUE if I is a valid entry, FALSE otherwise. */
1345 bool
1346 initial_value_entry (int i, rtx *hreg, rtx *preg)
1348 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1349 if (!ivs || i >= ivs->num_entries)
1350 return false;
1352 *hreg = ivs->entries[i].hard_reg;
1353 *preg = ivs->entries[i].pseudo;
1354 return true;
1357 /* These routines are responsible for converting virtual register references
1358 to the actual hard register references once RTL generation is complete.
1360 The following four variables are used for communication between the
1361 routines. They contain the offsets of the virtual registers from their
1362 respective hard registers. */
1364 static int in_arg_offset;
1365 static int var_offset;
1366 static int dynamic_offset;
1367 static int out_arg_offset;
1368 static int cfa_offset;
 1370 /* On most machines, the stack pointer register is equivalent to the bottom
1371 of the stack. */
1373 #ifndef STACK_POINTER_OFFSET
1374 #define STACK_POINTER_OFFSET 0
1375 #endif
1377 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1378 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1379 #endif
1381 /* If not defined, pick an appropriate default for the offset of dynamically
1382 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1383 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1385 #ifndef STACK_DYNAMIC_OFFSET
1387 /* The bottom of the stack points to the actual arguments. If
1388 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1389    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1390 stack space for register parameters is not pushed by the caller, but
1391 rather part of the fixed stack areas and hence not included in
1392 `crtl->outgoing_args_size'. Nevertheless, we must allow
1393 for it when allocating stack dynamic objects. */
1395 #ifdef INCOMING_REG_PARM_STACK_SPACE
1396 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1397 ((ACCUMULATE_OUTGOING_ARGS \
1398 ? (crtl->outgoing_args_size \
1399 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1400 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1401 : 0) + (STACK_POINTER_OFFSET))
1402 #else
1403 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1404 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1405 + (STACK_POINTER_OFFSET))
1406 #endif
1407 #endif
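/* Editorial worked example for the default above (all values assumed):
   with ACCUMULATE_OUTGOING_ARGS, crtl->outgoing_args_size == 64 and
   STACK_POINTER_OFFSET == 0, STACK_DYNAMIC_OFFSET evaluates to 64, so
   dynamic allocations are addressed past the outgoing argument area;
   without ACCUMULATE_OUTGOING_ARGS it is just STACK_POINTER_OFFSET.  */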
1410 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1411 is a virtual register, return the equivalent hard register and set the
1412 offset indirectly through the pointer. Otherwise, return 0. */
1414 static rtx
1415 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1417 rtx new_rtx;
1418 HOST_WIDE_INT offset;
1420 if (x == virtual_incoming_args_rtx)
1422 if (stack_realign_drap)
1424 /* Replace virtual_incoming_args_rtx with internal arg
1425 pointer if DRAP is used to realign stack. */
1426 new_rtx = crtl->args.internal_arg_pointer;
1427 offset = 0;
1429 else
1430 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1432 else if (x == virtual_stack_vars_rtx)
1433 new_rtx = frame_pointer_rtx, offset = var_offset;
1434 else if (x == virtual_stack_dynamic_rtx)
1435 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1436 else if (x == virtual_outgoing_args_rtx)
1437 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1438 else if (x == virtual_cfa_rtx)
1440 #ifdef FRAME_POINTER_CFA_OFFSET
1441 new_rtx = frame_pointer_rtx;
1442 #else
1443 new_rtx = arg_pointer_rtx;
1444 #endif
1445 offset = cfa_offset;
1447 else if (x == virtual_preferred_stack_boundary_rtx)
1449 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1450 offset = 0;
1452 else
1453 return NULL_RTX;
1455 *poffset = offset;
1456 return new_rtx;
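/* Editorial example of the mapping above (the offset is an assumption):
   if var_offset == -16, the callers below rewrite

     (plus (reg virtual-stack-vars) (const_int 8))

   into

     (plus (reg frame-pointer) (const_int -8))

   i.e. the virtual register is replaced by its hard register and the
   recorded offset is folded into the constant.  */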
1459 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1460 registers present inside of *LOC. The expression is simplified,
1461 as much as possible, but is not to be considered "valid" in any sense
1462 implied by the target. Return true if any change is made. */
1464 static bool
1465 instantiate_virtual_regs_in_rtx (rtx *loc)
1467 if (!*loc)
1468 return false;
1469 bool changed = false;
1470 subrtx_ptr_iterator::array_type array;
1471 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1473 rtx *loc = *iter;
1474 if (rtx x = *loc)
1476 rtx new_rtx;
1477 HOST_WIDE_INT offset;
1478 switch (GET_CODE (x))
1480 case REG:
1481 new_rtx = instantiate_new_reg (x, &offset);
1482 if (new_rtx)
1484 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1485 changed = true;
1487 iter.skip_subrtxes ();
1488 break;
1490 case PLUS:
1491 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1492 if (new_rtx)
1494 XEXP (x, 0) = new_rtx;
1495 *loc = plus_constant (GET_MODE (x), x, offset, true);
1496 changed = true;
1497 iter.skip_subrtxes ();
1498 break;
1501 /* FIXME -- from old code */
1502 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1503 we can commute the PLUS and SUBREG because pointers into the
1504 frame are well-behaved. */
1505 break;
1507 default:
1508 break;
1512 return changed;
1515 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1516 matches the predicate for insn CODE operand OPERAND. */
1518 static int
1519 safe_insn_predicate (int code, int operand, rtx x)
1521 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1524 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1525 registers present inside of insn. The result will be a valid insn. */
1527 static void
1528 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1530 HOST_WIDE_INT offset;
1531 int insn_code, i;
1532 bool any_change = false;
1533 rtx set, new_rtx, x;
1534 rtx_insn *seq;
1536 /* There are some special cases to be handled first. */
1537 set = single_set (insn);
1538 if (set)
1540 /* We're allowed to assign to a virtual register. This is interpreted
1541 to mean that the underlying register gets assigned the inverse
1542 transformation. This is used, for example, in the handling of
1543 non-local gotos. */
1544 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1545 if (new_rtx)
1547 start_sequence ();
1549 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1550 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1551 gen_int_mode (-offset, GET_MODE (new_rtx)));
1552 x = force_operand (x, new_rtx);
1553 if (x != new_rtx)
1554 emit_move_insn (new_rtx, x);
1556 seq = get_insns ();
1557 end_sequence ();
1559 emit_insn_before (seq, insn);
1560 delete_insn (insn);
1561 return;
1564 /* Handle a straight copy from a virtual register by generating a
1565 new add insn. The difference between this and falling through
1566 to the generic case is avoiding a new pseudo and eliminating a
1567 move insn in the initial rtl stream. */
1568 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1569 if (new_rtx && offset != 0
1570 && REG_P (SET_DEST (set))
1571 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1573 start_sequence ();
1575 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1576 gen_int_mode (offset,
1577 GET_MODE (SET_DEST (set))),
1578 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1579 if (x != SET_DEST (set))
1580 emit_move_insn (SET_DEST (set), x);
1582 seq = get_insns ();
1583 end_sequence ();
1585 emit_insn_before (seq, insn);
1586 delete_insn (insn);
1587 return;
1590 extract_insn (insn);
1591 insn_code = INSN_CODE (insn);
1593 /* Handle a plus involving a virtual register by determining if the
1594 operands remain valid if they're modified in place. */
1595 if (GET_CODE (SET_SRC (set)) == PLUS
1596 && recog_data.n_operands >= 3
1597 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1598 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1599 && CONST_INT_P (recog_data.operand[2])
1600 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1602 offset += INTVAL (recog_data.operand[2]);
1604 /* If the sum is zero, then replace with a plain move. */
1605 if (offset == 0
1606 && REG_P (SET_DEST (set))
1607 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1609 start_sequence ();
1610 emit_move_insn (SET_DEST (set), new_rtx);
1611 seq = get_insns ();
1612 end_sequence ();
1614 emit_insn_before (seq, insn);
1615 delete_insn (insn);
1616 return;
1619 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1621 /* Using validate_change and apply_change_group here leaves
1622 recog_data in an invalid state. Since we know exactly what
1623 we want to check, do those two by hand. */
1624 if (safe_insn_predicate (insn_code, 1, new_rtx)
1625 && safe_insn_predicate (insn_code, 2, x))
1627 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1628 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1629 any_change = true;
1631 /* Fall through into the regular operand fixup loop in
1632 order to take care of operands other than 1 and 2. */
1636 else
1638 extract_insn (insn);
1639 insn_code = INSN_CODE (insn);
1642 /* In the general case, we expect virtual registers to appear only in
1643 operands, and then only as either bare registers or inside memories. */
1644 for (i = 0; i < recog_data.n_operands; ++i)
1646 x = recog_data.operand[i];
1647 switch (GET_CODE (x))
1649 case MEM:
1651 rtx addr = XEXP (x, 0);
1653 if (!instantiate_virtual_regs_in_rtx (&addr))
1654 continue;
1656 start_sequence ();
1657 x = replace_equiv_address (x, addr, true);
1658 /* It may happen that the address with the virtual reg
1659 was valid (e.g. based on the virtual stack reg, which might
1660 be acceptable to the predicates with all offsets), whereas
 1661    the address is not valid any more, for instance when the
 1662    address still has an offset but the base reg is no longer
 1663    the virtual stack reg.  Below we would do a force_reg on the whole operand,
1664 but this insn might actually only accept memory. Hence,
1665 before doing that last resort, try to reload the address into
1666 a register, so this operand stays a MEM. */
1667 if (!safe_insn_predicate (insn_code, i, x))
1669 addr = force_reg (GET_MODE (addr), addr);
1670 x = replace_equiv_address (x, addr, true);
1672 seq = get_insns ();
1673 end_sequence ();
1674 if (seq)
1675 emit_insn_before (seq, insn);
1677 break;
1679 case REG:
1680 new_rtx = instantiate_new_reg (x, &offset);
1681 if (new_rtx == NULL)
1682 continue;
1683 if (offset == 0)
1684 x = new_rtx;
1685 else
1687 start_sequence ();
1689 /* Careful, special mode predicates may have stuff in
1690 insn_data[insn_code].operand[i].mode that isn't useful
1691 to us for computing a new value. */
1692 /* ??? Recognize address_operand and/or "p" constraints
 1693    to see if (plus new offset) is valid before we put
1694 this through expand_simple_binop. */
1695 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1696 gen_int_mode (offset, GET_MODE (x)),
1697 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1698 seq = get_insns ();
1699 end_sequence ();
1700 emit_insn_before (seq, insn);
1702 break;
1704 case SUBREG:
1705 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1706 if (new_rtx == NULL)
1707 continue;
1708 if (offset != 0)
1710 start_sequence ();
1711 new_rtx = expand_simple_binop
1712 (GET_MODE (new_rtx), PLUS, new_rtx,
1713 gen_int_mode (offset, GET_MODE (new_rtx)),
1714 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1715 seq = get_insns ();
1716 end_sequence ();
1717 emit_insn_before (seq, insn);
1719 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1720 GET_MODE (new_rtx), SUBREG_BYTE (x));
1721 gcc_assert (x);
1722 break;
1724 default:
1725 continue;
1728 /* At this point, X contains the new value for the operand.
1729 Validate the new value vs the insn predicate. Note that
1730 asm insns will have insn_code -1 here. */
1731 if (!safe_insn_predicate (insn_code, i, x))
1733 start_sequence ();
1734 if (REG_P (x))
1736 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1737 x = copy_to_reg (x);
1739 else
1740 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1741 seq = get_insns ();
1742 end_sequence ();
1743 if (seq)
1744 emit_insn_before (seq, insn);
1747 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1748 any_change = true;
1751 if (any_change)
1753 /* Propagate operand changes into the duplicates. */
1754 for (i = 0; i < recog_data.n_dups; ++i)
1755 *recog_data.dup_loc[i]
1756 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1758 /* Force re-recognition of the instruction for validation. */
1759 INSN_CODE (insn) = -1;
1762 if (asm_noperands (PATTERN (insn)) >= 0)
1764 if (!check_asm_operands (PATTERN (insn)))
1766 error_for_asm (insn, "impossible constraint in %<asm%>");
1767 /* For asm goto, instead of fixing up all the edges
1768 just clear the template and clear input operands
1769 (asm goto doesn't have any output operands). */
1770 if (JUMP_P (insn))
1772 rtx asm_op = extract_asm_operands (PATTERN (insn));
1773 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1774 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1775 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1777 else
1778 delete_insn (insn);
1781 else
1783 if (recog_memoized (insn) < 0)
1784 fatal_insn_not_found (insn);
1788 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1789 do any instantiation required. */
1791 void
1792 instantiate_decl_rtl (rtx x)
1794 rtx addr;
1796 if (x == 0)
1797 return;
1799 /* If this is a CONCAT, recurse for the pieces. */
1800 if (GET_CODE (x) == CONCAT)
1802 instantiate_decl_rtl (XEXP (x, 0));
1803 instantiate_decl_rtl (XEXP (x, 1));
1804 return;
1807 /* If this is not a MEM, no need to do anything. Similarly if the
1808 address is a constant or a register that is not a virtual register. */
1809 if (!MEM_P (x))
1810 return;
1812 addr = XEXP (x, 0);
1813 if (CONSTANT_P (addr)
1814 || (REG_P (addr)
1815 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1816 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1817 return;
1819 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1822 /* Helper for instantiate_decls called via walk_tree: Process all decls
1823 in the given DECL_VALUE_EXPR. */
1825 static tree
1826 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1828 tree t = *tp;
1829 if (! EXPR_P (t))
1831 *walk_subtrees = 0;
1832 if (DECL_P (t))
1834 if (DECL_RTL_SET_P (t))
1835 instantiate_decl_rtl (DECL_RTL (t));
1836 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1837 && DECL_INCOMING_RTL (t))
1838 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1839 if ((TREE_CODE (t) == VAR_DECL
1840 || TREE_CODE (t) == RESULT_DECL)
1841 && DECL_HAS_VALUE_EXPR_P (t))
1843 tree v = DECL_VALUE_EXPR (t);
1844 walk_tree (&v, instantiate_expr, NULL, NULL);
1848 return NULL;
1851 /* Subroutine of instantiate_decls: Process all decls in the given
1852 BLOCK node and all its subblocks. */
1854 static void
1855 instantiate_decls_1 (tree let)
1857 tree t;
1859 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1861 if (DECL_RTL_SET_P (t))
1862 instantiate_decl_rtl (DECL_RTL (t));
1863 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1865 tree v = DECL_VALUE_EXPR (t);
1866 walk_tree (&v, instantiate_expr, NULL, NULL);
1870 /* Process all subblocks. */
1871 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1872 instantiate_decls_1 (t);
1875 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1876 all virtual registers in their DECL_RTL's. */
1878 static void
1879 instantiate_decls (tree fndecl)
1881 tree decl;
1882 unsigned ix;
1884 /* Process all parameters of the function. */
1885 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1887 instantiate_decl_rtl (DECL_RTL (decl));
1888 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1889 if (DECL_HAS_VALUE_EXPR_P (decl))
1891 tree v = DECL_VALUE_EXPR (decl);
1892 walk_tree (&v, instantiate_expr, NULL, NULL);
1896 if ((decl = DECL_RESULT (fndecl))
1897 && TREE_CODE (decl) == RESULT_DECL)
1899 if (DECL_RTL_SET_P (decl))
1900 instantiate_decl_rtl (DECL_RTL (decl));
1901 if (DECL_HAS_VALUE_EXPR_P (decl))
1903 tree v = DECL_VALUE_EXPR (decl);
1904 walk_tree (&v, instantiate_expr, NULL, NULL);
1908 /* Process the saved static chain if it exists. */
1909 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1910 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1911 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1913 /* Now process all variables defined in the function or its subblocks. */
1914 instantiate_decls_1 (DECL_INITIAL (fndecl));
1916 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1917 if (DECL_RTL_SET_P (decl))
1918 instantiate_decl_rtl (DECL_RTL (decl));
1919 vec_free (cfun->local_decls);
1922 /* Pass through the INSNS of function FNDECL and convert virtual register
1923 references to hard register references. */
1925 static unsigned int
1926 instantiate_virtual_regs (void)
1928 rtx_insn *insn;
1930 /* Compute the offsets to use for this function. */
1931 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1932 var_offset = STARTING_FRAME_OFFSET;
1933 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1934 out_arg_offset = STACK_POINTER_OFFSET;
1935 #ifdef FRAME_POINTER_CFA_OFFSET
1936 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1937 #else
1938 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1939 #endif
1941 /* Initialize recognition, indicating that volatile is OK. */
1942 init_recog ();
1944 /* Scan through all the insns, instantiating every virtual register still
1945 present. */
1946 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1947 if (INSN_P (insn))
1949 /* These patterns in the instruction stream can never be recognized.
1950 Fortunately, they shouldn't contain virtual registers either. */
1951 if (GET_CODE (PATTERN (insn)) == USE
1952 || GET_CODE (PATTERN (insn)) == CLOBBER
1953 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1954 continue;
1955 else if (DEBUG_INSN_P (insn))
1956 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1957 else
1958 instantiate_virtual_regs_in_insn (insn);
1960 if (insn->deleted ())
1961 continue;
1963 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1965 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1966 if (CALL_P (insn))
1967 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1970 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1971 instantiate_decls (current_function_decl);
1973 targetm.instantiate_decls ();
1975 /* Indicate that, from now on, assign_stack_local should use
1976 frame_pointer_rtx. */
1977 virtuals_instantiated = 1;
1979 return 0;
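/* Summary of the substitution performed above (offsets come from the
   target macros just used; the base registers shown are only the usual
   choices):

     virtual-incoming-args  ->  arg pointer    + in_arg_offset
     virtual-stack-vars     ->  frame pointer  + var_offset
     virtual-stack-dynamic  ->  stack pointer  + dynamic_offset
     virtual-outgoing-args  ->  stack pointer  + out_arg_offset
     virtual-cfa            ->  CFA base reg   + cfa_offset

   so an insn such as
     (set (reg:SI 60) (plus:SI (reg virtual-stack-vars) (const_int 16)))
   becomes roughly
     (set (reg:SI 60) (plus:SI (reg frame-pointer) (const_int 16 + var_offset)))
   after this pass.  */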
1982 namespace {
1984 const pass_data pass_data_instantiate_virtual_regs =
1986 RTL_PASS, /* type */
1987 "vregs", /* name */
1988 OPTGROUP_NONE, /* optinfo_flags */
1989 TV_NONE, /* tv_id */
1990 0, /* properties_required */
1991 0, /* properties_provided */
1992 0, /* properties_destroyed */
1993 0, /* todo_flags_start */
1994 0, /* todo_flags_finish */
1997 class pass_instantiate_virtual_regs : public rtl_opt_pass
1999 public:
2000 pass_instantiate_virtual_regs (gcc::context *ctxt)
2001 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2004 /* opt_pass methods: */
2005 virtual unsigned int execute (function *)
2007 return instantiate_virtual_regs ();
2010 }; // class pass_instantiate_virtual_regs
2012 } // anon namespace
2014 rtl_opt_pass *
2015 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2017 return new pass_instantiate_virtual_regs (ctxt);
2021 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2022 This means a type for which function calls must pass an address to the
2023 function or get an address back from the function.
2024 EXP may be a type node or an expression (whose type is tested). */
2027 aggregate_value_p (const_tree exp, const_tree fntype)
2029 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2030 int i, regno, nregs;
2031 rtx reg;
2033 if (fntype)
2034 switch (TREE_CODE (fntype))
2036 case CALL_EXPR:
2038 tree fndecl = get_callee_fndecl (fntype);
2039 if (fndecl)
2040 fntype = TREE_TYPE (fndecl);
2041 else if (CALL_EXPR_FN (fntype))
2042 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2043 else
2044 /* For internal functions, assume nothing needs to be
2045 returned in memory. */
2046 return 0;
2048 break;
2049 case FUNCTION_DECL:
2050 fntype = TREE_TYPE (fntype);
2051 break;
2052 case FUNCTION_TYPE:
2053 case METHOD_TYPE:
2054 break;
2055 case IDENTIFIER_NODE:
2056 fntype = NULL_TREE;
2057 break;
2058 default:
2059 /* We don't expect other tree types here. */
2060 gcc_unreachable ();
2063 if (VOID_TYPE_P (type))
2064 return 0;
2066 /* If a record should be passed the same as its first (and only) member,
2067 don't pass it as an aggregate. */
2068 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2069 return aggregate_value_p (first_field (type), fntype);
2071 /* If the front end has decided that this needs to be passed by
2072 reference, do so. */
2073 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2074 && DECL_BY_REFERENCE (exp))
2075 return 1;
2077 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2078 if (fntype && TREE_ADDRESSABLE (fntype))
2079 return 1;
2081 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2082 and thus can't be returned in registers. */
2083 if (TREE_ADDRESSABLE (type))
2084 return 1;
2086 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2087 return 1;
2089 if (targetm.calls.return_in_memory (type, fntype))
2090 return 1;
2092 /* Make sure we have suitable call-clobbered regs to return
2093 the value in; if not, we must return it in memory. */
2094 reg = hard_function_value (type, 0, fntype, 0);
2096 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2097 it is OK. */
2098 if (!REG_P (reg))
2099 return 0;
2101 regno = REGNO (reg);
2102 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2103 for (i = 0; i < nregs; i++)
2104 if (! call_used_regs[regno + i])
2105 return 1;
2107 return 0;
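/* Example of the decision above (the outcome is ABI-dependent; this is only
   an illustration):

     struct big { int a[8]; };
     struct big f (void);   // aggregate_value_p -> nonzero on most ABIs,
                            // so the caller supplies a hidden return slot.
     int g (void);          // aggregate_value_p -> 0, returned in a register.

   With -fpcc-struct-return even small aggregates take the memory path.  */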
2110 /* Return true if we should assign DECL a pseudo register; false if it
2111 should live on the local stack. */
2113 bool
2114 use_register_for_decl (const_tree decl)
2116 if (TREE_CODE (decl) == SSA_NAME)
2118 /* We often try to use the SSA_NAME, instead of its underlying
2119 decl, to get type information and guide decisions, to avoid
2120 differences of behavior between anonymous and named
2121 variables, but in this one case we have to go for the actual
2122 variable if there is one. The main reason is that, at least
2123 at -O0, we want to place user variables on the stack, but we
2124 don't mind using pseudos for anonymous or ignored temps.
2125 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2126 should go in pseudos, whereas their corresponding variables
2127 might have to go on the stack. So, disregarding the decl
2128 here would negatively impact debug info at -O0, enable
2129 coalescing between SSA_NAMEs that ought to get different
2130 stack/pseudo assignments, and get the incoming argument
2131 processing thoroughly confused by PARM_DECLs expected to live
2132 in stack slots but assigned to pseudos. */
2133 if (!SSA_NAME_VAR (decl))
2134 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2135 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2137 decl = SSA_NAME_VAR (decl);
2140 /* Honor volatile. */
2141 if (TREE_SIDE_EFFECTS (decl))
2142 return false;
2144 /* Honor addressability. */
2145 if (TREE_ADDRESSABLE (decl))
2146 return false;
2148 /* Decl is implicitly addressable by bound stores and loads
2149 if it is an aggregate holding bounds. */
2150 if (chkp_function_instrumented_p (current_function_decl)
2151 && TREE_TYPE (decl)
2152 && !BOUNDED_P (decl)
2153 && chkp_type_has_pointer (TREE_TYPE (decl)))
2154 return false;
2156 /* Only register-like things go in registers. */
2157 if (DECL_MODE (decl) == BLKmode)
2158 return false;
2160 /* If -ffloat-store specified, don't put explicit float variables
2161 into registers. */
2162 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2163 propagates values across these stores, and it probably shouldn't. */
2164 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2165 return false;
2167 if (!targetm.calls.allocate_stack_slots_for_args ())
2168 return true;
2170 /* If we're not interested in tracking debugging information for
2171 this decl, then we can certainly put it in a register. */
2172 if (DECL_IGNORED_P (decl))
2173 return true;
2175 if (optimize)
2176 return true;
2178 if (!DECL_REGISTER (decl))
2179 return false;
2181 switch (TREE_CODE (TREE_TYPE (decl)))
2183 case RECORD_TYPE:
2184 case UNION_TYPE:
2185 case QUAL_UNION_TYPE:
2186 /* When not optimizing, disregard register keyword for variables with
2187 types containing methods, otherwise the methods won't be callable
2188 from the debugger. */
2189 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2190 return false;
2191 break;
2192 default:
2193 break;
2196 return true;
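/* Example of the -O0 policy above (a sketch, not a guarantee): in

     void f (int n) { int user_var = n; }

   compiled at -O0, `user_var' is neither DECL_IGNORED_P nor DECL_REGISTER,
   so use_register_for_decl returns false and the variable lives in a stack
   slot, while a compiler-generated, ignored temporary would go into a
   pseudo.  Writing `register int user_var' flips the answer for
   register-like types.  */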
2199 /* Structures to communicate between the subroutines of assign_parms.
2200 The first holds data persistent across all parameters, the second
2201 is cleared out for each parameter. */
2203 struct assign_parm_data_all
2205 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2206 should become a job of the target or otherwise encapsulated. */
2207 CUMULATIVE_ARGS args_so_far_v;
2208 cumulative_args_t args_so_far;
2209 struct args_size stack_args_size;
2210 tree function_result_decl;
2211 tree orig_fnargs;
2212 rtx_insn *first_conversion_insn;
2213 rtx_insn *last_conversion_insn;
2214 HOST_WIDE_INT pretend_args_size;
2215 HOST_WIDE_INT extra_pretend_bytes;
2216 int reg_parm_stack_space;
2219 struct assign_parm_data_one
2221 tree nominal_type;
2222 tree passed_type;
2223 rtx entry_parm;
2224 rtx stack_parm;
2225 machine_mode nominal_mode;
2226 machine_mode passed_mode;
2227 machine_mode promoted_mode;
2228 struct locate_and_pad_arg_data locate;
2229 int partial;
2230 BOOL_BITFIELD named_arg : 1;
2231 BOOL_BITFIELD passed_pointer : 1;
2232 BOOL_BITFIELD on_stack : 1;
2233 BOOL_BITFIELD loaded_in_reg : 1;
2236 struct bounds_parm_data
2238 assign_parm_data_one parm_data;
2239 tree bounds_parm;
2240 tree ptr_parm;
2241 rtx ptr_entry;
2242 int bound_no;
2245 /* A subroutine of assign_parms. Initialize ALL. */
2247 static void
2248 assign_parms_initialize_all (struct assign_parm_data_all *all)
2250 tree fntype ATTRIBUTE_UNUSED;
2252 memset (all, 0, sizeof (*all));
2254 fntype = TREE_TYPE (current_function_decl);
2256 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2257 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2258 #else
2259 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2260 current_function_decl, -1);
2261 #endif
2262 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2264 #ifdef INCOMING_REG_PARM_STACK_SPACE
2265 all->reg_parm_stack_space
2266 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2267 #endif
2270 /* If ARGS contains entries with complex types, split each such entry into
2271 two entries of the component type. The vector ARGS is updated in place,
2272 so no separate list is returned. */
2274 static void
2275 split_complex_args (struct assign_parm_data_all *all, vec<tree> *args)
2277 unsigned i;
2278 tree p;
2280 FOR_EACH_VEC_ELT (*args, i, p)
2282 tree type = TREE_TYPE (p);
2283 if (TREE_CODE (type) == COMPLEX_TYPE
2284 && targetm.calls.split_complex_arg (type))
2286 tree cparm = p;
2287 tree decl;
2288 tree subtype = TREE_TYPE (type);
2289 bool addressable = TREE_ADDRESSABLE (p);
2291 /* Rewrite the PARM_DECL's type with its component. */
2292 p = copy_node (p);
2293 TREE_TYPE (p) = subtype;
2294 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2295 DECL_MODE (p) = VOIDmode;
2296 DECL_SIZE (p) = NULL;
2297 DECL_SIZE_UNIT (p) = NULL;
2298 /* If this arg must go in memory, put it in a pseudo here.
2299 We can't allow it to go in memory as per normal parms,
2300 because the usual place might not have the imag part
2301 adjacent to the real part. */
2302 DECL_ARTIFICIAL (p) = addressable;
2303 DECL_IGNORED_P (p) = addressable;
2304 TREE_ADDRESSABLE (p) = 0;
2305 /* Reset the RTL before layout_decl, or it may change the
2306 mode of the RTL of the original argument copied to P. */
2307 SET_DECL_RTL (p, NULL_RTX);
2308 layout_decl (p, 0);
2309 (*args)[i] = p;
2311 /* Build a second synthetic decl. */
2312 decl = build_decl (EXPR_LOCATION (p),
2313 PARM_DECL, NULL_TREE, subtype);
2314 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2315 DECL_ARTIFICIAL (decl) = addressable;
2316 DECL_IGNORED_P (decl) = addressable;
2317 layout_decl (decl, 0);
2318 args->safe_insert (++i, decl);
2320 /* If we are expanding a function, rather than gimplifying
2321 it, propagate the RTL of the complex parm to the split
2322 declarations, and set their contexts so that
2323 maybe_reset_rtl_for_parm can recognize them and refrain
2324 from resetting their RTL. */
2325 if (currently_expanding_to_rtl)
2327 maybe_reset_rtl_for_parm (cparm);
2328 rtx rtl = rtl_for_parm (all, cparm);
2329 if (rtl)
2331 /* If this parm is unassigned, assign it now: the
2332 newly-created decls wouldn't expect the need for
2333 assignment, and if they were assigned
2334 independently, they might not end up in adjacent
2335 slots, so unsplit wouldn't be able to fill in the
2336 unassigned address of the complex MEM. */
2337 if (parm_in_unassigned_mem_p (cparm, rtl))
2339 int align = STACK_SLOT_ALIGNMENT
2340 (TREE_TYPE (cparm), GET_MODE (rtl), MEM_ALIGN (rtl));
2341 rtx loc = assign_stack_local
2342 (GET_MODE (rtl), GET_MODE_SIZE (GET_MODE (rtl)),
2343 align);
2344 XEXP (rtl, 0) = XEXP (loc, 0);
2347 SET_DECL_RTL (p, read_complex_part (rtl, false));
2348 SET_DECL_RTL (decl, read_complex_part (rtl, true));
2350 DECL_CONTEXT (p) = cparm;
2351 DECL_CONTEXT (decl) = cparm;
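/* Illustration (assuming the target's split_complex_arg hook accepts the
   mode in question): a declaration such as

     double f (_Complex double z);

   is rewritten here so that `z' is represented by two synthetic DFmode
   PARM_DECLs, one for the real part and one for the imaginary part; they
   are glued back together later by assign_parms_unsplit_complex below.  */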
2358 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2359 the hidden struct return argument, and (abi willing) complex args.
2360 Return the new parameter list. */
2362 static vec<tree>
2363 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2365 tree fndecl = current_function_decl;
2366 tree fntype = TREE_TYPE (fndecl);
2367 vec<tree> fnargs = vNULL;
2368 tree arg;
2370 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2371 fnargs.safe_push (arg);
2373 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2375 /* If struct value address is treated as the first argument, make it so. */
2376 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2377 && ! cfun->returns_pcc_struct
2378 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2380 tree type = build_pointer_type (TREE_TYPE (fntype));
2381 tree decl;
2383 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2384 PARM_DECL, get_identifier (".result_ptr"), type);
2385 DECL_ARG_TYPE (decl) = type;
2386 DECL_ARTIFICIAL (decl) = 1;
2387 DECL_NAMELESS (decl) = 1;
2388 TREE_CONSTANT (decl) = 1;
2390 DECL_CHAIN (decl) = all->orig_fnargs;
2391 all->orig_fnargs = decl;
2392 fnargs.safe_insert (0, decl);
2394 all->function_result_decl = decl;
2396 /* If function is instrumented then bounds of the
2397 passed structure address is the second argument. */
2398 if (chkp_function_instrumented_p (fndecl))
2400 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2401 PARM_DECL, get_identifier (".result_bnd"),
2402 pointer_bounds_type_node);
2403 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2404 DECL_ARTIFICIAL (decl) = 1;
2405 DECL_NAMELESS (decl) = 1;
2406 TREE_CONSTANT (decl) = 1;
2408 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2409 DECL_CHAIN (all->orig_fnargs) = decl;
2410 fnargs.safe_insert (1, decl);
2414 /* If the target wants to split complex arguments into scalars, do so. */
2415 if (targetm.calls.split_complex_arg)
2416 split_complex_args (all, &fnargs);
2418 return fnargs;
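/* Example of the augmentation (a sketch; whether it triggers depends on
   aggregate_value_p and the target's struct_value_rtx hook):

     struct big { int a[8]; };
     struct big f (int x);

   is handled here as if it had been declared

     struct big f (struct big *.result_ptr, int x);

   with `.result_ptr' prepended to the argument vector returned above.  When
   MPX instrumentation is active, a `.result_bnd' parameter follows it.  */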
2421 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2422 data for the parameter. Incorporate ABI specifics such as pass-by-
2423 reference and type promotion. */
2425 static void
2426 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2427 struct assign_parm_data_one *data)
2429 tree nominal_type, passed_type;
2430 machine_mode nominal_mode, passed_mode, promoted_mode;
2431 int unsignedp;
2433 memset (data, 0, sizeof (*data));
2435 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2436 if (!cfun->stdarg)
2437 data->named_arg = 1; /* No variadic parms. */
2438 else if (DECL_CHAIN (parm))
2439 data->named_arg = 1; /* Not the last non-variadic parm. */
2440 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2441 data->named_arg = 1; /* Only variadic ones are unnamed. */
2442 else
2443 data->named_arg = 0; /* Treat as variadic. */
2445 nominal_type = TREE_TYPE (parm);
2446 passed_type = DECL_ARG_TYPE (parm);
2448 /* Look out for errors propagating this far. Also, if the parameter's
2449 type is void then its value doesn't matter. */
2450 if (TREE_TYPE (parm) == error_mark_node
2451 /* This can happen after weird syntax errors
2452 or if an enum type is defined among the parms. */
2453 || TREE_CODE (parm) != PARM_DECL
2454 || passed_type == NULL
2455 || VOID_TYPE_P (nominal_type))
2457 nominal_type = passed_type = void_type_node;
2458 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2459 goto egress;
2462 /* Find mode of arg as it is passed, and mode of arg as it should be
2463 during execution of this function. */
2464 passed_mode = TYPE_MODE (passed_type);
2465 nominal_mode = TYPE_MODE (nominal_type);
2467 /* If the parm is to be passed as a transparent union or record, use the
2468 type of the first field for the tests below. We have already verified
2469 that the modes are the same. */
2470 if ((TREE_CODE (passed_type) == UNION_TYPE
2471 || TREE_CODE (passed_type) == RECORD_TYPE)
2472 && TYPE_TRANSPARENT_AGGR (passed_type))
2473 passed_type = TREE_TYPE (first_field (passed_type));
2475 /* See if this arg was passed by invisible reference. */
2476 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2477 passed_type, data->named_arg))
2479 passed_type = nominal_type = build_pointer_type (passed_type);
2480 data->passed_pointer = true;
2481 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2484 /* Find mode as it is passed by the ABI. */
2485 unsignedp = TYPE_UNSIGNED (passed_type);
2486 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2487 TREE_TYPE (current_function_decl), 0);
2489 egress:
2490 data->nominal_type = nominal_type;
2491 data->passed_type = passed_type;
2492 data->nominal_mode = nominal_mode;
2493 data->passed_mode = passed_mode;
2494 data->promoted_mode = promoted_mode;
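/* Worked example (ABI-dependent, for illustration only): for

     void f (short s, struct big b);

   `s' keeps passed_mode == nominal_mode == HImode, and on targets that
   promote small integer arguments promote_function_mode gives it a wider
   promoted_mode such as SImode; if pass_by_reference is true for `b', its
   passed_type is rewritten to `struct big *', passed_pointer is set, and
   its passed and nominal modes become the pointer mode.  */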
2497 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2499 static void
2500 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2501 struct assign_parm_data_one *data, bool no_rtl)
2503 int varargs_pretend_bytes = 0;
2505 targetm.calls.setup_incoming_varargs (all->args_so_far,
2506 data->promoted_mode,
2507 data->passed_type,
2508 &varargs_pretend_bytes, no_rtl);
2510 /* If the back-end has requested extra stack space, record how much is
2511 needed. Do not change pretend_args_size otherwise since it may be
2512 nonzero from an earlier partial argument. */
2513 if (varargs_pretend_bytes > 0)
2514 all->pretend_args_size = varargs_pretend_bytes;
2517 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2518 the incoming location of the current parameter. */
2520 static void
2521 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2522 struct assign_parm_data_one *data)
2524 HOST_WIDE_INT pretend_bytes = 0;
2525 rtx entry_parm;
2526 bool in_regs;
2528 if (data->promoted_mode == VOIDmode)
2530 data->entry_parm = data->stack_parm = const0_rtx;
2531 return;
2534 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2535 data->promoted_mode,
2536 data->passed_type,
2537 data->named_arg);
2539 if (entry_parm == 0)
2540 data->promoted_mode = data->passed_mode;
2542 /* Determine parm's home in the stack, in case it arrives in the stack
2543 or we should pretend it did. Compute the stack position and rtx where
2544 the argument arrives and its size.
2546 There is one complexity here: If this was a parameter that would
2547 have been passed in registers, but wasn't only because it is
2548 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2549 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2550 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2551 as it was the previous time. */
2552 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2553 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2554 in_regs = true;
2555 #endif
2556 if (!in_regs && !data->named_arg)
2558 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2560 rtx tem;
2561 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2562 data->promoted_mode,
2563 data->passed_type, true);
2564 in_regs = tem != NULL;
2568 /* If this parameter was passed both in registers and in the stack, use
2569 the copy on the stack. */
2570 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2571 data->passed_type))
2572 entry_parm = 0;
2574 if (entry_parm)
2576 int partial;
2578 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2579 data->promoted_mode,
2580 data->passed_type,
2581 data->named_arg);
2582 data->partial = partial;
2584 /* The caller might already have allocated stack space for the
2585 register parameters. */
2586 if (partial != 0 && all->reg_parm_stack_space == 0)
2588 /* Part of this argument is passed in registers and part
2589 is passed on the stack. Ask the prologue code to extend
2590 the stack part so that we can recreate the full value.
2592 PRETEND_BYTES is the size of the registers we need to store.
2593 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2594 stack space that the prologue should allocate.
2596 Internally, gcc assumes that the argument pointer is aligned
2597 to STACK_BOUNDARY bits. This is used both for alignment
2598 optimizations (see init_emit) and to locate arguments that are
2599 aligned to more than PARM_BOUNDARY bits. We must preserve this
2600 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2601 a stack boundary. */
2603 /* We assume at most one partial arg, and it must be the first
2604 argument on the stack. */
2605 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2607 pretend_bytes = partial;
2608 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2610 /* We want to align relative to the actual stack pointer, so
2611 don't include this in the stack size until later. */
2612 all->extra_pretend_bytes = all->pretend_args_size;
2616 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2617 all->reg_parm_stack_space,
2618 entry_parm ? data->partial : 0, current_function_decl,
2619 &all->stack_args_size, &data->locate);
2621 /* Update parm_stack_boundary if this parameter is passed in the
2622 stack. */
2623 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2624 crtl->parm_stack_boundary = data->locate.boundary;
2626 /* Adjust offsets to include the pretend args. */
2627 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2628 data->locate.slot_offset.constant += pretend_bytes;
2629 data->locate.offset.constant += pretend_bytes;
2631 data->entry_parm = entry_parm;
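/* Illustration of the partial-argument case above (numbers are made up and
   target-dependent): if a 24-byte argument is passed with its first 8 bytes
   in registers and the rest on the stack, arg_partial_bytes returns 8, so
   pretend_args_size becomes CEIL_ROUND (8, STACK_BYTES) and the prologue
   allocates that much extra space, letting the register part be stored
   contiguously in front of the stack part.  */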
2634 /* A subroutine of assign_parms. If there is actually space on the stack
2635 for this parm, count it in stack_args_size and return true. */
2637 static bool
2638 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2639 struct assign_parm_data_one *data)
2641 /* Bounds are never passed on the stack to keep compatibility
2642 with not instrumented code. */
2643 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2644 return false;
2645 /* Trivially true if we've no incoming register. */
2646 else if (data->entry_parm == NULL)
2648 /* Also true if we're partially in registers and partially not,
2649 since we've arranged to drop the entire argument on the stack. */
2650 else if (data->partial != 0)
2652 /* Also true if the target says that it's passed in both registers
2653 and on the stack. */
2654 else if (GET_CODE (data->entry_parm) == PARALLEL
2655 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2657 /* Also true if the target says that there's stack allocated for
2658 all register parameters. */
2659 else if (all->reg_parm_stack_space > 0)
2661 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2662 else
2663 return false;
2665 all->stack_args_size.constant += data->locate.size.constant;
2666 if (data->locate.size.var)
2667 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2669 return true;
2672 /* A subroutine of assign_parms. Given that this parameter is allocated
2673 stack space by the ABI, find it. */
2675 static void
2676 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2678 rtx offset_rtx, stack_parm;
2679 unsigned int align, boundary;
2681 /* If we're passing this arg using a reg, make its stack home the
2682 aligned stack slot. */
2683 if (data->entry_parm)
2684 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2685 else
2686 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2688 stack_parm = crtl->args.internal_arg_pointer;
2689 if (offset_rtx != const0_rtx)
2690 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2691 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2693 if (!data->passed_pointer)
2695 set_mem_attributes (stack_parm, parm, 1);
2696 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2697 while the promoted mode's size is needed here. */
2698 if (data->promoted_mode != BLKmode
2699 && data->promoted_mode != DECL_MODE (parm))
2701 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2702 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2704 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2705 data->promoted_mode);
2706 if (offset)
2707 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2712 boundary = data->locate.boundary;
2713 align = BITS_PER_UNIT;
2715 /* If we're padding upward, we know that the alignment of the slot
2716 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2717 intentionally forcing upward padding. Otherwise we have to come
2718 up with a guess at the alignment based on OFFSET_RTX. */
2719 if (data->locate.where_pad != downward || data->entry_parm)
2720 align = boundary;
2721 else if (CONST_INT_P (offset_rtx))
2723 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2724 align = align & -align;
2726 set_mem_align (stack_parm, align);
2728 if (data->entry_parm)
2729 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2731 data->stack_parm = stack_parm;
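/* Sketch of the resulting rtl (the offset is an example only): a
   stack-passed argument at slot offset 16 ends up with

     stack_parm = (mem:SI (plus (reg internal-arg-pointer) (const_int 16)))

   where the internal arg pointer is typically still a virtual register at
   this point; it is replaced by a real base register later by
   instantiate_virtual_regs.  */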
2734 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2735 always valid and contiguous. */
2737 static void
2738 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2740 rtx entry_parm = data->entry_parm;
2741 rtx stack_parm = data->stack_parm;
2743 /* If this parm was passed part in regs and part in memory, pretend it
2744 arrived entirely in memory by pushing the register-part onto the stack.
2745 In the special case of a DImode or DFmode that is split, we could put
2746 it together in a pseudoreg directly, but for now that's not worth
2747 bothering with. */
2748 if (data->partial != 0)
2750 /* Handle calls that pass values in multiple non-contiguous
2751 locations. The Irix 6 ABI has examples of this. */
2752 if (GET_CODE (entry_parm) == PARALLEL)
2753 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2754 data->passed_type,
2755 int_size_in_bytes (data->passed_type));
2756 else
2758 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2759 move_block_from_reg (REGNO (entry_parm),
2760 validize_mem (copy_rtx (stack_parm)),
2761 data->partial / UNITS_PER_WORD);
2764 entry_parm = stack_parm;
2767 /* If we didn't decide this parm came in a register, by default it came
2768 on the stack. */
2769 else if (entry_parm == NULL)
2770 entry_parm = stack_parm;
2772 /* When an argument is passed in multiple locations, we can't make use
2773 of this information, but we can save some copying if the whole argument
2774 is passed in a single register. */
2775 else if (GET_CODE (entry_parm) == PARALLEL
2776 && data->nominal_mode != BLKmode
2777 && data->passed_mode != BLKmode)
2779 size_t i, len = XVECLEN (entry_parm, 0);
2781 for (i = 0; i < len; i++)
2782 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2783 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2784 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2785 == data->passed_mode)
2786 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2788 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2789 break;
2793 data->entry_parm = entry_parm;
2796 /* A subroutine of assign_parms. Reconstitute any values which were
2797 passed in multiple registers and would fit in a single register. */
2799 static void
2800 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2802 rtx entry_parm = data->entry_parm;
2804 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2805 This can be done with register operations rather than on the
2806 stack, even if we will store the reconstituted parameter on the
2807 stack later. */
2808 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2810 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2811 emit_group_store (parmreg, entry_parm, data->passed_type,
2812 GET_MODE_SIZE (GET_MODE (entry_parm)));
2813 entry_parm = parmreg;
2816 data->entry_parm = entry_parm;
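/* Example (illustrative; register numbers and modes are target-specific):
   a DImode argument arriving as roughly

     (parallel:DI [(expr_list (reg:SI 0) (const_int 0))
                   (expr_list (reg:SI 1) (const_int 4))])

   is reassembled here into a single DImode pseudo via emit_group_store, so
   later code can treat it as one register value.  */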
2819 /* Wrapper for use_register_for_decl that special-cases the
2820 .result_ptr as the function's RESULT_DECL when the RESULT_DECL is
2821 passed by reference. */
2823 static bool
2824 use_register_for_parm_decl (struct assign_parm_data_all *all, tree parm)
2826 if (parm == all->function_result_decl)
2828 tree result = DECL_RESULT (current_function_decl);
2830 if (DECL_BY_REFERENCE (result))
2831 parm = result;
2834 return use_register_for_decl (parm);
2837 /* Wrapper for get_rtl_for_parm_ssa_default_def that special-cases
2838 the .result_ptr as the function's RESULT_DECL when the RESULT_DECL
2839 is passed by reference. */
2841 static rtx
2842 rtl_for_parm (struct assign_parm_data_all *all, tree parm)
2844 if (parm == all->function_result_decl)
2846 tree result = DECL_RESULT (current_function_decl);
2848 if (!DECL_BY_REFERENCE (result))
2849 return NULL_RTX;
2851 parm = result;
2854 return get_rtl_for_parm_ssa_default_def (parm);
2857 /* Reset the location of PARM_DECLs and RESULT_DECLs that had
2858 SSA_NAMEs in multiple partitions, so that assign_parms will choose
2859 the default def, if it exists, or create new RTL to hold the unused
2860 entry value. If we are coalescing across variables, we want to
2861 reset the location too, because a parm without a default def
2862 (incoming value unused) might be coalesced with one with a default
2863 def, and then assign_parms would copy both incoming values to the
2864 same location, which might cause the wrong value to survive. */
2865 static void
2866 maybe_reset_rtl_for_parm (tree parm)
2868 gcc_assert (TREE_CODE (parm) == PARM_DECL
2869 || TREE_CODE (parm) == RESULT_DECL);
2871 /* This is a split complex parameter, and its context was set to its
2872 original PARM_DECL in split_complex_args so that we could
2873 recognize it here and not reset its RTL. */
2874 if (DECL_CONTEXT (parm) && TREE_CODE (DECL_CONTEXT (parm)) == PARM_DECL)
2876 DECL_CONTEXT (parm) = DECL_CONTEXT (DECL_CONTEXT (parm));
2877 return;
2880 if ((flag_tree_coalesce_vars
2881 || (DECL_RTL_SET_P (parm) && DECL_RTL (parm) == pc_rtx))
2882 && is_gimple_reg (parm))
2883 SET_DECL_RTL (parm, NULL_RTX);
2886 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2887 always valid and properly aligned. */
2889 static void
2890 assign_parm_adjust_stack_rtl (struct assign_parm_data_all *all, tree parm,
2891 struct assign_parm_data_one *data)
2893 rtx stack_parm = data->stack_parm;
2895 /* If out-of-SSA assigned RTL to the parm default def, make sure we
2896 don't use what we might have computed before. */
2897 rtx ssa_assigned = rtl_for_parm (all, parm);
2898 if (ssa_assigned)
2899 stack_parm = NULL;
2901 /* If we can't trust the parm stack slot to be aligned enough for its
2902 ultimate type, don't use that slot after entry. We'll make another
2903 stack slot, if we need one. */
2904 else if (stack_parm
2905 && ((STRICT_ALIGNMENT
2906 && (GET_MODE_ALIGNMENT (data->nominal_mode)
2907 > MEM_ALIGN (stack_parm)))
2908 || (data->nominal_type
2909 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2910 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2911 stack_parm = NULL;
2913 /* If parm was passed in memory, and we need to convert it on entry,
2914 don't store it back in that same slot. */
2915 else if (data->entry_parm == stack_parm
2916 && data->nominal_mode != BLKmode
2917 && data->nominal_mode != data->passed_mode)
2918 stack_parm = NULL;
2920 /* If stack protection is in effect for this function, don't leave any
2921 pointers in their passed stack slots. */
2922 else if (crtl->stack_protect_guard
2923 && (flag_stack_protect == 2
2924 || data->passed_pointer
2925 || POINTER_TYPE_P (data->nominal_type)))
2926 stack_parm = NULL;
2928 data->stack_parm = stack_parm;
2931 /* A subroutine of assign_parms. Return true if the current parameter
2932 should be stored as a BLKmode in the current frame. */
2934 static bool
2935 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2937 if (data->nominal_mode == BLKmode)
2938 return true;
2939 if (GET_MODE (data->entry_parm) == BLKmode)
2940 return true;
2942 #ifdef BLOCK_REG_PADDING
2943 /* Only assign_parm_setup_block knows how to deal with register arguments
2944 that are padded at the least significant end. */
2945 if (REG_P (data->entry_parm)
2946 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2947 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2948 == (BYTES_BIG_ENDIAN ? upward : downward)))
2949 return true;
2950 #endif
2952 return false;
2955 /* Return true if FROM_EXPAND is a MEM with an address to be filled in
2956 by assign_parms. This should be the case if, and only if,
2957 parm_in_stack_slot_p holds for the parm DECL that expanded to
2958 FROM_EXPAND, so we check that, too. */
2960 static bool
2961 parm_in_unassigned_mem_p (tree decl, rtx from_expand)
2963 bool result = MEM_P (from_expand) && !XEXP (from_expand, 0);
2965 gcc_assert (result == parm_in_stack_slot_p (decl)
2966 /* Maybe it was already assigned. That's ok, especially
2967 for split complex args. */
2968 || (!result && MEM_P (from_expand)
2969 && (XEXP (from_expand, 0) == virtual_stack_vars_rtx
2970 || (GET_CODE (XEXP (from_expand, 0)) == PLUS
2971 && XEXP (XEXP (from_expand, 0), 0) == virtual_stack_vars_rtx))));
2973 return result;
2976 /* A subroutine of assign_parms. Arrange for the parameter to be
2977 present and valid in DATA->STACK_RTL. */
2979 static void
2980 assign_parm_setup_block (struct assign_parm_data_all *all,
2981 tree parm, struct assign_parm_data_one *data)
2983 rtx entry_parm = data->entry_parm;
2984 rtx stack_parm = data->stack_parm;
2985 HOST_WIDE_INT size;
2986 HOST_WIDE_INT size_stored;
2988 if (GET_CODE (entry_parm) == PARALLEL)
2989 entry_parm = emit_group_move_into_temps (entry_parm);
2991 size = int_size_in_bytes (data->passed_type);
2992 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2994 if (stack_parm == 0)
2996 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2997 rtx from_expand = rtl_for_parm (all, parm);
2998 if (from_expand && !parm_in_unassigned_mem_p (parm, from_expand))
2999 stack_parm = copy_rtx (from_expand);
3000 else
3002 stack_parm = assign_stack_local (BLKmode, size_stored,
3003 DECL_ALIGN (parm));
3004 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
3005 PUT_MODE (stack_parm, GET_MODE (entry_parm));
3006 if (from_expand)
3008 gcc_assert (GET_CODE (stack_parm) == MEM);
3009 gcc_assert (parm_in_unassigned_mem_p (parm, from_expand));
3010 XEXP (from_expand, 0) = XEXP (stack_parm, 0);
3011 PUT_MODE (from_expand, GET_MODE (stack_parm));
3012 stack_parm = copy_rtx (from_expand);
3014 else
3015 set_mem_attributes (stack_parm, parm, 1);
3019 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
3020 calls that pass values in multiple non-contiguous locations. */
3021 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
3023 rtx mem;
3025 /* Note that we will be storing an integral number of words.
3026 So we have to be careful to ensure that we allocate an
3027 integral number of words. We do this above when we call
3028 assign_stack_local if space was not allocated in the argument
3029 list. If it was, this will not work if PARM_BOUNDARY is not
3030 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3031 if it becomes a problem. The exception is when BLKmode arrives
3032 with arguments not conforming to word_mode. */
3034 if (data->stack_parm == 0)
3036 else if (GET_CODE (entry_parm) == PARALLEL)
3038 else
3039 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
3041 mem = validize_mem (copy_rtx (stack_parm));
3043 /* Handle values in multiple non-contiguous locations. */
3044 if (GET_CODE (entry_parm) == PARALLEL)
3046 push_to_sequence2 (all->first_conversion_insn,
3047 all->last_conversion_insn);
3048 emit_group_store (mem, entry_parm, data->passed_type, size);
3049 all->first_conversion_insn = get_insns ();
3050 all->last_conversion_insn = get_last_insn ();
3051 end_sequence ();
3054 else if (size == 0)
3057 /* MEM may be a REG if coalescing assigns the param's partition
3058 to a pseudo. */
3059 else if (REG_P (mem))
3060 emit_move_insn (mem, entry_parm);
3062 /* If SIZE is that of a mode no bigger than a word, just use
3063 that mode's store operation. */
3064 else if (size <= UNITS_PER_WORD)
3066 machine_mode mode
3067 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
3069 if (mode != BLKmode
3070 #ifdef BLOCK_REG_PADDING
3071 && (size == UNITS_PER_WORD
3072 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3073 != (BYTES_BIG_ENDIAN ? upward : downward)))
3074 #endif
3077 rtx reg;
3079 /* We are really truncating a word_mode value containing
3080 SIZE bytes into a value of mode MODE. If such an
3081 operation requires no actual instructions, we can refer
3082 to the value directly in mode MODE, otherwise we must
3083 start with the register in word_mode and explicitly
3084 convert it. */
3085 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
3086 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3087 else
3089 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3090 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3092 emit_move_insn (change_address (mem, mode, 0), reg);
3095 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3096 machine must be aligned to the left before storing
3097 to memory. Note that the previous test doesn't
3098 handle all cases (e.g. SIZE == 3). */
3099 else if (size != UNITS_PER_WORD
3100 #ifdef BLOCK_REG_PADDING
3101 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3102 == downward)
3103 #else
3104 && BYTES_BIG_ENDIAN
3105 #endif
3108 rtx tem, x;
3109 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3110 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3112 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3113 tem = change_address (mem, word_mode, 0);
3114 emit_move_insn (tem, x);
3116 else
3117 move_block_from_reg (REGNO (entry_parm), mem,
3118 size_stored / UNITS_PER_WORD);
3120 else
3121 move_block_from_reg (REGNO (entry_parm), mem,
3122 size_stored / UNITS_PER_WORD);
3124 else if (data->stack_parm == 0)
3126 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3127 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3128 BLOCK_OP_NORMAL);
3129 all->first_conversion_insn = get_insns ();
3130 all->last_conversion_insn = get_last_insn ();
3131 end_sequence ();
3134 data->stack_parm = stack_parm;
3135 SET_DECL_RTL (parm, stack_parm);
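/* Example path through the code above (assuming a 32-bit word target and an
   argument arriving as a BLKmode REG spanning consecutive registers): a
   12-byte struct gets a stack slot of size_stored = CEIL_ROUND (12,
   UNITS_PER_WORD) = 12, and move_block_from_reg copies the three words into
   it; had it arrived wholly on the stack, the existing slot or a block move
   would have been used instead.  */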
3138 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3139 parameter. Get it there. Perform all ABI specified conversions. */
3141 static void
3142 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3143 struct assign_parm_data_one *data)
3145 rtx parmreg, validated_mem;
3146 rtx equiv_stack_parm;
3147 machine_mode promoted_nominal_mode;
3148 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3149 bool did_conversion = false;
3150 bool need_conversion, moved;
3152 /* Store the parm in a pseudoregister during the function, but we may
3153 need to do it in a wider mode. Using 2 here makes the result
3154 consistent with promote_decl_mode and thus expand_expr_real_1. */
3155 promoted_nominal_mode
3156 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3157 TREE_TYPE (current_function_decl), 2);
3159 rtx from_expand = parmreg = rtl_for_parm (all, parm);
3161 if (from_expand && !data->passed_pointer)
3163 if (GET_MODE (parmreg) != promoted_nominal_mode)
3164 parmreg = gen_lowpart (promoted_nominal_mode, parmreg);
3166 else if (!from_expand || parm_in_unassigned_mem_p (parm, from_expand))
3168 parmreg = gen_reg_rtx (promoted_nominal_mode);
3169 if (!DECL_ARTIFICIAL (parm))
3170 mark_user_reg (parmreg);
3172 if (from_expand)
3174 gcc_assert (data->passed_pointer);
3175 gcc_assert (GET_CODE (from_expand) == MEM
3176 && XEXP (from_expand, 0) == NULL_RTX);
3177 XEXP (from_expand, 0) = parmreg;
3181 /* If this was an item that we received a pointer to,
3182 set DECL_RTL appropriately. */
3183 if (from_expand)
3184 SET_DECL_RTL (parm, from_expand);
3185 else if (data->passed_pointer)
3187 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3188 set_mem_attributes (x, parm, 1);
3189 SET_DECL_RTL (parm, x);
3191 else
3192 SET_DECL_RTL (parm, parmreg);
3194 assign_parm_remove_parallels (data);
3196 /* Copy the value into the register, thus bridging between
3197 assign_parm_find_data_types and expand_expr_real_1. */
3199 equiv_stack_parm = data->stack_parm;
3200 if (!equiv_stack_parm)
3201 equiv_stack_parm = data->entry_parm;
3202 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3204 need_conversion = (data->nominal_mode != data->passed_mode
3205 || promoted_nominal_mode != data->promoted_mode);
3206 gcc_assert (!(need_conversion && data->passed_pointer && from_expand));
3207 moved = false;
3209 if (need_conversion
3210 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3211 && data->nominal_mode == data->passed_mode
3212 && data->nominal_mode == GET_MODE (data->entry_parm))
3214 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3215 mode, by the caller. We now have to convert it to
3216 NOMINAL_MODE, if different. However, PARMREG may be in
3217 a different mode than NOMINAL_MODE if it is being stored
3218 promoted.
3220 If ENTRY_PARM is a hard register, it might be in a register
3221 not valid for operating in its mode (e.g., an odd-numbered
3222 register for a DFmode). In that case, moves are the only
3223 thing valid, so we can't do a convert from there. This
3224 occurs when the calling sequence allow such misaligned
3225 usages.
3227 In addition, the conversion may involve a call, which could
3228 clobber parameters which haven't been copied to pseudo
3229 registers yet.
3231 First, we try to emit an insn which performs the necessary
3232 conversion. We verify that this insn does not clobber any
3233 hard registers. */
3235 enum insn_code icode;
3236 rtx op0, op1;
3238 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3239 unsignedp);
3241 op0 = parmreg;
3242 op1 = validated_mem;
3243 if (icode != CODE_FOR_nothing
3244 && insn_operand_matches (icode, 0, op0)
3245 && insn_operand_matches (icode, 1, op1))
3247 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3248 rtx_insn *insn, *insns;
3249 rtx t = op1;
3250 HARD_REG_SET hardregs;
3252 start_sequence ();
3253 /* If op1 is a hard register that is likely spilled, first
3254 force it into a pseudo, otherwise combiner might extend
3255 its lifetime too much. */
3256 if (GET_CODE (t) == SUBREG)
3257 t = SUBREG_REG (t);
3258 if (REG_P (t)
3259 && HARD_REGISTER_P (t)
3260 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3261 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3263 t = gen_reg_rtx (GET_MODE (op1));
3264 emit_move_insn (t, op1);
3266 else
3267 t = op1;
3268 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3269 data->passed_mode, unsignedp);
3270 emit_insn (pat);
3271 insns = get_insns ();
3273 moved = true;
3274 CLEAR_HARD_REG_SET (hardregs);
3275 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3277 if (INSN_P (insn))
3278 note_stores (PATTERN (insn), record_hard_reg_sets,
3279 &hardregs);
3280 if (!hard_reg_set_empty_p (hardregs))
3281 moved = false;
3284 end_sequence ();
3286 if (moved)
3288 emit_insn (insns);
3289 if (equiv_stack_parm != NULL_RTX)
3290 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3291 equiv_stack_parm);
3296 if (moved)
3297 /* Nothing to do. */
3299 else if (need_conversion)
3301 /* We did not have an insn to convert directly, or the sequence
3302 generated appeared unsafe. We must first copy the parm to a
3303 pseudo reg, and save the conversion until after all
3304 parameters have been moved. */
3306 int save_tree_used;
3307 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3309 emit_move_insn (tempreg, validated_mem);
3311 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3312 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3314 if (GET_CODE (tempreg) == SUBREG
3315 && GET_MODE (tempreg) == data->nominal_mode
3316 && REG_P (SUBREG_REG (tempreg))
3317 && data->nominal_mode == data->passed_mode
3318 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3319 && GET_MODE_SIZE (GET_MODE (tempreg))
3320 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3322 /* The argument is already sign/zero extended, so note it
3323 into the subreg. */
3324 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3325 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3328 /* TREE_USED gets set erroneously during expand_assignment. */
3329 save_tree_used = TREE_USED (parm);
3330 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3331 TREE_USED (parm) = save_tree_used;
3332 all->first_conversion_insn = get_insns ();
3333 all->last_conversion_insn = get_last_insn ();
3334 end_sequence ();
3336 did_conversion = true;
3338 /* We don't want to copy the incoming pointer to a parmreg expected
3339 to hold the value rather than the pointer. */
3340 else if (!data->passed_pointer || parmreg != from_expand)
3341 emit_move_insn (parmreg, validated_mem);
3343 /* If we were passed a pointer but the actual value can safely live
3344 in a register, retrieve it and use it directly. */
3345 if (data->passed_pointer
3346 && (from_expand || TYPE_MODE (TREE_TYPE (parm)) != BLKmode))
3348 rtx src = DECL_RTL (parm);
3350 /* We can't use nominal_mode, because it will have been set to
3351 Pmode above. We must use the actual mode of the parm. */
3352 if (from_expand)
3354 parmreg = from_expand;
3355 gcc_assert (GET_MODE (parmreg) == TYPE_MODE (TREE_TYPE (parm)));
3356 src = gen_rtx_MEM (GET_MODE (parmreg), validated_mem);
3357 set_mem_attributes (src, parm, 1);
3359 else if (use_register_for_decl (parm))
3361 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3362 mark_user_reg (parmreg);
3364 else
3366 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3367 TYPE_MODE (TREE_TYPE (parm)),
3368 TYPE_ALIGN (TREE_TYPE (parm)));
3369 parmreg
3370 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3371 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3372 align);
3373 set_mem_attributes (parmreg, parm, 1);
3376 if (GET_MODE (parmreg) != GET_MODE (src))
3378 rtx tempreg = gen_reg_rtx (GET_MODE (src));
3379 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3381 push_to_sequence2 (all->first_conversion_insn,
3382 all->last_conversion_insn);
3383 emit_move_insn (tempreg, src);
3384 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3385 emit_move_insn (parmreg, tempreg);
3386 all->first_conversion_insn = get_insns ();
3387 all->last_conversion_insn = get_last_insn ();
3388 end_sequence ();
3390 did_conversion = true;
3392 else if (GET_MODE (parmreg) == BLKmode)
3393 gcc_assert (parm_in_stack_slot_p (parm));
3394 else
3395 emit_move_insn (parmreg, src);
3397 SET_DECL_RTL (parm, parmreg);
3399 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3400 now the parm. */
3401 data->stack_parm = equiv_stack_parm = NULL;
3404 /* Mark the register as eliminable if we did no conversion and it was
3405 copied from memory at a fixed offset, and the arg pointer was not
3406 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3407 offset formed an invalid address, such memory-equivalences as we
3408 make here would screw up life analysis for it. */
3409 if (data->nominal_mode == data->passed_mode
3410 && !did_conversion
3411 && equiv_stack_parm != 0
3412 && MEM_P (equiv_stack_parm)
3413 && data->locate.offset.var == 0
3414 && reg_mentioned_p (virtual_incoming_args_rtx,
3415 XEXP (equiv_stack_parm, 0)))
3417 rtx_insn *linsn = get_last_insn ();
3418 rtx_insn *sinsn;
3419 rtx set;
3421 /* Mark complex types separately. */
3422 if (GET_CODE (parmreg) == CONCAT)
3424 machine_mode submode
3425 = GET_MODE_INNER (GET_MODE (parmreg));
3426 int regnor = REGNO (XEXP (parmreg, 0));
3427 int regnoi = REGNO (XEXP (parmreg, 1));
3428 rtx stackr = adjust_address_nv (equiv_stack_parm, submode, 0);
3429 rtx stacki = adjust_address_nv (equiv_stack_parm, submode,
3430 GET_MODE_SIZE (submode));
3432 /* Scan backwards for the set of the real and
3433 imaginary parts. */
3434 for (sinsn = linsn; sinsn != 0;
3435 sinsn = prev_nonnote_insn (sinsn))
3437 set = single_set (sinsn);
3438 if (set == 0)
3439 continue;
3441 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3442 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3443 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3444 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3447 else
3448 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3451 /* For pointer data type, suggest pointer register. */
3452 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3453 mark_reg_pointer (parmreg,
3454 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
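/* Illustration of the conversion handling above (a sketch; the exact modes
   depend on promote_function_mode): when the mode the argument arrives in
   differs from the promoted mode we want to keep it in, the code first
   tries to emit a single extend insn from entry_parm into the pseudo and
   accepts it only if the generated sequence clobbers no hard registers;
   otherwise the value is copied to a temporary and the conversion is queued
   on all->first/last_conversion_insn, to be emitted after every parameter
   has been moved out of its incoming register.  */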
3457 /* A subroutine of assign_parms. Allocate stack space to hold the current
3458 parameter. Get it there. Perform all ABI specified conversions. */
3460 static void
3461 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3462 struct assign_parm_data_one *data)
3464 /* Value must be stored in the stack slot STACK_PARM during function
3465 execution. */
3466 bool to_conversion = false;
3468 assign_parm_remove_parallels (data);
3470 if (data->promoted_mode != data->nominal_mode)
3472 /* Conversion is required. */
3473 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3475 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3477 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3478 to_conversion = true;
3480 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3481 TYPE_UNSIGNED (TREE_TYPE (parm)));
3483 if (data->stack_parm)
3485 int offset = subreg_lowpart_offset (data->nominal_mode,
3486 GET_MODE (data->stack_parm));
3487 /* ??? This may need a big-endian conversion on sparc64. */
3488 data->stack_parm
3489 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3490 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3491 set_mem_offset (data->stack_parm,
3492 MEM_OFFSET (data->stack_parm) + offset);
3496 if (data->entry_parm != data->stack_parm)
3498 rtx src, dest;
3499 rtx from_expand = NULL_RTX;
3501 if (data->stack_parm == 0)
3503 from_expand = rtl_for_parm (all, parm);
3504 if (from_expand)
3505 gcc_assert (GET_MODE (from_expand) == GET_MODE (data->entry_parm));
3506 if (from_expand && !parm_in_unassigned_mem_p (parm, from_expand))
3507 data->stack_parm = from_expand;
3510 if (data->stack_parm == 0)
3512 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3513 GET_MODE (data->entry_parm),
3514 TYPE_ALIGN (data->passed_type));
3515 data->stack_parm
3516 = assign_stack_local (GET_MODE (data->entry_parm),
3517 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3518 align);
3519 if (!from_expand)
3520 set_mem_attributes (data->stack_parm, parm, 1);
3521 else
3523 gcc_assert (GET_CODE (data->stack_parm) == MEM);
3524 gcc_assert (parm_in_unassigned_mem_p (parm, from_expand));
3525 XEXP (from_expand, 0) = XEXP (data->stack_parm, 0);
3526 PUT_MODE (from_expand, GET_MODE (data->stack_parm));
3527 data->stack_parm = copy_rtx (from_expand);
3531 dest = validize_mem (copy_rtx (data->stack_parm));
3532 src = validize_mem (copy_rtx (data->entry_parm));
3534 if (MEM_P (src))
3536 /* Use a block move to handle potentially misaligned entry_parm. */
3537 if (!to_conversion)
3538 push_to_sequence2 (all->first_conversion_insn,
3539 all->last_conversion_insn);
3540 to_conversion = true;
3542 emit_block_move (dest, src,
3543 GEN_INT (int_size_in_bytes (data->passed_type)),
3544 BLOCK_OP_NORMAL);
3546 else
3547 emit_move_insn (dest, src);
3550 if (to_conversion)
3552 all->first_conversion_insn = get_insns ();
3553 all->last_conversion_insn = get_last_insn ();
3554 end_sequence ();
3557 SET_DECL_RTL (parm, data->stack_parm);
3560 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3561 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3563 static void
3564 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3565 vec<tree> fnargs)
3567 tree parm;
3568 tree orig_fnargs = all->orig_fnargs;
3569 unsigned i = 0;
3571 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3573 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3574 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3576 rtx tmp, real, imag;
3577 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3579 real = DECL_RTL (fnargs[i]);
3580 imag = DECL_RTL (fnargs[i + 1]);
3581 if (inner != GET_MODE (real))
3583 real = simplify_gen_subreg (inner, real, GET_MODE (real),
3584 subreg_lowpart_offset
3585 (inner, GET_MODE (real)));
3586 imag = simplify_gen_subreg (inner, imag, GET_MODE (imag),
3587 subreg_lowpart_offset
3588 (inner, GET_MODE (imag)));
3591 if ((tmp = rtl_for_parm (all, parm)) != NULL_RTX
3592 && rtx_equal_p (real,
3593 read_complex_part (tmp, false))
3594 && rtx_equal_p (imag,
3595 read_complex_part (tmp, true)))
3596 ; /* We now have the right rtl in tmp. */
3597 else if (TREE_ADDRESSABLE (parm))
3599 rtx rmem, imem;
3600 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3601 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3602 DECL_MODE (parm),
3603 TYPE_ALIGN (TREE_TYPE (parm)));
3605 /* split_complex_arg put the real and imag parts in
3606 pseudos. Move them to memory. */
3607 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3608 set_mem_attributes (tmp, parm, 1);
3609 rmem = adjust_address_nv (tmp, inner, 0);
3610 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3611 push_to_sequence2 (all->first_conversion_insn,
3612 all->last_conversion_insn);
3613 emit_move_insn (rmem, real);
3614 emit_move_insn (imem, imag);
3615 all->first_conversion_insn = get_insns ();
3616 all->last_conversion_insn = get_last_insn ();
3617 end_sequence ();
3619 else
3620 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3621 SET_DECL_RTL (parm, tmp);
3623 real = DECL_INCOMING_RTL (fnargs[i]);
3624 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3625 if (inner != GET_MODE (real))
3627 real = gen_lowpart_SUBREG (inner, real);
3628 imag = gen_lowpart_SUBREG (inner, imag);
3630 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3631 set_decl_incoming_rtl (parm, tmp, false);
3632 i++;
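/* Example of the recombination (illustrative): if `_Complex double z' was
   split into two DFmode parms whose DECL_RTLs are, say, (reg:DF 100) and
   (reg:DF 101), the original PARM_DECL gets
   (concat:DC (reg:DF 100) (reg:DF 101)) as its DECL_RTL, unless the parm is
   TREE_ADDRESSABLE, in which case the two parts are spilled into a single
   DCmode stack slot instead.  */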
3637 /* Load bounds of PARM from bounds table. */
3638 static void
3639 assign_parm_load_bounds (struct assign_parm_data_one *data,
3640 tree parm,
3641 rtx entry,
3642 unsigned bound_no)
3644 bitmap_iterator bi;
3645 unsigned i, offs = 0;
3646 int bnd_no = -1;
3647 rtx slot = NULL, ptr = NULL;
3649 if (parm)
3651 bitmap slots;
3652 bitmap_obstack_initialize (NULL);
3653 slots = BITMAP_ALLOC (NULL);
3654 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3655 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3657 if (bound_no)
3658 bound_no--;
3659 else
3661 bnd_no = i;
3662 break;
3665 BITMAP_FREE (slots);
3666 bitmap_obstack_release (NULL);
3669 /* We may have bounds not associated with any pointer. */
3670 if (bnd_no != -1)
3671 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3673 /* Find associated pointer. */
3674 if (bnd_no == -1)
3676 /* If bounds are not associated with any pointer,
3677 then they are passed in a register or special slot. */
3678 gcc_assert (data->entry_parm);
3679 ptr = const0_rtx;
3681 else if (MEM_P (entry))
3682 slot = adjust_address (entry, Pmode, offs);
3683 else if (REG_P (entry))
3684 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3685 else if (GET_CODE (entry) == PARALLEL)
3686 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3687 else
3688 gcc_unreachable ();
3689 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3690 data->entry_parm);
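/* As a concrete illustration of the offset computation above: with
   64-bit pointers (POINTER_SIZE == 64), a bound slot number of 2 gives
   offs = 2 * 64 / 8 = 16, so for a parameter whose entry rtl is a MEM
   the bounds are loaded from a slot 16 bytes into that memory, while
   for a REG entry they come from the register numbered
   REGNO (entry) + 2.  The numbers are only illustrative; POINTER_SIZE
   is target-dependent.  */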
3693 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3695 static void
3696 assign_bounds (vec<bounds_parm_data> &bndargs,
3697 struct assign_parm_data_all &all,
3698 bool assign_regs, bool assign_special,
3699 bool assign_bt)
3701 unsigned i, pass;
3702 bounds_parm_data *pbdata;
3704 if (!bndargs.exists ())
3705 return;
3707 /* We make a few passes to store input bounds. First we handle bounds
3708 passed in registers. After that we load bounds passed in special
3709 slots. Finally we load bounds from the Bounds Table. */
3710 for (pass = 0; pass < 3; pass++)
3711 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3713 /* Pass 0 => regs only. */
3714 if (pass == 0
3715 && (!assign_regs
3716 || (!pbdata->parm_data.entry_parm
3717 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3718 continue;
3719 /* Pass 1 => slots only. */
3720 else if (pass == 1
3721 && (!assign_special
3722 || (!pbdata->parm_data.entry_parm
3723 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3724 continue;
3725 /* Pass 2 => BT only. */
3726 else if (pass == 2
3727 && (!assign_bt
3728 || pbdata->parm_data.entry_parm))
3729 continue;
3731 if (!pbdata->parm_data.entry_parm
3732 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3733 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3734 pbdata->ptr_entry, pbdata->bound_no);
3736 set_decl_incoming_rtl (pbdata->bounds_parm,
3737 pbdata->parm_data.entry_parm, false);
3739 if (assign_parm_setup_block_p (&pbdata->parm_data))
3740 assign_parm_setup_block (&all, pbdata->bounds_parm,
3741 &pbdata->parm_data);
3742 else if (pbdata->parm_data.passed_pointer
3743 || use_register_for_parm_decl (&all, pbdata->bounds_parm))
3744 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3745 &pbdata->parm_data);
3746 else
3747 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3748 &pbdata->parm_data);
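/* A minimal sketch of how the passes above combine: after the main
   parameter loop, assign_parms calls

     assign_bounds (bndargs, all, true, true, true);

   which handles register-passed bounds first, then bounds in special
   slots, then bounds loaded from the Bounds Table, in that order.  The
   varargs path instead splits this into two calls (registers first,
   then slots and Bounds Table) around setup_incoming_vararg_bounds.  */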
3752 /* Assign RTL expressions to the function's parameters. This may involve
3753 copying them into registers and using those registers as the DECL_RTL. */
3755 static void
3756 assign_parms (tree fndecl)
3758 struct assign_parm_data_all all;
3759 tree parm;
3760 vec<tree> fnargs;
3761 unsigned i, bound_no = 0;
3762 tree last_arg = NULL;
3763 rtx last_arg_entry = NULL;
3764 vec<bounds_parm_data> bndargs = vNULL;
3765 bounds_parm_data bdata;
3767 crtl->args.internal_arg_pointer
3768 = targetm.calls.internal_arg_pointer ();
3770 assign_parms_initialize_all (&all);
3771 fnargs = assign_parms_augmented_arg_list (&all);
3773 FOR_EACH_VEC_ELT (fnargs, i, parm)
3775 struct assign_parm_data_one data;
3777 /* Extract the type of PARM; adjust it according to ABI. */
3778 assign_parm_find_data_types (&all, parm, &data);
3780 /* Early out for errors and void parameters. */
3781 if (data.passed_mode == VOIDmode)
3783 SET_DECL_RTL (parm, const0_rtx);
3784 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3785 continue;
3787 else
3788 maybe_reset_rtl_for_parm (parm);
3790 /* Estimate stack alignment from parameter alignment. */
3791 if (SUPPORTS_STACK_ALIGNMENT)
3793 unsigned int align
3794 = targetm.calls.function_arg_boundary (data.promoted_mode,
3795 data.passed_type);
3796 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3797 align);
3798 if (TYPE_ALIGN (data.nominal_type) > align)
3799 align = MINIMUM_ALIGNMENT (data.nominal_type,
3800 TYPE_MODE (data.nominal_type),
3801 TYPE_ALIGN (data.nominal_type));
3802 if (crtl->stack_alignment_estimated < align)
3804 gcc_assert (!crtl->stack_realign_processed);
3805 crtl->stack_alignment_estimated = align;
3809 /* Find out where the parameter arrives in this function. */
3810 assign_parm_find_entry_rtl (&all, &data);
3812 /* Find out where stack space for this parameter might be. */
3813 if (assign_parm_is_stack_parm (&all, &data))
3815 assign_parm_find_stack_rtl (parm, &data);
3816 assign_parm_adjust_entry_rtl (&data);
3818 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3820 /* Remember where the last non-bounds arg was passed in case
3821 we have to load its associated bounds from the Bounds
3822 Table. */
3823 last_arg = parm;
3824 last_arg_entry = data.entry_parm;
3825 bound_no = 0;
3827 /* Record permanently how this parm was passed. */
3828 if (data.passed_pointer)
3830 rtx incoming_rtl
3831 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3832 data.entry_parm);
3833 set_decl_incoming_rtl (parm, incoming_rtl, true);
3835 else
3836 set_decl_incoming_rtl (parm, data.entry_parm, false);
3838 assign_parm_adjust_stack_rtl (&all, parm, &data);
3840 /* Bounds should be loaded in a particular order so that
3841 registers are allocated correctly. Collect info about
3842 input bounds and load them later. */
3843 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3845 /* Expect bounds in instrumented functions only. */
3846 gcc_assert (chkp_function_instrumented_p (fndecl));
3848 bdata.parm_data = data;
3849 bdata.bounds_parm = parm;
3850 bdata.ptr_parm = last_arg;
3851 bdata.ptr_entry = last_arg_entry;
3852 bdata.bound_no = bound_no;
3853 bndargs.safe_push (bdata);
3855 else
3857 if (assign_parm_setup_block_p (&data))
3858 assign_parm_setup_block (&all, parm, &data);
3859 else if (data.passed_pointer
3860 || use_register_for_parm_decl (&all, parm))
3861 assign_parm_setup_reg (&all, parm, &data);
3862 else
3863 assign_parm_setup_stack (&all, parm, &data);
3866 if (cfun->stdarg && !DECL_CHAIN (parm))
3868 int pretend_bytes = 0;
3870 assign_parms_setup_varargs (&all, &data, false);
3872 if (chkp_function_instrumented_p (fndecl))
3874 /* We expect this to be the last parm. Otherwise it would be wrong
3875 to assign bounds right now. */
3876 gcc_assert (i == (fnargs.length () - 1));
3877 assign_bounds (bndargs, all, true, false, false);
3878 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3879 data.promoted_mode,
3880 data.passed_type,
3881 &pretend_bytes,
3882 false);
3883 assign_bounds (bndargs, all, false, true, true);
3884 bndargs.release ();
3888 /* Update info on where next arg arrives in registers. */
3889 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3890 data.passed_type, data.named_arg);
3892 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3893 bound_no++;
3896 assign_bounds (bndargs, all, true, true, true);
3897 bndargs.release ();
3899 if (targetm.calls.split_complex_arg)
3900 assign_parms_unsplit_complex (&all, fnargs);
3902 fnargs.release ();
3904 /* Output all parameter conversion instructions (possibly including calls)
3905 now that all parameters have been copied out of hard registers. */
3906 emit_insn (all.first_conversion_insn);
3908 /* Estimate reload stack alignment from scalar return mode. */
3909 if (SUPPORTS_STACK_ALIGNMENT)
3911 if (DECL_RESULT (fndecl))
3913 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3914 machine_mode mode = TYPE_MODE (type);
3916 if (mode != BLKmode
3917 && mode != VOIDmode
3918 && !AGGREGATE_TYPE_P (type))
3920 unsigned int align = GET_MODE_ALIGNMENT (mode);
3921 if (crtl->stack_alignment_estimated < align)
3923 gcc_assert (!crtl->stack_realign_processed);
3924 crtl->stack_alignment_estimated = align;
3930 /* If we are receiving a struct value address as the first argument, set up
3931 the RTL for the function result. As this might require code to convert
3932 the transmitted address to Pmode, we do this here to ensure that possible
3933 preliminary conversions of the address have been emitted already. */
3934 if (all.function_result_decl)
3936 tree result = DECL_RESULT (current_function_decl);
3937 rtx addr = DECL_RTL (all.function_result_decl);
3938 rtx x;
3940 if (DECL_BY_REFERENCE (result))
3942 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3943 x = addr;
3945 else
3947 SET_DECL_VALUE_EXPR (result,
3948 build1 (INDIRECT_REF, TREE_TYPE (result),
3949 all.function_result_decl));
3950 addr = convert_memory_address (Pmode, addr);
3951 x = gen_rtx_MEM (DECL_MODE (result), addr);
3952 set_mem_attributes (x, result, 1);
3955 DECL_HAS_VALUE_EXPR_P (result) = 1;
3957 SET_DECL_RTL (result, x);
3960 /* We have aligned all the args, so add space for the pretend args. */
3961 crtl->args.pretend_args_size = all.pretend_args_size;
3962 all.stack_args_size.constant += all.extra_pretend_bytes;
3963 crtl->args.size = all.stack_args_size.constant;
3965 /* Adjust function incoming argument size for alignment and
3966 minimum length. */
3968 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3969 crtl->args.size = CEIL_ROUND (crtl->args.size,
3970 PARM_BOUNDARY / BITS_PER_UNIT);
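/* For instance, with PARM_BOUNDARY == 32 this rounds a 10-byte
   argument block up to 12 bytes: CEIL_ROUND (10, 4) == 12.  The
   actual boundary is of course target-dependent.  */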
3972 if (ARGS_GROW_DOWNWARD)
3974 crtl->args.arg_offset_rtx
3975 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3976 : expand_expr (size_diffop (all.stack_args_size.var,
3977 size_int (-all.stack_args_size.constant)),
3978 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3980 else
3981 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3983 /* See how many bytes, if any, of its args a function should try to pop
3984 on return. */
3986 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3987 TREE_TYPE (fndecl),
3988 crtl->args.size);
3990 /* For a stdarg.h function, save info about
3991 the regs and stack space used by the named args. */
3993 crtl->args.info = all.args_so_far_v;
3995 /* Set the rtx used for the function return value. Put this in its
3996 own variable so any optimizers that need this information don't have
3997 to include tree.h. Do this here so it gets done when an inlined
3998 function gets output. */
4000 crtl->return_rtx
4001 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
4002 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
4004 /* If scalar return value was computed in a pseudo-reg, or was a named
4005 return value that got dumped to the stack, copy that to the hard
4006 return register. */
4007 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
4009 tree decl_result = DECL_RESULT (fndecl);
4010 rtx decl_rtl = DECL_RTL (decl_result);
4012 if (REG_P (decl_rtl)
4013 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4014 : DECL_REGISTER (decl_result))
4016 rtx real_decl_rtl;
4018 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
4019 fndecl, true);
4020 if (chkp_function_instrumented_p (fndecl))
4021 crtl->return_bnd
4022 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
4023 fndecl, true);
4024 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
4025 /* The delay slot scheduler assumes that crtl->return_rtx
4026 holds the hard register containing the return value, not a
4027 temporary pseudo. */
4028 crtl->return_rtx = real_decl_rtl;
4033 /* A subroutine of gimplify_parameters, invoked via walk_tree.
4034 For all seen types, gimplify their sizes. */
4036 static tree
4037 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
4039 tree t = *tp;
4041 *walk_subtrees = 0;
4042 if (TYPE_P (t))
4044 if (POINTER_TYPE_P (t))
4045 *walk_subtrees = 1;
4046 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
4047 && !TYPE_SIZES_GIMPLIFIED (t))
4049 gimplify_type_sizes (t, (gimple_seq *) data);
4050 *walk_subtrees = 1;
4054 return NULL;
4057 /* Gimplify the parameter list for current_function_decl. This involves
4058 evaluating SAVE_EXPRs of variable sized parameters and generating code
4059 to implement callee-copies reference parameters. Returns a sequence of
4060 statements to add to the beginning of the function. */
4062 gimple_seq
4063 gimplify_parameters (void)
4065 struct assign_parm_data_all all;
4066 tree parm;
4067 gimple_seq stmts = NULL;
4068 vec<tree> fnargs;
4069 unsigned i;
4071 assign_parms_initialize_all (&all);
4072 fnargs = assign_parms_augmented_arg_list (&all);
4074 FOR_EACH_VEC_ELT (fnargs, i, parm)
4076 struct assign_parm_data_one data;
4078 /* Extract the type of PARM; adjust it according to ABI. */
4079 assign_parm_find_data_types (&all, parm, &data);
4081 /* Early out for errors and void parameters. */
4082 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
4083 continue;
4085 /* Update info on where next arg arrives in registers. */
4086 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
4087 data.passed_type, data.named_arg);
4089 /* ??? Once upon a time variable_size stuffed parameter list
4090 SAVE_EXPRs (amongst others) onto a pending sizes list. This
4091 turned out to be less than manageable in the gimple world.
4092 Now we have to hunt them down ourselves. */
4093 walk_tree_without_duplicates (&data.passed_type,
4094 gimplify_parm_type, &stmts);
4096 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4098 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4099 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4102 if (data.passed_pointer)
4104 tree type = TREE_TYPE (data.passed_type);
4105 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4106 type, data.named_arg))
4108 tree local, t;
4110 /* For constant-sized objects, this is trivial; for
4111 variable-sized objects, we have to play games. */
4112 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4113 && !(flag_stack_check == GENERIC_STACK_CHECK
4114 && compare_tree_int (DECL_SIZE_UNIT (parm),
4115 STACK_CHECK_MAX_VAR_SIZE) > 0))
4117 local = create_tmp_var (type, get_name (parm));
4118 DECL_IGNORED_P (local) = 0;
4119 /* If PARM was addressable, move that flag over
4120 to the local copy, as its address will be taken,
4121 not the PARM's. Keep the parm marked addressable,
4122 as we'll query that flag during gimplification. */
4123 if (TREE_ADDRESSABLE (parm))
4124 TREE_ADDRESSABLE (local) = 1;
4125 else if (TREE_CODE (type) == COMPLEX_TYPE
4126 || TREE_CODE (type) == VECTOR_TYPE)
4127 DECL_GIMPLE_REG_P (local) = 1;
4129 else
4131 tree ptr_type, addr;
4133 ptr_type = build_pointer_type (type);
4134 addr = create_tmp_reg (ptr_type, get_name (parm));
4135 DECL_IGNORED_P (addr) = 0;
4136 local = build_fold_indirect_ref (addr);
4138 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4139 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
4140 size_int (DECL_ALIGN (parm)));
4142 /* The call has been built for a variable-sized object. */
4143 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4144 t = fold_convert (ptr_type, t);
4145 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4146 gimplify_and_add (t, &stmts);
4149 gimplify_assign (local, parm, &stmts);
4151 SET_DECL_VALUE_EXPR (parm, local);
4152 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4157 fnargs.release ();
4159 return stmts;
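/* As a rough illustration of the callee-copy path above: for a
   variable-sized parameter PARM passed by reference on a target where
   reference_callee_copied is true, the statements pushed onto STMTS
   amount to something like

     addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (parm),
                                         DECL_ALIGN (parm));
     *addr = parm;   /* copy the incoming object */

   after which PARM's DECL_VALUE_EXPR is *addr, so later references to
   the parameter use the local copy.  For constant-sized parameters a
   plain temporary is used instead of the alloca.  */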
4162 /* Compute the size and offset from the start of the stacked arguments for a
4163 parm passed in mode PASSED_MODE and with type TYPE.
4165 INITIAL_OFFSET_PTR points to the current offset into the stacked
4166 arguments.
4168 The starting offset and size for this parm are returned in
4169 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
4170 nonzero, the offset is that of the stack slot, which is returned in
4171 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4172 padding required from the initial offset ptr to the stack slot.
4174 IN_REGS is nonzero if the argument will be passed in registers. It will
4175 never be set if REG_PARM_STACK_SPACE is not defined.
4177 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4178 for arguments which are passed in registers.
4180 FNDECL is the function in which the argument was defined.
4182 There are two types of rounding that are done. The first, controlled by
4183 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4184 argument list to be aligned to a specific boundary (in bits). This
4185 rounding affects the initial and starting offsets, but not the argument
4186 size.
4188 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4189 optionally rounds the size of the parm to PARM_BOUNDARY. The
4190 initial offset is not affected by this rounding, while the size always
4191 is and the starting offset may be. */
4193 /* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
4194 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4195 callers pass in the total size of args so far as
4196 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
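/* A worked example of the two roundings, assuming upward-growing stack
   arguments, STACK_POINTER_OFFSET of 0, a purely stack-passed 10-byte
   BLKmode argument padded upward, an incoming offset of 20 bytes, and
   argument and round boundaries of 64 bits: the starting offset is
   first aligned, CEIL_ROUND (20, 8) == 24, so LOCATE->SLOT_OFFSET
   becomes 24, while the size is rounded to CEIL_ROUND (10, 8) == 16,
   so LOCATE->SIZE becomes 16.  The 20-byte initial offset is moved
   only by the first rounding, not by the size rounding.  All of these
   numbers are purely illustrative; the boundaries are target hooks.  */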
4198 void
4199 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4200 int reg_parm_stack_space, int partial,
4201 tree fndecl ATTRIBUTE_UNUSED,
4202 struct args_size *initial_offset_ptr,
4203 struct locate_and_pad_arg_data *locate)
4205 tree sizetree;
4206 enum direction where_pad;
4207 unsigned int boundary, round_boundary;
4208 int part_size_in_regs;
4210 /* If we have found a stack parm before we reach the end of the
4211 area reserved for registers, skip that area. */
4212 if (! in_regs)
4214 if (reg_parm_stack_space > 0)
4216 if (initial_offset_ptr->var)
4218 initial_offset_ptr->var
4219 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4220 ssize_int (reg_parm_stack_space));
4221 initial_offset_ptr->constant = 0;
4223 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4224 initial_offset_ptr->constant = reg_parm_stack_space;
4228 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4230 sizetree
4231 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4232 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4233 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4234 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4235 type);
4236 locate->where_pad = where_pad;
4238 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4239 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4240 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4242 locate->boundary = boundary;
4244 if (SUPPORTS_STACK_ALIGNMENT)
4246 /* stack_alignment_estimated can't change after stack has been
4247 realigned. */
4248 if (crtl->stack_alignment_estimated < boundary)
4250 if (!crtl->stack_realign_processed)
4251 crtl->stack_alignment_estimated = boundary;
4252 else
4254 /* If stack is realigned and stack alignment value
4255 hasn't been finalized, it is OK not to increase
4256 stack_alignment_estimated. The bigger alignment
4257 requirement is recorded in stack_alignment_needed
4258 below. */
4259 gcc_assert (!crtl->stack_realign_finalized
4260 && crtl->stack_realign_needed);
4265 /* Remember if the outgoing parameter requires extra alignment on the
4266 calling function side. */
4267 if (crtl->stack_alignment_needed < boundary)
4268 crtl->stack_alignment_needed = boundary;
4269 if (crtl->preferred_stack_boundary < boundary)
4270 crtl->preferred_stack_boundary = boundary;
4272 if (ARGS_GROW_DOWNWARD)
4274 locate->slot_offset.constant = -initial_offset_ptr->constant;
4275 if (initial_offset_ptr->var)
4276 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4277 initial_offset_ptr->var);
4280 tree s2 = sizetree;
4281 if (where_pad != none
4282 && (!tree_fits_uhwi_p (sizetree)
4283 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4284 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4285 SUB_PARM_SIZE (locate->slot_offset, s2);
4288 locate->slot_offset.constant += part_size_in_regs;
4290 if (!in_regs || reg_parm_stack_space > 0)
4291 pad_to_arg_alignment (&locate->slot_offset, boundary,
4292 &locate->alignment_pad);
4294 locate->size.constant = (-initial_offset_ptr->constant
4295 - locate->slot_offset.constant);
4296 if (initial_offset_ptr->var)
4297 locate->size.var = size_binop (MINUS_EXPR,
4298 size_binop (MINUS_EXPR,
4299 ssize_int (0),
4300 initial_offset_ptr->var),
4301 locate->slot_offset.var);
4303 /* Pad_below needs the pre-rounded size to know how much to pad
4304 below. */
4305 locate->offset = locate->slot_offset;
4306 if (where_pad == downward)
4307 pad_below (&locate->offset, passed_mode, sizetree);
4310 else
4312 if (!in_regs || reg_parm_stack_space > 0)
4313 pad_to_arg_alignment (initial_offset_ptr, boundary,
4314 &locate->alignment_pad);
4315 locate->slot_offset = *initial_offset_ptr;
4317 #ifdef PUSH_ROUNDING
4318 if (passed_mode != BLKmode)
4319 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4320 #endif
4322 /* Pad_below needs the pre-rounded size to know how much to pad below
4323 so this must be done before rounding up. */
4324 locate->offset = locate->slot_offset;
4325 if (where_pad == downward)
4326 pad_below (&locate->offset, passed_mode, sizetree);
4328 if (where_pad != none
4329 && (!tree_fits_uhwi_p (sizetree)
4330 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4331 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4333 ADD_PARM_SIZE (locate->size, sizetree);
4335 locate->size.constant -= part_size_in_regs;
4338 #ifdef FUNCTION_ARG_OFFSET
4339 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4340 #endif
4343 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4344 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4346 static void
4347 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4348 struct args_size *alignment_pad)
4350 tree save_var = NULL_TREE;
4351 HOST_WIDE_INT save_constant = 0;
4352 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4353 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4355 #ifdef SPARC_STACK_BOUNDARY_HACK
4356 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4357 the real alignment of %sp. However, when it does this, the
4358 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4359 if (SPARC_STACK_BOUNDARY_HACK)
4360 sp_offset = 0;
4361 #endif
4363 if (boundary > PARM_BOUNDARY)
4365 save_var = offset_ptr->var;
4366 save_constant = offset_ptr->constant;
4369 alignment_pad->var = NULL_TREE;
4370 alignment_pad->constant = 0;
4372 if (boundary > BITS_PER_UNIT)
4374 if (offset_ptr->var)
4376 tree sp_offset_tree = ssize_int (sp_offset);
4377 tree offset = size_binop (PLUS_EXPR,
4378 ARGS_SIZE_TREE (*offset_ptr),
4379 sp_offset_tree);
4380 tree rounded;
4381 if (ARGS_GROW_DOWNWARD)
4382 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4383 else
4384 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4386 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4387 /* ARGS_SIZE_TREE includes constant term. */
4388 offset_ptr->constant = 0;
4389 if (boundary > PARM_BOUNDARY)
4390 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4391 save_var);
4393 else
4395 offset_ptr->constant = -sp_offset +
4396 (ARGS_GROW_DOWNWARD
4397 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4398 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4400 if (boundary > PARM_BOUNDARY)
4401 alignment_pad->constant = offset_ptr->constant - save_constant;
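/* For instance, with a constant offset of 20, STACK_POINTER_OFFSET of
   0, a boundary of 64 bits (8 bytes) and upward-growing arguments, the
   constant becomes CEIL_ROUND (20, 8) == 24; if PARM_BOUNDARY is only
   32 bits, ALIGNMENT_PAD records the 4 bytes of padding introduced.
   With downward-growing arguments FLOOR_ROUND is used instead and 20
   would become 16.  */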
4406 static void
4407 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4409 if (passed_mode != BLKmode)
4411 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4412 offset_ptr->constant
4413 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4414 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4415 - GET_MODE_SIZE (passed_mode));
4417 else
4419 if (TREE_CODE (sizetree) != INTEGER_CST
4420 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4422 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4423 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4424 /* Add it in. */
4425 ADD_PARM_SIZE (*offset_ptr, s2);
4426 SUB_PARM_SIZE (*offset_ptr, sizetree);
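/* For example, on a typical target an HImode argument (2 bytes) with
   PARM_BOUNDARY == 32 occupies a 4-byte slot, so 4 - 2 = 2 bytes of
   padding are added below it; a 10-byte BLKmode argument in the same
   configuration gets round_up (10, 4) - 10 = 2 bytes.  Padding is only
   added when the size is not already a multiple of PARM_BOUNDARY.  */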
4432 /* True if register REGNO was alive at a place where `setjmp' was
4433 called and was set more than once or is an argument. Such regs may
4434 be clobbered by `longjmp'. */
4436 static bool
4437 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4439 /* There appear to be cases where some local vars never reach the
4440 backend but have bogus regnos. */
4441 if (regno >= max_reg_num ())
4442 return false;
4444 return ((REG_N_SETS (regno) > 1
4445 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4446 regno))
4447 && REGNO_REG_SET_P (setjmp_crosses, regno));
4450 /* Walk the tree of blocks describing the binding levels within a
4451 function and warn about variables that might be killed by setjmp or
4452 vfork. This is done after flow analysis and before register
4453 allocation, since register allocation will map the pseudo-regs
4454 to hard regs. */
4456 static void
4457 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4459 tree decl, sub;
4461 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4463 if (TREE_CODE (decl) == VAR_DECL
4464 && DECL_RTL_SET_P (decl)
4465 && REG_P (DECL_RTL (decl))
4466 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4467 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4468 " %<longjmp%> or %<vfork%>", decl);
4471 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4472 setjmp_vars_warning (setjmp_crosses, sub);
4475 /* Do the appropriate part of setjmp_vars_warning
4476 but for arguments instead of local variables. */
4478 static void
4479 setjmp_args_warning (bitmap setjmp_crosses)
4481 tree decl;
4482 for (decl = DECL_ARGUMENTS (current_function_decl);
4483 decl; decl = DECL_CHAIN (decl))
4484 if (DECL_RTL (decl) != 0
4485 && REG_P (DECL_RTL (decl))
4486 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4487 warning (OPT_Wclobbered,
4488 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4489 decl);
4492 /* Generate warning messages for variables live across setjmp. */
4494 void
4495 generate_setjmp_warnings (void)
4497 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4499 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4500 || bitmap_empty_p (setjmp_crosses))
4501 return;
4503 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4504 setjmp_args_warning (setjmp_crosses);
4508 /* Reverse the order of elements in the fragment chain T of blocks,
4509 and return the new head of the chain (old last element).
4510 In addition, clear BLOCK_SAME_RANGE flags when needed
4511 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4512 its super fragment origin. */
4514 static tree
4515 block_fragments_nreverse (tree t)
4517 tree prev = 0, block, next, prev_super = 0;
4518 tree super = BLOCK_SUPERCONTEXT (t);
4519 if (BLOCK_FRAGMENT_ORIGIN (super))
4520 super = BLOCK_FRAGMENT_ORIGIN (super);
4521 for (block = t; block; block = next)
4523 next = BLOCK_FRAGMENT_CHAIN (block);
4524 BLOCK_FRAGMENT_CHAIN (block) = prev;
4525 if ((prev && !BLOCK_SAME_RANGE (prev))
4526 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4527 != prev_super))
4528 BLOCK_SAME_RANGE (block) = 0;
4529 prev_super = BLOCK_SUPERCONTEXT (block);
4530 BLOCK_SUPERCONTEXT (block) = super;
4531 prev = block;
4533 t = BLOCK_FRAGMENT_ORIGIN (t);
4534 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4535 != prev_super)
4536 BLOCK_SAME_RANGE (t) = 0;
4537 BLOCK_SUPERCONTEXT (t) = super;
4538 return prev;
4541 /* Reverse the order of elements in the chain T of blocks,
4542 and return the new head of the chain (old last element).
4543 Also do the same on subblocks and reverse the order of elements
4544 in BLOCK_FRAGMENT_CHAIN as well. */
4546 static tree
4547 blocks_nreverse_all (tree t)
4549 tree prev = 0, block, next;
4550 for (block = t; block; block = next)
4552 next = BLOCK_CHAIN (block);
4553 BLOCK_CHAIN (block) = prev;
4554 if (BLOCK_FRAGMENT_CHAIN (block)
4555 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4557 BLOCK_FRAGMENT_CHAIN (block)
4558 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4559 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4560 BLOCK_SAME_RANGE (block) = 0;
4562 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4563 prev = block;
4565 return prev;
4569 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4570 and create duplicate blocks. */
4571 /* ??? Need an option to either create block fragments or to create
4572 abstract origin duplicates of a source block. It really depends
4573 on what optimization has been performed. */
4575 void
4576 reorder_blocks (void)
4578 tree block = DECL_INITIAL (current_function_decl);
4580 if (block == NULL_TREE)
4581 return;
4583 auto_vec<tree, 10> block_stack;
4585 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4586 clear_block_marks (block);
4588 /* Prune the old trees away, so that they don't get in the way. */
4589 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4590 BLOCK_CHAIN (block) = NULL_TREE;
4592 /* Recreate the block tree from the note nesting. */
4593 reorder_blocks_1 (get_insns (), block, &block_stack);
4594 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4597 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4599 void
4600 clear_block_marks (tree block)
4602 while (block)
4604 TREE_ASM_WRITTEN (block) = 0;
4605 clear_block_marks (BLOCK_SUBBLOCKS (block));
4606 block = BLOCK_CHAIN (block);
4610 static void
4611 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4612 vec<tree> *p_block_stack)
4614 rtx_insn *insn;
4615 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4617 for (insn = insns; insn; insn = NEXT_INSN (insn))
4619 if (NOTE_P (insn))
4621 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4623 tree block = NOTE_BLOCK (insn);
4624 tree origin;
4626 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4627 origin = block;
4629 if (prev_end)
4630 BLOCK_SAME_RANGE (prev_end) = 0;
4631 prev_end = NULL_TREE;
4633 /* If we have seen this block before, that means it now
4634 spans multiple address regions. Create a new fragment. */
4635 if (TREE_ASM_WRITTEN (block))
4637 tree new_block = copy_node (block);
4639 BLOCK_SAME_RANGE (new_block) = 0;
4640 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4641 BLOCK_FRAGMENT_CHAIN (new_block)
4642 = BLOCK_FRAGMENT_CHAIN (origin);
4643 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4645 NOTE_BLOCK (insn) = new_block;
4646 block = new_block;
4649 if (prev_beg == current_block && prev_beg)
4650 BLOCK_SAME_RANGE (block) = 1;
4652 prev_beg = origin;
4654 BLOCK_SUBBLOCKS (block) = 0;
4655 TREE_ASM_WRITTEN (block) = 1;
4656 /* When there's only one block for the entire function,
4657 current_block == block and we mustn't do this; it
4658 would cause infinite recursion. */
4659 if (block != current_block)
4661 tree super;
4662 if (block != origin)
4663 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4664 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4665 (origin))
4666 == current_block);
4667 if (p_block_stack->is_empty ())
4668 super = current_block;
4669 else
4671 super = p_block_stack->last ();
4672 gcc_assert (super == current_block
4673 || BLOCK_FRAGMENT_ORIGIN (super)
4674 == current_block);
4676 BLOCK_SUPERCONTEXT (block) = super;
4677 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4678 BLOCK_SUBBLOCKS (current_block) = block;
4679 current_block = origin;
4681 p_block_stack->safe_push (block);
4683 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4685 NOTE_BLOCK (insn) = p_block_stack->pop ();
4686 current_block = BLOCK_SUPERCONTEXT (current_block);
4687 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4688 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4689 prev_beg = NULL_TREE;
4690 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4691 ? NOTE_BLOCK (insn) : NULL_TREE;
4694 else
4696 prev_beg = NULL_TREE;
4697 if (prev_end)
4698 BLOCK_SAME_RANGE (prev_end) = 0;
4699 prev_end = NULL_TREE;
4704 /* Reverse the order of elements in the chain T of blocks,
4705 and return the new head of the chain (old last element). */
4707 tree
4708 blocks_nreverse (tree t)
4710 tree prev = 0, block, next;
4711 for (block = t; block; block = next)
4713 next = BLOCK_CHAIN (block);
4714 BLOCK_CHAIN (block) = prev;
4715 prev = block;
4717 return prev;
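/* For instance, given a chain B1 -> B2 -> B3 linked through
   BLOCK_CHAIN, blocks_nreverse returns B3 with the links rewritten to
   B3 -> B2 -> B1; the reversal is done in place, so the old head B1
   becomes the tail.  */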
4720 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4721 by modifying the last node in chain 1 to point to chain 2. */
4723 tree
4724 block_chainon (tree op1, tree op2)
4726 tree t1;
4728 if (!op1)
4729 return op2;
4730 if (!op2)
4731 return op1;
4733 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4734 continue;
4735 BLOCK_CHAIN (t1) = op2;
4737 #ifdef ENABLE_TREE_CHECKING
4739 tree t2;
4740 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4741 gcc_assert (t2 != t1);
4743 #endif
4745 return op1;
4748 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4749 non-NULL, list them all into VECTOR, in a depth-first preorder
4750 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4751 blocks. */
4753 static int
4754 all_blocks (tree block, tree *vector)
4756 int n_blocks = 0;
4758 while (block)
4760 TREE_ASM_WRITTEN (block) = 0;
4762 /* Record this block. */
4763 if (vector)
4764 vector[n_blocks] = block;
4766 ++n_blocks;
4768 /* Record the subblocks, and their subblocks... */
4769 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4770 vector ? vector + n_blocks : 0);
4771 block = BLOCK_CHAIN (block);
4774 return n_blocks;
4777 /* Return a vector containing all the blocks rooted at BLOCK. The
4778 number of elements in the vector is stored in N_BLOCKS_P. The
4779 vector is dynamically allocated; it is the caller's responsibility
4780 to call `free' on the pointer returned. */
4782 static tree *
4783 get_block_vector (tree block, int *n_blocks_p)
4785 tree *block_vector;
4787 *n_blocks_p = all_blocks (block, NULL);
4788 block_vector = XNEWVEC (tree, *n_blocks_p);
4789 all_blocks (block, block_vector);
4791 return block_vector;
4794 static GTY(()) int next_block_index = 2;
4796 /* Set BLOCK_NUMBER for all the blocks in FN. */
4798 void
4799 number_blocks (tree fn)
4801 int i;
4802 int n_blocks;
4803 tree *block_vector;
4805 /* For SDB and XCOFF debugging output, we start numbering the blocks
4806 from 1 within each function, rather than keeping a running
4807 count. */
4808 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4809 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4810 next_block_index = 1;
4811 #endif
4813 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4815 /* The top-level BLOCK isn't numbered at all. */
4816 for (i = 1; i < n_blocks; ++i)
4817 /* We number the blocks from two. */
4818 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4820 free (block_vector);
4822 return;
4825 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4827 DEBUG_FUNCTION tree
4828 debug_find_var_in_block_tree (tree var, tree block)
4830 tree t;
4832 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4833 if (t == var)
4834 return block;
4836 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4838 tree ret = debug_find_var_in_block_tree (var, t);
4839 if (ret)
4840 return ret;
4843 return NULL_TREE;
4846 /* Keep track of whether we're in a dummy function context. If we are,
4847 we don't want to invoke the set_current_function hook, because we'll
4848 get into trouble if the hook calls target_reinit () recursively or
4849 when the initial initialization is not yet complete. */
4851 static bool in_dummy_function;
4853 /* Invoke the target hook when setting cfun. Update the optimization options
4854 if the function uses different options than the default. */
4856 static void
4857 invoke_set_current_function_hook (tree fndecl)
4859 if (!in_dummy_function)
4861 tree opts = ((fndecl)
4862 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4863 : optimization_default_node);
4865 if (!opts)
4866 opts = optimization_default_node;
4868 /* Change optimization options if needed. */
4869 if (optimization_current_node != opts)
4871 optimization_current_node = opts;
4872 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4875 targetm.set_current_function (fndecl);
4876 this_fn_optabs = this_target_optabs;
4878 if (opts != optimization_default_node)
4880 init_tree_optimization_optabs (opts);
4881 if (TREE_OPTIMIZATION_OPTABS (opts))
4882 this_fn_optabs = (struct target_optabs *)
4883 TREE_OPTIMIZATION_OPTABS (opts);
4888 /* cfun should never be set directly; use this function. */
4890 void
4891 set_cfun (struct function *new_cfun)
4893 if (cfun != new_cfun)
4895 cfun = new_cfun;
4896 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4900 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4902 static vec<function_p> cfun_stack;
4904 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4905 current_function_decl accordingly. */
4907 void
4908 push_cfun (struct function *new_cfun)
4910 gcc_assert ((!cfun && !current_function_decl)
4911 || (cfun && current_function_decl == cfun->decl));
4912 cfun_stack.safe_push (cfun);
4913 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4914 set_cfun (new_cfun);
4917 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4919 void
4920 pop_cfun (void)
4922 struct function *new_cfun = cfun_stack.pop ();
4923 /* When in_dummy_function, we do have a cfun but current_function_decl is
4924 NULL. We also allow pushing NULL cfun and subsequently changing
4925 current_function_decl to something else and having both restored by
4926 pop_cfun. */
4927 gcc_checking_assert (in_dummy_function
4928 || !cfun
4929 || current_function_decl == cfun->decl);
4930 set_cfun (new_cfun);
4931 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
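/* A typical usage pattern for the pair above, when temporarily working
   in the context of another function FNDECL, is roughly:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... inspect or emit code for FNDECL ...
     pop_cfun ();

   which saves and later restores both cfun and current_function_decl.  */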
4934 /* Return the current value of funcdef_no and increment it. */
4935 int
4936 get_next_funcdef_no (void)
4938 return funcdef_no++;
4941 /* Return the current value of funcdef_no. */
4942 int
4943 get_last_funcdef_no (void)
4945 return funcdef_no;
4948 /* Allocate a function structure for FNDECL and set its contents
4949 to the defaults. Set cfun to the newly-allocated object.
4950 Some of the helper functions invoked during initialization assume
4951 that cfun has already been set. Therefore, assign the new object
4952 directly into cfun and invoke the back end hook explicitly at the
4953 very end, rather than initializing a temporary and calling set_cfun
4954 on it.
4956 ABSTRACT_P is true if this is a function that will never be seen by
4957 the middle-end. Such functions are front-end concepts (like C++
4958 function templates) that do not correspond directly to functions
4959 placed in object files. */
4961 void
4962 allocate_struct_function (tree fndecl, bool abstract_p)
4964 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4966 cfun = ggc_cleared_alloc<function> ();
4968 init_eh_for_function ();
4970 if (init_machine_status)
4971 cfun->machine = (*init_machine_status) ();
4973 #ifdef OVERRIDE_ABI_FORMAT
4974 OVERRIDE_ABI_FORMAT (fndecl);
4975 #endif
4977 if (fndecl != NULL_TREE)
4979 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4980 cfun->decl = fndecl;
4981 current_function_funcdef_no = get_next_funcdef_no ();
4984 invoke_set_current_function_hook (fndecl);
4986 if (fndecl != NULL_TREE)
4988 tree result = DECL_RESULT (fndecl);
4989 if (!abstract_p && aggregate_value_p (result, fndecl))
4991 #ifdef PCC_STATIC_STRUCT_RETURN
4992 cfun->returns_pcc_struct = 1;
4993 #endif
4994 cfun->returns_struct = 1;
4997 cfun->stdarg = stdarg_p (fntype);
4999 /* Assume all registers in stdarg functions need to be saved. */
5000 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
5001 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
5003 /* ??? This could be set on a per-function basis by the front-end
5004 but is this worth the hassle? */
5005 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
5006 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
5008 if (!profile_flag && !flag_instrument_function_entry_exit)
5009 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
5013 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
5014 instead of just setting it. */
5016 void
5017 push_struct_function (tree fndecl)
5019 /* When in_dummy_function we might be in the middle of a pop_cfun and
5020 current_function_decl and cfun may not match. */
5021 gcc_assert (in_dummy_function
5022 || (!cfun && !current_function_decl)
5023 || (cfun && current_function_decl == cfun->decl));
5024 cfun_stack.safe_push (cfun);
5025 current_function_decl = fndecl;
5026 allocate_struct_function (fndecl, false);
5029 /* Reset crtl and other non-struct-function variables to defaults as
5030 appropriate for emitting rtl at the start of a function. */
5032 static void
5033 prepare_function_start (void)
5035 gcc_assert (!get_last_insn ());
5036 init_temp_slots ();
5037 init_emit ();
5038 init_varasm_status ();
5039 init_expr ();
5040 default_rtl_profile ();
5042 if (flag_stack_usage_info)
5044 cfun->su = ggc_cleared_alloc<stack_usage> ();
5045 cfun->su->static_stack_size = -1;
5048 cse_not_expected = ! optimize;
5050 /* Caller save not needed yet. */
5051 caller_save_needed = 0;
5053 /* We haven't done register allocation yet. */
5054 reg_renumber = 0;
5056 /* Indicate that we have not instantiated virtual registers yet. */
5057 virtuals_instantiated = 0;
5059 /* Indicate that we want CONCATs now. */
5060 generating_concat_p = 1;
5062 /* Indicate we have no need of a frame pointer yet. */
5063 frame_pointer_needed = 0;
5066 void
5067 push_dummy_function (bool with_decl)
5069 tree fn_decl, fn_type, fn_result_decl;
5071 gcc_assert (!in_dummy_function);
5072 in_dummy_function = true;
5074 if (with_decl)
5076 fn_type = build_function_type_list (void_type_node, NULL_TREE);
5077 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5078 fn_type);
5079 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5080 NULL_TREE, void_type_node);
5081 DECL_RESULT (fn_decl) = fn_result_decl;
5083 else
5084 fn_decl = NULL_TREE;
5086 push_struct_function (fn_decl);
5089 /* Initialize the rtl expansion mechanism so that we can do simple things
5090 like generate sequences. This is used to provide a context during global
5091 initialization of some passes. You must call expand_dummy_function_end
5092 to exit this context. */
5094 void
5095 init_dummy_function_start (void)
5097 push_dummy_function (false);
5098 prepare_function_start ();
5101 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5102 and initialize static variables for generating RTL for the statements
5103 of the function. */
5105 void
5106 init_function_start (tree subr)
5108 if (subr && DECL_STRUCT_FUNCTION (subr))
5109 set_cfun (DECL_STRUCT_FUNCTION (subr));
5110 else
5111 allocate_struct_function (subr, false);
5113 /* Initialize backend, if needed. */
5114 initialize_rtl ();
5116 prepare_function_start ();
5117 decide_function_section (subr);
5119 /* Warn if this value is an aggregate type,
5120 regardless of which calling convention we are using for it. */
5121 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5122 warning (OPT_Waggregate_return, "function returns an aggregate");
5125 /* Expand code to verify the stack_protect_guard. This is invoked at
5126 the end of a function to be protected. */
5128 void
5129 stack_protect_epilogue (void)
5131 tree guard_decl = targetm.stack_protect_guard ();
5132 rtx_code_label *label = gen_label_rtx ();
5133 rtx x, y, tmp;
5134 rtx_insn *seq;
5136 x = expand_normal (crtl->stack_protect_guard);
5137 y = expand_normal (guard_decl);
5139 /* Allow the target to compare Y with X without leaking either into
5140 a register. */
5141 if (targetm.have_stack_protect_test ()
5142 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5143 emit_insn (seq);
5144 else
5145 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5147 /* The noreturn predictor has been moved to the tree level. The rtl-level
5148 predictors estimate this branch at about 20%, which isn't enough to get
5149 things moved out of line. Since this is the only extant case of adding
5150 a noreturn function at the rtl level, it doesn't seem worth doing anything
5151 except adding the prediction by hand. */
5152 tmp = get_last_insn ();
5153 if (JUMP_P (tmp))
5154 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
5156 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5157 free_temp_slots ();
5158 emit_label (label);
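/* The sequence emitted by stack_protect_epilogue above is conceptually
   equivalent to

     if (frame_guard == reference_guard)
       goto label;
     targetm.stack_protect_fail ();   /* often a call to __stack_chk_fail */
   label:

   with the comparison done either by the target's stack_protect_test
   pattern or by an ordinary compare-and-jump.  */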
5161 /* Start the RTL for a new function, and set variables used for
5162 emitting RTL.
5163 SUBR is the FUNCTION_DECL node. */
5167 void
5168 expand_function_start (tree subr)
5170 /* Make sure volatile mem refs aren't considered
5171 valid operands of arithmetic insns. */
5172 init_recog_no_volatile ();
5174 crtl->profile
5175 = (profile_flag
5176 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5178 crtl->limit_stack
5179 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5181 /* Make the label for return statements to jump to. Do not special
5182 case machines with special return instructions -- they will be
5183 handled later during jump, ifcvt, or epilogue creation. */
5184 return_label = gen_label_rtx ();
5186 /* Initialize rtx used to return the value. */
5187 /* Do this before assign_parms so that we copy the struct value address
5188 before any library calls that assign parms might generate. */
5190 /* Decide whether to return the value in memory or in a register. */
5191 tree res = DECL_RESULT (subr);
5192 maybe_reset_rtl_for_parm (res);
5193 if (aggregate_value_p (res, subr))
5195 /* Returning something that won't go in a register. */
5196 rtx value_address = 0;
5198 #ifdef PCC_STATIC_STRUCT_RETURN
5199 if (cfun->returns_pcc_struct)
5201 int size = int_size_in_bytes (TREE_TYPE (res));
5202 value_address = assemble_static_space (size);
5204 else
5205 #endif
5207 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5208 /* Expect to be passed the address of a place to store the value.
5209 If it is passed as an argument, assign_parms will take care of
5210 it. */
5211 if (sv)
5213 if (DECL_BY_REFERENCE (res))
5214 value_address = get_rtl_for_parm_ssa_default_def (res);
5215 if (!value_address)
5216 value_address = gen_reg_rtx (Pmode);
5217 emit_move_insn (value_address, sv);
5220 if (value_address)
5222 rtx x = value_address;
5223 if (!DECL_BY_REFERENCE (res))
5225 x = get_rtl_for_parm_ssa_default_def (res);
5226 if (!x)
5228 x = gen_rtx_MEM (DECL_MODE (res), value_address);
5229 set_mem_attributes (x, res, 1);
5232 SET_DECL_RTL (res, x);
5235 else if (DECL_MODE (res) == VOIDmode)
5236 /* If return mode is void, this decl rtl should not be used. */
5237 SET_DECL_RTL (res, NULL_RTX);
5238 else
5240 /* Compute the return value into a pseudo reg, which we will copy
5241 into the true return register after the cleanups are done. */
5242 tree return_type = TREE_TYPE (res);
5243 rtx x = get_rtl_for_parm_ssa_default_def (res);
5244 if (x)
5245 /* Use it. */;
5246 else if (TYPE_MODE (return_type) != BLKmode
5247 && targetm.calls.return_in_msb (return_type))
5248 /* expand_function_end will insert the appropriate padding in
5249 this case. Use the return value's natural (unpadded) mode
5250 within the function proper. */
5251 x = gen_reg_rtx (TYPE_MODE (return_type));
5252 else
5254 /* In order to figure out what mode to use for the pseudo, we
5255 figure out what the mode of the eventual return register will
5256 actually be, and use that. */
5257 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5259 /* Structures that are returned in registers are not
5260 aggregate_value_p, so we may see a PARALLEL or a REG. */
5261 if (REG_P (hard_reg))
5262 x = gen_reg_rtx (GET_MODE (hard_reg));
5263 else
5265 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5266 x = gen_group_rtx (hard_reg);
5270 SET_DECL_RTL (res, x);
5272 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5273 result to the real return register(s). */
5274 DECL_REGISTER (res) = 1;
5276 if (chkp_function_instrumented_p (current_function_decl))
5278 tree return_type = TREE_TYPE (res);
5279 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5280 subr, 1);
5281 SET_DECL_BOUNDS_RTL (res, bounds);
5285 /* Initialize rtx for parameters and local variables.
5286 In some cases this requires emitting insns. */
5287 assign_parms (subr);
5289 /* If the function gets a static chain arg, store it. */
5290 if (cfun->static_chain_decl)
5292 tree parm = cfun->static_chain_decl;
5293 rtx local, chain;
5294 rtx_insn *insn;
5296 local = get_rtl_for_parm_ssa_default_def (parm);
5297 if (!local)
5298 local = gen_reg_rtx (Pmode);
5299 chain = targetm.calls.static_chain (current_function_decl, true);
5301 set_decl_incoming_rtl (parm, chain, false);
5302 SET_DECL_RTL (parm, local);
5303 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5305 if (GET_MODE (local) != Pmode)
5306 local = convert_to_mode (Pmode, local,
5307 TYPE_UNSIGNED (TREE_TYPE (parm)));
5309 insn = emit_move_insn (local, chain);
5311 /* Mark the register as eliminable, similar to parameters. */
5312 if (MEM_P (chain)
5313 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5314 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5316 /* If we aren't optimizing, save the static chain onto the stack. */
5317 if (!optimize)
5319 tree saved_static_chain_decl
5320 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5321 DECL_NAME (parm), TREE_TYPE (parm));
5322 rtx saved_static_chain_rtx
5323 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5324 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5325 emit_move_insn (saved_static_chain_rtx, chain);
5326 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5327 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5331 /* If the function receives a non-local goto, then store the
5332 bits we need to restore the frame pointer. */
5333 if (cfun->nonlocal_goto_save_area)
5335 tree t_save;
5336 rtx r_save;
5338 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5339 gcc_assert (DECL_RTL_SET_P (var));
5341 t_save = build4 (ARRAY_REF,
5342 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5343 cfun->nonlocal_goto_save_area,
5344 integer_zero_node, NULL_TREE, NULL_TREE);
5345 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5346 gcc_assert (GET_MODE (r_save) == Pmode);
5348 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5349 update_nonlocal_goto_save_area ();
5352 /* The following was moved from init_function_start.
5353 The move is supposed to make sdb output more accurate. */
5354 /* Indicate the beginning of the function body,
5355 as opposed to parm setup. */
5356 emit_note (NOTE_INSN_FUNCTION_BEG);
5358 gcc_assert (NOTE_P (get_last_insn ()));
5360 parm_birth_insn = get_last_insn ();
5362 if (crtl->profile)
5364 #ifdef PROFILE_HOOK
5365 PROFILE_HOOK (current_function_funcdef_no);
5366 #endif
5369 /* If we are doing generic stack checking, the probe should go here. */
5370 if (flag_stack_check == GENERIC_STACK_CHECK)
5371 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5374 void
5375 pop_dummy_function (void)
5377 pop_cfun ();
5378 in_dummy_function = false;
5381 /* Undo the effects of init_dummy_function_start. */
5382 void
5383 expand_dummy_function_end (void)
5385 gcc_assert (in_dummy_function);
5387 /* End any sequences that failed to be closed due to syntax errors. */
5388 while (in_sequence_p ())
5389 end_sequence ();
5391 /* Outside function body, can't compute type's actual size
5392 until next function's body starts. */
5394 free_after_parsing (cfun);
5395 free_after_compilation (cfun);
5396 pop_dummy_function ();
5399 /* Helper for diddle_return_value. */
5401 void
5402 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5404 if (! outgoing)
5405 return;
5407 if (REG_P (outgoing))
5408 (*doit) (outgoing, arg);
5409 else if (GET_CODE (outgoing) == PARALLEL)
5411 int i;
5413 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5415 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5417 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5418 (*doit) (x, arg);
5423 /* Call DOIT for each hard register used as a return value from
5424 the current function. */
5426 void
5427 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5429 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5430 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5433 static void
5434 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5436 emit_clobber (reg);
5439 void
5440 clobber_return_register (void)
5442 diddle_return_value (do_clobber_return_reg, NULL);
5444 /* In case we do use a pseudo to return the value, clobber it too. */
5445 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5447 tree decl_result = DECL_RESULT (current_function_decl);
5448 rtx decl_rtl = DECL_RTL (decl_result);
5449 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5451 do_clobber_return_reg (decl_rtl, NULL);
5456 static void
5457 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5459 emit_use (reg);
5462 static void
5463 use_return_register (void)
5465 diddle_return_value (do_use_return_reg, NULL);
5468 /* Set the location of the insn chain starting at INSN to LOC. */
5470 static void
5471 set_insn_locations (rtx_insn *insn, int loc)
5473 while (insn != NULL)
5475 if (INSN_P (insn))
5476 INSN_LOCATION (insn) = loc;
5477 insn = NEXT_INSN (insn);
5481 /* Generate RTL for the end of the current function. */
5483 void
5484 expand_function_end (void)
5486 /* If arg_pointer_save_area was referenced only from a nested
5487 function, we will not have initialized it yet. Do that now. */
5488 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5489 get_arg_pointer_save_area ();
5491 /* If we are doing generic stack checking and this function makes calls,
5492 do a stack probe at the start of the function to ensure we have enough
5493 space for another stack frame. */
5494 if (flag_stack_check == GENERIC_STACK_CHECK)
5496 rtx_insn *insn, *seq;
5498 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5499 if (CALL_P (insn))
5501 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5502 start_sequence ();
5503 if (STACK_CHECK_MOVING_SP)
5504 anti_adjust_stack_and_probe (max_frame_size, true);
5505 else
5506 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5507 seq = get_insns ();
5508 end_sequence ();
5509 set_insn_locations (seq, prologue_location);
5510 emit_insn_before (seq, stack_check_probe_note);
5511 break;
5515 /* End any sequences that failed to be closed due to syntax errors. */
5516 while (in_sequence_p ())
5517 end_sequence ();
5519 clear_pending_stack_adjust ();
5520 do_pending_stack_adjust ();
5522 /* Output a line number for the end of the function.
5523 SDB depends on this. */
5524 set_curr_insn_location (input_location);
5526 /* Before the return label (if any), clobber the return
5527 registers so that they are not propagated live to the rest of
5528 the function. This can only happen with functions that drop
5529 through; if there had been a return statement, there would
5530 have either been a return rtx, or a jump to the return label.
5532 We delay actual code generation until after the current_function_value_rtx
5533 is computed. */
5534 rtx_insn *clobber_after = get_last_insn ();
5536 /* Output the label for the actual return from the function. */
5537 emit_label (return_label);
5539 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5541 /* Let except.c know where it should emit the call to unregister
5542 the function context for sjlj exceptions. */
5543 if (flag_exceptions)
5544 sjlj_emit_function_exit_after (get_last_insn ());
5546 else
5548 /* We want to ensure that instructions that may trap are not
5549 moved into the epilogue by scheduling, because we don't
5550 always emit unwind information for the epilogue. */
5551 if (cfun->can_throw_non_call_exceptions)
5552 emit_insn (gen_blockage ());
5555 /* If this is an implementation of throw, do what's necessary to
5556 communicate between __builtin_eh_return and the epilogue. */
5557 expand_eh_return ();
5559 /* If scalar return value was computed in a pseudo-reg, or was a named
5560 return value that got dumped to the stack, copy that to the hard
5561 return register. */
5562 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5564 tree decl_result = DECL_RESULT (current_function_decl);
5565 rtx decl_rtl = DECL_RTL (decl_result);
5567 if (REG_P (decl_rtl)
5568 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5569 : DECL_REGISTER (decl_result))
5571 rtx real_decl_rtl = crtl->return_rtx;
5573 /* This should be set in assign_parms. */
5574 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5576 /* If this is a BLKmode structure being returned in registers,
5577 then use the mode computed in expand_return. Note that if
5578 decl_rtl is memory, then its mode may have been changed,
5579 but that of crtl->return_rtx has not. */
5580 if (GET_MODE (real_decl_rtl) == BLKmode)
5581 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5583 /* If a non-BLKmode return value should be padded at the least
5584 significant end of the register, shift it left by the appropriate
5585 amount. BLKmode results are handled using the group load/store
5586 machinery. */
5587 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5588 && REG_P (real_decl_rtl)
5589 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5591 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5592 REGNO (real_decl_rtl)),
5593 decl_rtl);
5594 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5596 /* If a named return value dumped decl_result to memory, then
5597 we may need to re-do the PROMOTE_MODE signed/unsigned
5598 extension. */
5599 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5601 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5602 promote_function_mode (TREE_TYPE (decl_result),
5603 GET_MODE (decl_rtl), &unsignedp,
5604 TREE_TYPE (current_function_decl), 1);
5606 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5608 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5610 /* If expand_function_start has created a PARALLEL for decl_rtl,
5611 move the result to the real return registers. Otherwise, do
5612 a group load from decl_rtl for a named return. */
5613 if (GET_CODE (decl_rtl) == PARALLEL)
5614 emit_group_move (real_decl_rtl, decl_rtl);
5615 else
5616 emit_group_load (real_decl_rtl, decl_rtl,
5617 TREE_TYPE (decl_result),
5618 int_size_in_bytes (TREE_TYPE (decl_result)));
5620 /* In the case of complex integer modes smaller than a word, we'll
5621 need to generate some non-trivial bitfield insertions. Do that
5622 on a pseudo and not the hard register. */
5623 else if (GET_CODE (decl_rtl) == CONCAT
5624 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5625 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5627 int old_generating_concat_p;
5628 rtx tmp;
5630 old_generating_concat_p = generating_concat_p;
5631 generating_concat_p = 0;
5632 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5633 generating_concat_p = old_generating_concat_p;
5635 emit_move_insn (tmp, decl_rtl);
5636 emit_move_insn (real_decl_rtl, tmp);
5638 else
5639 emit_move_insn (real_decl_rtl, decl_rtl);
5643 /* If returning a structure, arrange to return the address of the value
5644 in a place where debuggers expect to find it.
5646 If returning a structure PCC style,
5647 the caller also depends on this value.
5648 And cfun->returns_pcc_struct is not necessarily set. */
5649 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5650 && !targetm.calls.omit_struct_return_reg)
5652 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5653 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5654 rtx outgoing;
5656 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5657 type = TREE_TYPE (type);
5658 else
5659 value_address = XEXP (value_address, 0);
5661 outgoing = targetm.calls.function_value (build_pointer_type (type),
5662 current_function_decl, true);
5664 /* Mark this as a function return value so integrate will delete the
5665 assignment and USE below when inlining this function. */
5666 REG_FUNCTION_VALUE_P (outgoing) = 1;
5668 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5669 value_address = convert_memory_address (GET_MODE (outgoing),
5670 value_address);
5672 emit_move_insn (outgoing, value_address);
5674 /* Show return register used to hold result (in this case the address
5675 of the result). */
5676 crtl->return_rtx = outgoing;
5679 /* Emit the actual code to clobber the return register. Don't emit
5680 it if clobber_after is a barrier; in that case the previous basic
5681 block certainly doesn't fall through into the exit block. */
5682 if (!BARRIER_P (clobber_after))
5684 start_sequence ();
5685 clobber_return_register ();
5686 rtx_insn *seq = get_insns ();
5687 end_sequence ();
5689 emit_insn_after (seq, clobber_after);
5692 /* Output the label for the naked return from the function. */
5693 if (naked_return_label)
5694 emit_label (naked_return_label);
5696 /* @@@ This is a kludge. We want to ensure that instructions that
5697 may trap are not moved into the epilogue by scheduling, because
5698 we don't always emit unwind information for the epilogue. */
5699 if (cfun->can_throw_non_call_exceptions
5700 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5701 emit_insn (gen_blockage ());
5703 /* If stack protection is enabled for this function, check the guard. */
5704 if (crtl->stack_protect_guard)
5705 stack_protect_epilogue ();
5707 /* If we had calls to alloca, and this machine needs
5708 an accurate stack pointer to exit the function,
5709 insert some code to save and restore the stack pointer. */
5710 if (! EXIT_IGNORE_STACK
5711 && cfun->calls_alloca)
5713 rtx tem = 0;
5715 start_sequence ();
5716 emit_stack_save (SAVE_FUNCTION, &tem);
5717 rtx_insn *seq = get_insns ();
5718 end_sequence ();
5719 emit_insn_before (seq, parm_birth_insn);
5721 emit_stack_restore (SAVE_FUNCTION, tem);
5724 /* ??? This should no longer be necessary since the old stupid register
5725 allocator is no longer with us, but some parts of the compiler (e.g.
5726 reload_combine, and sh mach_dep_reorg) still try to compute their own
5727 lifetime info instead of using the general framework. */
5728 use_return_register ();
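/* For illustration (a sketch, not code from this file): for a function
   such as

       int f (void) { return g () + 1; }

   the value of the return expression lives in a pseudo bound to
   DECL_RESULT, and the code above copies that pseudo into the hard
   function-value register recorded in crtl->return_rtx after the return
   label, re-applying the PROMOTE_MODE extension when the two modes
   differ.  */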
5731 rtx
5732 get_arg_pointer_save_area (void)
5734 rtx ret = arg_pointer_save_area;
5736 if (! ret)
5738 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5739 arg_pointer_save_area = ret;
5742 if (! crtl->arg_pointer_save_area_init)
5744 /* Save the arg pointer at the beginning of the function. The
5745 generated stack slot may not be a valid memory address, so we
5746 have to check it and fix it if necessary. */
5747 start_sequence ();
5748 emit_move_insn (validize_mem (copy_rtx (ret)),
5749 crtl->args.internal_arg_pointer);
5750 rtx_insn *seq = get_insns ();
5751 end_sequence ();
5753 push_topmost_sequence ();
5754 emit_insn_after (seq, entry_of_function ());
5755 pop_topmost_sequence ();
5757 crtl->arg_pointer_save_area_init = true;
5760 return ret;
5763 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5764 for the first time. */
5766 static void
5767 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5769 rtx_insn *tmp;
5770 hash_table<insn_cache_hasher> *hash = *hashp;
5772 if (hash == NULL)
5773 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5775 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5777 rtx *slot = hash->find_slot (tmp, INSERT);
5778 gcc_assert (*slot == NULL);
5779 *slot = tmp;
5783 /* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
5784 basic block, splitting, or a peephole. If INSN is a prologue or epilogue
5785 insn, then record COPY as well. */
5787 void
5788 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5790 hash_table<insn_cache_hasher> *hash;
5791 rtx *slot;
5793 hash = epilogue_insn_hash;
5794 if (!hash || !hash->find (insn))
5796 hash = prologue_insn_hash;
5797 if (!hash || !hash->find (insn))
5798 return;
5801 slot = hash->find_slot (copy, INSERT);
5802 gcc_assert (*slot == NULL);
5803 *slot = copy;
5806 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5807 we can be running after reorg, SEQUENCE rtl is possible. */
5809 static bool
5810 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5812 if (hash == NULL)
5813 return false;
5815 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5817 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5818 int i;
5819 for (i = seq->len () - 1; i >= 0; i--)
5820 if (hash->find (seq->element (i)))
5821 return true;
5822 return false;
5825 return hash->find (const_cast<rtx> (insn)) != NULL;
5828 int
5829 prologue_epilogue_contains (const_rtx insn)
5831 if (contains (insn, prologue_insn_hash))
5832 return 1;
5833 if (contains (insn, epilogue_insn_hash))
5834 return 1;
5835 return 0;
5838 /* Insert use of return register before the end of BB. */
5840 static void
5841 emit_use_return_register_into_block (basic_block bb)
5843 start_sequence ();
5844 use_return_register ();
5845 rtx_insn *seq = get_insns ();
5846 end_sequence ();
5847 rtx_insn *insn = BB_END (bb);
5848 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5849 insn = prev_cc0_setter (insn);
5851 emit_insn_before (seq, insn);
5855 /* Create a return pattern, either simple_return or return, depending on
5856 simple_p. */
5858 static rtx_insn *
5859 gen_return_pattern (bool simple_p)
5861 return (simple_p
5862 ? targetm.gen_simple_return ()
5863 : targetm.gen_return ());
5866 /* Insert an appropriate return pattern at the end of block BB. This
5867 also means updating block_for_insn appropriately. SIMPLE_P is
5868 the same as in gen_return_pattern and passed to it. */
5870 void
5871 emit_return_into_block (bool simple_p, basic_block bb)
5873 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5874 BB_END (bb));
5875 rtx pat = PATTERN (jump);
5876 if (GET_CODE (pat) == PARALLEL)
5877 pat = XVECEXP (pat, 0, 0);
5878 gcc_assert (ANY_RETURN_P (pat));
5879 JUMP_LABEL (jump) = pat;
5882 /* Set JUMP_LABEL for a return insn. */
5884 void
5885 set_return_jump_label (rtx_insn *returnjump)
5887 rtx pat = PATTERN (returnjump);
5888 if (GET_CODE (pat) == PARALLEL)
5889 pat = XVECEXP (pat, 0, 0);
5890 if (ANY_RETURN_P (pat))
5891 JUMP_LABEL (returnjump) = pat;
5892 else
5893 JUMP_LABEL (returnjump) = ret_rtx;
5896 /* Return true if there are any active insns between HEAD and TAIL. */
5897 bool
5898 active_insn_between (rtx_insn *head, rtx_insn *tail)
5900 while (tail)
5902 if (active_insn_p (tail))
5903 return true;
5904 if (tail == head)
5905 return false;
5906 tail = PREV_INSN (tail);
5908 return false;
5911 /* LAST_BB is a block that exits and is empty of active instructions.
5912 Examine its predecessors for jumps that can be converted to
5913 (conditional) returns. */
5914 vec<edge>
5915 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5916 vec<edge> unconverted ATTRIBUTE_UNUSED)
5918 int i;
5919 basic_block bb;
5920 edge_iterator ei;
5921 edge e;
5922 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5924 FOR_EACH_EDGE (e, ei, last_bb->preds)
5925 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5926 src_bbs.quick_push (e->src);
5928 rtx_insn *label = BB_HEAD (last_bb);
5930 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5932 rtx_insn *jump = BB_END (bb);
5934 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5935 continue;
5937 e = find_edge (bb, last_bb);
5939 /* If we have an unconditional jump, we can replace that
5940 with a simple return instruction. */
5941 if (simplejump_p (jump))
5943 /* The use of the return register might be present in the exit
5944 fallthru block. Either:
5945 - removing the use is safe, and we should remove the use in
5946 the exit fallthru block, or
5947 - removing the use is not safe, and we should add it here.
5948 For now, we conservatively choose the latter. Either choice
5949 helps crossjumping. */
5950 emit_use_return_register_into_block (bb);
5952 emit_return_into_block (simple_p, bb);
5953 delete_insn (jump);
5956 /* If we have a conditional jump branching to the last
5957 block, we can try to replace that with a conditional
5958 return instruction. */
5959 else if (condjump_p (jump))
5961 rtx dest;
5963 if (simple_p)
5964 dest = simple_return_rtx;
5965 else
5966 dest = ret_rtx;
5967 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5969 if (targetm.have_simple_return () && simple_p)
5971 if (dump_file)
5972 fprintf (dump_file,
5973 "Failed to redirect bb %d branch.\n", bb->index);
5974 unconverted.safe_push (e);
5976 continue;
5979 /* See comment in simplejump_p case above. */
5980 emit_use_return_register_into_block (bb);
5982 /* If this block has only one successor, it both jumps
5983 and falls through to the fallthru block, so we can't
5984 delete the edge. */
5985 if (single_succ_p (bb))
5986 continue;
5988 else
5990 if (targetm.have_simple_return () && simple_p)
5992 if (dump_file)
5993 fprintf (dump_file,
5994 "Failed to redirect bb %d branch.\n", bb->index);
5995 unconverted.safe_push (e);
5997 continue;
6000 /* Fix up the CFG for the successful change we just made. */
6001 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
6002 e->flags &= ~EDGE_CROSSING;
6004 src_bbs.release ();
6005 return unconverted;
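/* For illustration (a sketch, not RTL from a real dump): when LAST_BB
   holds nothing but the common return label, a predecessor ending in

       (jump_insn (set (pc) (label_ref <return label>)))

   is rewritten by the code above into

       (jump_insn (simple_return))    ;; or (return) when !SIMPLE_P

   and its edge is redirected to the exit block, so the extra branch to
   the shared return label disappears.  */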
6008 /* Emit a return insn for the exit fallthru block. */
6009 basic_block
6010 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
6012 basic_block last_bb = exit_fallthru_edge->src;
6014 if (JUMP_P (BB_END (last_bb)))
6016 last_bb = split_edge (exit_fallthru_edge);
6017 exit_fallthru_edge = single_succ_edge (last_bb);
6019 emit_barrier_after (BB_END (last_bb));
6020 emit_return_into_block (simple_p, last_bb);
6021 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
6022 return last_bb;
6026 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6027 this into place with notes indicating where the prologue ends and where
6028 the epilogue begins. Update the basic block information when possible.
6030 Notes on epilogue placement:
6031 There are several kinds of edges to the exit block:
6032 * a single fallthru edge from LAST_BB
6033 * possibly, edges from blocks containing sibcalls
6034 * possibly, fake edges from infinite loops
6036 The epilogue is always emitted on the fallthru edge from the last basic
6037 block in the function, LAST_BB, into the exit block.
6039 If LAST_BB is empty except for a label, it is the target of every
6040 other basic block in the function that ends in a return. If a
6041 target has a return or simple_return pattern (possibly with
6042 conditional variants), these basic blocks can be changed so that a
6043 return insn is emitted into them, and their target is adjusted to
6044 the real exit block.
6046 Notes on shrink wrapping: We implement a fairly conservative
6047 version of shrink-wrapping rather than the textbook one. We only
6048 generate a single prologue and a single epilogue. This is
6049 sufficient to catch a number of interesting cases involving early
6050 exits.
6052 First, we identify the blocks that require the prologue to occur before
6053 them. These are the ones that modify a call-saved register, or reference
6054 any of the stack or frame pointer registers. To simplify things, we then
6055 mark everything reachable from these blocks as also requiring a prologue.
6056 This takes care of loops automatically, and avoids the need to examine
6057 whether MEMs reference the frame, since it is sufficient to check for
6058 occurrences of the stack or frame pointer.
6060 We then compute the set of blocks for which the need for a prologue
6061 is anticipatable (borrowing terminology from the shrink-wrapping
6062 description in Muchnick's book). These are the blocks which either
6063 require a prologue themselves, or those that have only successors
6064 where the prologue is anticipatable. The prologue needs to be
6065 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6066 is not. For the moment, we ensure that only one such edge exists.
6068 The epilogue is placed as described above, but we make a
6069 distinction between inserting return and simple_return patterns
6070 when modifying other blocks that end in a return. Blocks that end
6071 in a sibcall omit the sibcall_epilogue if the block is not in
6072 ANTIC. */
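/* For illustration (a sketch, not code from this file): the conservative
   shrink-wrapping described above pays off for functions with a cheap
   early exit, e.g.

       int f (struct s *p) { if (!p) return 0; return slow_path (p); }

   The null test touches neither a call-saved register nor the frame, so
   only the slow-path block requires the prologue; the prologue is then
   inserted on the edge into that block rather than at function entry,
   and the fast path runs without setting up a frame.  */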
6074 void
6075 thread_prologue_and_epilogue_insns (void)
6077 bool inserted;
6078 vec<edge> unconverted_simple_returns = vNULL;
6079 bitmap_head bb_flags;
6080 rtx_insn *returnjump;
6081 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
6082 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
6083 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
6084 edge_iterator ei;
6086 df_analyze ();
6088 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6090 inserted = false;
6091 epilogue_end = NULL;
6092 returnjump = NULL;
6094 /* Can't deal with multiple successors of the entry block at the
6095 moment. Function should always have at least one entry
6096 point. */
6097 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6098 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6099 orig_entry_edge = entry_edge;
6101 split_prologue_seq = NULL;
6102 if (flag_split_stack
6103 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
6104 == NULL))
6106 start_sequence ();
6107 emit_insn (targetm.gen_split_stack_prologue ());
6108 split_prologue_seq = get_insns ();
6109 end_sequence ();
6111 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
6112 set_insn_locations (split_prologue_seq, prologue_location);
6115 prologue_seq = NULL;
6116 if (targetm.have_prologue ())
6118 start_sequence ();
6119 rtx_insn *seq = targetm.gen_prologue ();
6120 emit_insn (seq);
6122 /* Insert an explicit USE for the frame pointer
6123 if profiling is on and the frame pointer is required. */
6124 if (crtl->profile && frame_pointer_needed)
6125 emit_use (hard_frame_pointer_rtx);
6127 /* Retain a map of the prologue insns. */
6128 record_insns (seq, NULL, &prologue_insn_hash);
6129 emit_note (NOTE_INSN_PROLOGUE_END);
6131 /* Ensure that instructions are not moved into the prologue when
6132 profiling is on. The call to the profiling routine can be
6133 emitted within the live range of a call-clobbered register. */
6134 if (!targetm.profile_before_prologue () && crtl->profile)
6135 emit_insn (gen_blockage ());
6137 prologue_seq = get_insns ();
6138 end_sequence ();
6139 set_insn_locations (prologue_seq, prologue_location);
6142 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
6144 /* Try to perform a kind of shrink-wrapping, making sure the
6145 prologue/epilogue is emitted only around those parts of the
6146 function that require it. */
6148 try_shrink_wrapping (&entry_edge, &bb_flags, prologue_seq);
6150 if (split_prologue_seq != NULL_RTX)
6152 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6153 inserted = true;
6155 if (prologue_seq != NULL_RTX)
6157 insert_insn_on_edge (prologue_seq, entry_edge);
6158 inserted = true;
6161 /* If the exit block has no non-fake predecessors, we don't need
6162 an epilogue. */
6163 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6164 if ((e->flags & EDGE_FAKE) == 0)
6165 break;
6166 if (e == NULL)
6167 goto epilogue_done;
6169 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6171 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6173 if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
6174 exit_fallthru_edge
6175 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
6176 &unconverted_simple_returns,
6177 &returnjump);
6178 if (targetm.have_return ())
6180 if (exit_fallthru_edge == NULL)
6181 goto epilogue_done;
6183 if (optimize)
6185 basic_block last_bb = exit_fallthru_edge->src;
6187 if (LABEL_P (BB_HEAD (last_bb))
6188 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6189 convert_jumps_to_returns (last_bb, false, vNULL);
6191 if (EDGE_COUNT (last_bb->preds) != 0
6192 && single_succ_p (last_bb))
6194 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6195 epilogue_end = returnjump = BB_END (last_bb);
6197 /* Emitting the return may add a basic block.
6198 Fix bb_flags for the added block. */
6199 if (targetm.have_simple_return ()
6200 && last_bb != exit_fallthru_edge->src)
6201 bitmap_set_bit (&bb_flags, last_bb->index);
6203 goto epilogue_done;
6208 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6209 this marker for the splits of EH_RETURN patterns, and nothing else
6210 uses the flag in the meantime. */
6211 epilogue_completed = 1;
6213 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6214 some targets, these get split to a special version of the epilogue
6215 code. In order to be able to properly annotate these with unwind
6216 info, try to split them now. If we get a valid split, drop an
6217 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6218 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6220 rtx_insn *prev, *last, *trial;
6222 if (e->flags & EDGE_FALLTHRU)
6223 continue;
6224 last = BB_END (e->src);
6225 if (!eh_returnjump_p (last))
6226 continue;
6228 prev = PREV_INSN (last);
6229 trial = try_split (PATTERN (last), last, 1);
6230 if (trial == last)
6231 continue;
6233 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6234 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6237 /* If nothing falls through into the exit block, we don't need an
6238 epilogue. */
6240 if (exit_fallthru_edge == NULL)
6241 goto epilogue_done;
6243 if (targetm.have_epilogue ())
6245 start_sequence ();
6246 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6247 rtx_insn *seq = targetm.gen_epilogue ();
6248 if (seq)
6249 emit_jump_insn (seq);
6251 /* Retain a map of the epilogue insns. */
6252 record_insns (seq, NULL, &epilogue_insn_hash);
6253 set_insn_locations (seq, epilogue_location);
6255 seq = get_insns ();
6256 returnjump = get_last_insn ();
6257 end_sequence ();
6259 insert_insn_on_edge (seq, exit_fallthru_edge);
6260 inserted = true;
6262 if (JUMP_P (returnjump))
6263 set_return_jump_label (returnjump);
6265 else
6267 basic_block cur_bb;
6269 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6270 goto epilogue_done;
6271 /* We have a fall-through edge to the exit block, the source is not
6272 at the end of the function, and there will be an assembler epilogue
6273 at the end of the function.
6274 We can't use force_nonfallthru here, because that would try to
6275 use return. Inserting a jump 'by hand' is extremely messy, so
6276 we take advantage of cfg_layout_finalize using
6277 fixup_fallthru_exit_predecessor. */
6278 cfg_layout_initialize (0);
6279 FOR_EACH_BB_FN (cur_bb, cfun)
6280 if (cur_bb->index >= NUM_FIXED_BLOCKS
6281 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6282 cur_bb->aux = cur_bb->next_bb;
6283 cfg_layout_finalize ();
6286 epilogue_done:
6288 default_rtl_profile ();
6290 if (inserted)
6292 sbitmap blocks;
6294 commit_edge_insertions ();
6296 /* Look for basic blocks within the prologue insns. */
6297 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6298 bitmap_clear (blocks);
6299 bitmap_set_bit (blocks, entry_edge->dest->index);
6300 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6301 find_many_sub_basic_blocks (blocks);
6302 sbitmap_free (blocks);
6304 /* The epilogue insns we inserted may cause the exit edge to no longer
6305 be fallthru. */
6306 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6308 if (((e->flags & EDGE_FALLTHRU) != 0)
6309 && returnjump_p (BB_END (e->src)))
6310 e->flags &= ~EDGE_FALLTHRU;
6314 if (targetm.have_simple_return ())
6315 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6316 returnjump, unconverted_simple_returns);
6318 /* Emit sibling epilogues before any sibling call sites. */
6319 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6320 ei_safe_edge (ei));
6323 basic_block bb = e->src;
6324 rtx_insn *insn = BB_END (bb);
6326 if (!CALL_P (insn)
6327 || ! SIBLING_CALL_P (insn)
6328 || (targetm.have_simple_return ()
6329 && entry_edge != orig_entry_edge
6330 && !bitmap_bit_p (&bb_flags, bb->index)))
6332 ei_next (&ei);
6333 continue;
6336 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6338 start_sequence ();
6339 emit_note (NOTE_INSN_EPILOGUE_BEG);
6340 emit_insn (ep_seq);
6341 rtx_insn *seq = get_insns ();
6342 end_sequence ();
6344 /* Retain a map of the epilogue insns. Used in life analysis to
6345 avoid getting rid of sibcall epilogue insns. Do this before we
6346 actually emit the sequence. */
6347 record_insns (seq, NULL, &epilogue_insn_hash);
6348 set_insn_locations (seq, epilogue_location);
6350 emit_insn_before (seq, insn);
6352 ei_next (&ei);
6355 if (epilogue_end)
6357 rtx_insn *insn, *next;
6359 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
6360 epilogue back before it; those can be relevant for debug info
6361 generation. There is no need, however, to be too picky about
6362 the existence of such a note: if none is present after the
6363 epilogue, there is nothing to move. */
6364 for (insn = epilogue_end; insn; insn = next)
6366 next = NEXT_INSN (insn);
6367 if (NOTE_P (insn)
6368 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6369 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6373 bitmap_clear (&bb_flags);
6375 /* Threading the prologue and epilogue changes the artificial refs
6376 in the entry and exit blocks. */
6377 epilogue_completed = 1;
6378 df_update_entry_exit_and_calls ();
6381 /* Reposition the prologue-end and epilogue-begin notes after
6382 instruction scheduling. */
6384 void
6385 reposition_prologue_and_epilogue_notes (void)
6387 if (!targetm.have_prologue ()
6388 && !targetm.have_epilogue ()
6389 && !targetm.have_sibcall_epilogue ())
6390 return;
6392 /* Since the hash table is created on demand, the fact that it is
6393 non-null is a signal that it is non-empty. */
6394 if (prologue_insn_hash != NULL)
6396 size_t len = prologue_insn_hash->elements ();
6397 rtx_insn *insn, *last = NULL, *note = NULL;
6399 /* Scan from the beginning until we reach the last prologue insn. */
6400 /* ??? While we do have the CFG intact, there are two problems:
6401 (1) The prologue can contain loops (typically probing the stack),
6402 which means that the end of the prologue isn't in the first bb.
6403 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6404 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6406 if (NOTE_P (insn))
6408 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6409 note = insn;
6411 else if (contains (insn, prologue_insn_hash))
6413 last = insn;
6414 if (--len == 0)
6415 break;
6419 if (last)
6421 if (note == NULL)
6423 /* Scan forward looking for the PROLOGUE_END note. It should
6424 be right at the beginning of the block, possibly with other
6425 insn notes that got moved there. */
6426 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6428 if (NOTE_P (note)
6429 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6430 break;
6434 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6435 if (LABEL_P (last))
6436 last = NEXT_INSN (last);
6437 reorder_insns (note, note, last);
6441 if (epilogue_insn_hash != NULL)
6443 edge_iterator ei;
6444 edge e;
6446 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6448 rtx_insn *insn, *first = NULL, *note = NULL;
6449 basic_block bb = e->src;
6451 /* Scan from the beginning until we reach the first epilogue insn. */
6452 FOR_BB_INSNS (bb, insn)
6454 if (NOTE_P (insn))
6456 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6458 note = insn;
6459 if (first != NULL)
6460 break;
6463 else if (first == NULL && contains (insn, epilogue_insn_hash))
6465 first = insn;
6466 if (note != NULL)
6467 break;
6471 if (note)
6473 /* If the function has a single basic block, and no real
6474 epilogue insns (e.g. sibcall with no cleanup), the
6475 epilogue note can get scheduled before the prologue
6476 note. If we have frame related prologue insns, having
6477 them scanned during the epilogue will result in a crash.
6478 In this case re-order the epilogue note to just before
6479 the last insn in the block. */
6480 if (first == NULL)
6481 first = BB_END (bb);
6483 if (PREV_INSN (first) != note)
6484 reorder_insns (note, note, PREV_INSN (first));
6490 /* Returns the name of the function declared by FNDECL. */
6491 const char *
6492 fndecl_name (tree fndecl)
6494 if (fndecl == NULL)
6495 return "(nofn)";
6496 return lang_hooks.decl_printable_name (fndecl, 2);
6499 /* Returns the name of function FN. */
6500 const char *
6501 function_name (struct function *fn)
6503 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6504 return fndecl_name (fndecl);
6507 /* Returns the name of the current function. */
6508 const char *
6509 current_function_name (void)
6511 return function_name (cfun);
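/* For illustration (a sketch, not code from this file): these helpers are
   typically used to label per-function output in dump files, e.g.

       if (dump_file)
         fprintf (dump_file, ";; processing %s\n", current_function_name ());
 */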
6515 static unsigned int
6516 rest_of_handle_check_leaf_regs (void)
6518 #ifdef LEAF_REGISTERS
6519 crtl->uses_only_leaf_regs
6520 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6521 #endif
6522 return 0;
6525 /* Insert a TYPE into the used types hash table of CFUN. */
6527 static void
6528 used_types_insert_helper (tree type, struct function *func)
6530 if (type != NULL && func != NULL)
6532 if (func->used_types_hash == NULL)
6533 func->used_types_hash = hash_set<tree>::create_ggc (37);
6535 func->used_types_hash->add (type);
6539 /* Given a type, insert it into the used hash table in cfun. */
6540 void
6541 used_types_insert (tree t)
6543 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6544 if (TYPE_NAME (t))
6545 break;
6546 else
6547 t = TREE_TYPE (t);
6548 if (TREE_CODE (t) == ERROR_MARK)
6549 return;
6550 if (TYPE_NAME (t) == NULL_TREE
6551 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6552 t = TYPE_MAIN_VARIANT (t);
6553 if (debug_info_level > DINFO_LEVEL_NONE)
6555 if (cfun)
6556 used_types_insert_helper (t, cfun);
6557 else
6559 /* So this might be a type referenced by a global variable.
6560 Record that type so that we can later decide to emit its
6561 debug information. */
6562 vec_safe_push (types_used_by_cur_var_decl, t);
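/* For illustration (a sketch, not code from this file): callers invoke
   this when a type is actually used by the code being compiled, e.g.

       used_types_insert (TREE_TYPE (decl));

   so that the debug machinery can later decide to emit debug information
   for that type.  */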
6567 /* Helper to hash a struct types_used_by_vars_entry. */
6569 static hashval_t
6570 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6572 gcc_assert (entry && entry->var_decl && entry->type);
6574 return iterative_hash_object (entry->type,
6575 iterative_hash_object (entry->var_decl, 0));
6578 /* Hash function of the types_used_by_vars_entry hash table. */
6580 hashval_t
6581 used_type_hasher::hash (types_used_by_vars_entry *entry)
6583 return hash_types_used_by_vars_entry (entry);
6586 /* Equality function of the types_used_by_vars_entry hash table. */
6588 bool
6589 used_type_hasher::equal (types_used_by_vars_entry *e1,
6590 types_used_by_vars_entry *e2)
6592 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6595 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6597 void
6598 types_used_by_var_decl_insert (tree type, tree var_decl)
6600 if (type != NULL && var_decl != NULL)
6602 types_used_by_vars_entry **slot;
6603 struct types_used_by_vars_entry e;
6604 e.var_decl = var_decl;
6605 e.type = type;
6606 if (types_used_by_vars_hash == NULL)
6607 types_used_by_vars_hash
6608 = hash_table<used_type_hasher>::create_ggc (37);
6610 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6611 if (*slot == NULL)
6613 struct types_used_by_vars_entry *entry;
6614 entry = ggc_alloc<types_used_by_vars_entry> ();
6615 entry->type = type;
6616 entry->var_decl = var_decl;
6617 *slot = entry;
6622 namespace {
6624 const pass_data pass_data_leaf_regs =
6626 RTL_PASS, /* type */
6627 "*leaf_regs", /* name */
6628 OPTGROUP_NONE, /* optinfo_flags */
6629 TV_NONE, /* tv_id */
6630 0, /* properties_required */
6631 0, /* properties_provided */
6632 0, /* properties_destroyed */
6633 0, /* todo_flags_start */
6634 0, /* todo_flags_finish */
6637 class pass_leaf_regs : public rtl_opt_pass
6639 public:
6640 pass_leaf_regs (gcc::context *ctxt)
6641 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6644 /* opt_pass methods: */
6645 virtual unsigned int execute (function *)
6647 return rest_of_handle_check_leaf_regs ();
6650 }; // class pass_leaf_regs
6652 } // anon namespace
6654 rtl_opt_pass *
6655 make_pass_leaf_regs (gcc::context *ctxt)
6657 return new pass_leaf_regs (ctxt);
6660 static unsigned int
6661 rest_of_handle_thread_prologue_and_epilogue (void)
6663 if (optimize)
6664 cleanup_cfg (CLEANUP_EXPENSIVE);
6666 /* On some machines, the prologue and epilogue code, or parts thereof,
6667 can be represented as RTL. Doing so lets us schedule insns between
6668 it and the rest of the code and also allows delayed branch
6669 scheduling to operate in the epilogue. */
6670 thread_prologue_and_epilogue_insns ();
6672 /* Some non-cold blocks may now be only reachable from cold blocks.
6673 Fix that up. */
6674 fixup_partitions ();
6676 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6677 see PR57320. */
6678 cleanup_cfg (0);
6680 /* The stack usage info is finalized during prologue expansion. */
6681 if (flag_stack_usage_info)
6682 output_stack_usage ();
6684 return 0;
6687 namespace {
6689 const pass_data pass_data_thread_prologue_and_epilogue =
6691 RTL_PASS, /* type */
6692 "pro_and_epilogue", /* name */
6693 OPTGROUP_NONE, /* optinfo_flags */
6694 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6695 0, /* properties_required */
6696 0, /* properties_provided */
6697 0, /* properties_destroyed */
6698 0, /* todo_flags_start */
6699 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6702 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6704 public:
6705 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6706 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6709 /* opt_pass methods: */
6710 virtual unsigned int execute (function *)
6712 return rest_of_handle_thread_prologue_and_epilogue ();
6715 }; // class pass_thread_prologue_and_epilogue
6717 } // anon namespace
6719 rtl_opt_pass *
6720 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6722 return new pass_thread_prologue_and_epilogue (ctxt);
6726 /* This mini-pass fixes fall-out from SSA in asm statements that have
6727 in-out constraints. Say you start with
6729 orig = inout;
6730 asm ("": "+mr" (inout));
6731 use (orig);
6733 which is transformed very early to use explicit output and match operands:
6735 orig = inout;
6736 asm ("": "=mr" (inout) : "0" (inout));
6737 use (orig);
6739 Or, after SSA and copyprop,
6741 asm ("": "=mr" (inout_2) : "0" (inout_1));
6742 use (inout_1);
6744 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6745 they represent two separate values, so they will get different pseudo
6746 registers during expansion. Then, since the two operands need to match
6747 per the constraints, but use different pseudo registers, reload can
6748 only register a reload for these operands. But reloads can only be
6749 satisfied by hardregs, not by memory, so we need a register for this
6750 reload, just because we are presented with non-matching operands.
6751 So, even though we allow memory for this operand, no memory can be
6752 used for it, just because the two operands don't match. This can
6753 cause reload failures on register-starved targets.
6755 So it's a symptom of reload not being able to use memory for reloads
6756 or, alternatively, a symptom of both operands not coming into
6757 reload as matching (in which case the pseudo could go to memory just
6758 fine, as the alternative allows it, and no reload would be necessary).
6759 We fix the latter problem here, by transforming
6761 asm ("": "=mr" (inout_2) : "0" (inout_1));
6763 back to
6765 inout_2 = inout_1;
6766 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6768 static void
6769 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6771 int i;
6772 bool changed = false;
6773 rtx op = SET_SRC (p_sets[0]);
6774 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6775 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6776 bool *output_matched = XALLOCAVEC (bool, noutputs);
6778 memset (output_matched, 0, noutputs * sizeof (bool));
6779 for (i = 0; i < ninputs; i++)
6781 rtx input, output;
6782 rtx_insn *insns;
6783 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6784 char *end;
6785 int match, j;
6787 if (*constraint == '%')
6788 constraint++;
6790 match = strtoul (constraint, &end, 10);
6791 if (end == constraint)
6792 continue;
6794 gcc_assert (match < noutputs);
6795 output = SET_DEST (p_sets[match]);
6796 input = RTVEC_ELT (inputs, i);
6797 /* Only do the transformation for pseudos. */
6798 if (! REG_P (output)
6799 || rtx_equal_p (output, input)
6800 || (GET_MODE (input) != VOIDmode
6801 && GET_MODE (input) != GET_MODE (output)))
6802 continue;
6804 /* We can't do anything if the output is also used as input,
6805 as we're going to overwrite it. */
6806 for (j = 0; j < ninputs; j++)
6807 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6808 break;
6809 if (j != ninputs)
6810 continue;
6812 /* Avoid changing the same input several times. For
6813 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6814 only change in once (to out1), rather than changing it
6815 first to out1 and afterwards to out2. */
6816 if (i > 0)
6818 for (j = 0; j < noutputs; j++)
6819 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6820 break;
6821 if (j != noutputs)
6822 continue;
6824 output_matched[match] = true;
6826 start_sequence ();
6827 emit_move_insn (output, input);
6828 insns = get_insns ();
6829 end_sequence ();
6830 emit_insn_before (insns, insn);
6832 /* Now replace all mentions of the input with output. We can't
6833 just replace the occurrence in inputs[i], as the register might
6834 also be used in some other input (or even in an address of an
6835 output), which would mean possibly increasing the number of
6836 inputs by one (namely 'output' in addition), which might pose
6837 too complicated a problem for reload to solve. E.g. this situation:
6839 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6841 Here 'input' is used in two occurrences as input (once for the
6842 input operand, once for the address in the second output operand).
6843 If we would replace only the occurrence of the input operand (to
6844 make the matching) we would be left with this:
6846 output = input
6847 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6849 Now we suddenly have two different input values (containing the same
6850 value, but different pseudos) where we formerly had only one.
6851 With more complicated asms this might lead to reload failures
6852 which wouldn't have happened without this pass. So, iterate over
6853 all operands and replace all occurrences of the register used. */
6854 for (j = 0; j < noutputs; j++)
6855 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6856 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6857 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6858 input, output);
6859 for (j = 0; j < ninputs; j++)
6860 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6861 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6862 input, output);
6864 changed = true;
6867 if (changed)
6868 df_insn_rescan (insn);
6871 /* Add the decl D to the local_decls list of FUN. */
6873 void
6874 add_local_decl (struct function *fun, tree d)
6876 gcc_assert (TREE_CODE (d) == VAR_DECL);
6877 vec_safe_push (fun->local_decls, d);
6880 namespace {
6882 const pass_data pass_data_match_asm_constraints =
6884 RTL_PASS, /* type */
6885 "asmcons", /* name */
6886 OPTGROUP_NONE, /* optinfo_flags */
6887 TV_NONE, /* tv_id */
6888 0, /* properties_required */
6889 0, /* properties_provided */
6890 0, /* properties_destroyed */
6891 0, /* todo_flags_start */
6892 0, /* todo_flags_finish */
6895 class pass_match_asm_constraints : public rtl_opt_pass
6897 public:
6898 pass_match_asm_constraints (gcc::context *ctxt)
6899 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6902 /* opt_pass methods: */
6903 virtual unsigned int execute (function *);
6905 }; // class pass_match_asm_constraints
6907 unsigned
6908 pass_match_asm_constraints::execute (function *fun)
6910 basic_block bb;
6911 rtx_insn *insn;
6912 rtx pat, *p_sets;
6913 int noutputs;
6915 if (!crtl->has_asm_statement)
6916 return 0;
6918 df_set_flags (DF_DEFER_INSN_RESCAN);
6919 FOR_EACH_BB_FN (bb, fun)
6921 FOR_BB_INSNS (bb, insn)
6923 if (!INSN_P (insn))
6924 continue;
6926 pat = PATTERN (insn);
6927 if (GET_CODE (pat) == PARALLEL)
6928 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6929 else if (GET_CODE (pat) == SET)
6930 p_sets = &PATTERN (insn), noutputs = 1;
6931 else
6932 continue;
6934 if (GET_CODE (*p_sets) == SET
6935 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6936 match_asm_constraints_1 (insn, p_sets, noutputs);
6940 return TODO_df_finish;
6943 } // anon namespace
6945 rtl_opt_pass *
6946 make_pass_match_asm_constraints (gcc::context *ctxt)
6948 return new pass_match_asm_constraints (ctxt);
6952 #include "gt-function.h"