[RTL-ifcvt] PR rtl-optimization/68506: Fix emitting order of insns in IF-THEN-JOIN...
[official-gcc.git] / gcc / function.c
blob515d7c042203d24f0ee5eb17bb58807eea14a53f
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "tm_p.h"
45 #include "stringpool.h"
46 #include "expmed.h"
47 #include "optabs.h"
48 #include "regs.h"
49 #include "emit-rtl.h"
50 #include "recog.h"
51 #include "rtl-error.h"
52 #include "alias.h"
53 #include "fold-const.h"
54 #include "stor-layout.h"
55 #include "varasm.h"
56 #include "except.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "expr.h"
61 #include "optabs-tree.h"
62 #include "output.h"
63 #include "langhooks.h"
64 #include "common/common-target.h"
65 #include "gimplify.h"
66 #include "tree-pass.h"
67 #include "cfgrtl.h"
68 #include "cfganal.h"
69 #include "cfgbuild.h"
70 #include "cfgcleanup.h"
71 #include "cfgexpand.h"
72 #include "shrink-wrap.h"
73 #include "toplev.h"
74 #include "rtl-iter.h"
75 #include "tree-chkp.h"
76 #include "rtl-chkp.h"
77 #include "tree-dfa.h"
79 /* So we can assign to cfun in this file. */
80 #undef cfun
82 #ifndef STACK_ALIGNMENT_NEEDED
83 #define STACK_ALIGNMENT_NEEDED 1
84 #endif
86 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
88 /* Round a value to the lowest integer less than it that is a multiple of
89 the required alignment. Avoid using division in case the value is
90 negative. Assume the alignment is a power of two. */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
93 /* Similar, but round to the next highest integer that meets the
94 alignment. */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
97 /* Nonzero once virtual register instantiation has been done.
98 assign_stack_local uses frame_pointer_rtx when this is nonzero.
99 calls.c:emit_library_call_value_1 uses it to set up
100 post-instantiation libcalls. */
101 int virtuals_instantiated;
103 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
104 static GTY(()) int funcdef_no;
106 /* These variables hold pointers to functions to create and destroy
107 target specific, per-function data structures. */
108 struct machine_function * (*init_machine_status) (void);
110 /* The currently compiled function. */
111 struct function *cfun = 0;
113 /* These hashes record the prologue and epilogue insns. */
115 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
117 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
118 static bool equal (rtx a, rtx b) { return a == b; }
121 static GTY((cache))
122 hash_table<insn_cache_hasher> *prologue_insn_hash;
123 static GTY((cache))
124 hash_table<insn_cache_hasher> *epilogue_insn_hash;
127 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
128 vec<tree, va_gc> *types_used_by_cur_var_decl;
130 /* Forward declarations. */
132 static struct temp_slot *find_temp_slot_from_address (rtx);
133 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
134 static void pad_below (struct args_size *, machine_mode, tree);
135 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
136 static int all_blocks (tree, tree *);
137 static tree *get_block_vector (tree, int *);
138 extern tree debug_find_var_in_block_tree (tree, tree);
139 /* We always define `record_insns' even if it's not used so that we
140 can always export `prologue_epilogue_contains'. */
141 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
142 ATTRIBUTE_UNUSED;
143 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
144 static void prepare_function_start (void);
145 static void do_clobber_return_reg (rtx, void *);
146 static void do_use_return_reg (rtx, void *);
149 /* Stack of nested functions. */
150 /* Keep track of the cfun stack. */
152 static vec<function *> function_context_stack;
154 /* Save the current context for compilation of a nested function.
155 This is called from language-specific code. */
157 void
158 push_function_context (void)
160 if (cfun == 0)
161 allocate_struct_function (NULL, false);
163 function_context_stack.safe_push (cfun);
164 set_cfun (NULL);
167 /* Restore the last saved context, at the end of a nested function.
168 This function is called from language-specific code. */
170 void
171 pop_function_context (void)
173 struct function *p = function_context_stack.pop ();
174 set_cfun (p);
175 current_function_decl = p->decl;
177 /* Reset variables that have known state during rtx generation. */
178 virtuals_instantiated = 0;
179 generating_concat_p = 1;
182 /* Clear out all parts of the state in F that can safely be discarded
183 after the function has been parsed, but not compiled, to let
184 garbage collection reclaim the memory. */
186 void
187 free_after_parsing (struct function *f)
189 f->language = 0;
192 /* Clear out all parts of the state in F that can safely be discarded
193 after the function has been compiled, to let garbage collection
194 reclaim the memory. */
196 void
197 free_after_compilation (struct function *f)
199 prologue_insn_hash = NULL;
200 epilogue_insn_hash = NULL;
202 free (crtl->emit.regno_pointer_align);
204 memset (crtl, 0, sizeof (struct rtl_data));
205 f->eh = NULL;
206 f->machine = NULL;
207 f->cfg = NULL;
208 f->curr_properties &= ~PROP_cfg;
210 regno_reg_rtx = NULL;
213 /* Return size needed for stack frame based on slots so far allocated.
214 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
215 the caller may have to do that. */
217 HOST_WIDE_INT
218 get_frame_size (void)
220 if (FRAME_GROWS_DOWNWARD)
221 return -frame_offset;
222 else
223 return frame_offset;
226 /* Issue an error message and return TRUE if frame OFFSET overflows in
227 the signed target pointer arithmetics for function FUNC. Otherwise
228 return FALSE. */
230 bool
231 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
233 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
235 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
236 /* Leave room for the fixed part of the frame. */
237 - 64 * UNITS_PER_WORD)
239 error_at (DECL_SOURCE_LOCATION (func),
240 "total size of local objects too large");
241 return TRUE;
244 return FALSE;
247 /* Return stack slot alignment in bits for TYPE and MODE. */
249 static unsigned int
250 get_stack_local_alignment (tree type, machine_mode mode)
252 unsigned int alignment;
254 if (mode == BLKmode)
255 alignment = BIGGEST_ALIGNMENT;
256 else
257 alignment = GET_MODE_ALIGNMENT (mode);
259 /* Allow the frond-end to (possibly) increase the alignment of this
260 stack slot. */
261 if (! type)
262 type = lang_hooks.types.type_for_mode (mode, 0);
264 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
267 /* Determine whether it is possible to fit a stack slot of size SIZE and
268 alignment ALIGNMENT into an area in the stack frame that starts at
269 frame offset START and has a length of LENGTH. If so, store the frame
270 offset to be used for the stack slot in *POFFSET and return true;
271 return false otherwise. This function will extend the frame size when
272 given a start/length pair that lies at the end of the frame. */
274 static bool
275 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
276 HOST_WIDE_INT size, unsigned int alignment,
277 HOST_WIDE_INT *poffset)
279 HOST_WIDE_INT this_frame_offset;
280 int frame_off, frame_alignment, frame_phase;
282 /* Calculate how many bytes the start of local variables is off from
283 stack alignment. */
284 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
285 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
286 frame_phase = frame_off ? frame_alignment - frame_off : 0;
288 /* Round the frame offset to the specified alignment. */
290 /* We must be careful here, since FRAME_OFFSET might be negative and
291 division with a negative dividend isn't as well defined as we might
292 like. So we instead assume that ALIGNMENT is a power of two and
293 use logical operations which are unambiguous. */
294 if (FRAME_GROWS_DOWNWARD)
295 this_frame_offset
296 = (FLOOR_ROUND (start + length - size - frame_phase,
297 (unsigned HOST_WIDE_INT) alignment)
298 + frame_phase);
299 else
300 this_frame_offset
301 = (CEIL_ROUND (start - frame_phase,
302 (unsigned HOST_WIDE_INT) alignment)
303 + frame_phase);
305 /* See if it fits. If this space is at the edge of the frame,
306 consider extending the frame to make it fit. Our caller relies on
307 this when allocating a new slot. */
308 if (frame_offset == start && this_frame_offset < frame_offset)
309 frame_offset = this_frame_offset;
310 else if (this_frame_offset < start)
311 return false;
312 else if (start + length == frame_offset
313 && this_frame_offset + size > start + length)
314 frame_offset = this_frame_offset + size;
315 else if (this_frame_offset + size > start + length)
316 return false;
318 *poffset = this_frame_offset;
319 return true;
322 /* Create a new frame_space structure describing free space in the stack
323 frame beginning at START and ending at END, and chain it into the
324 function's frame_space_list. */
326 static void
327 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
329 struct frame_space *space = ggc_alloc<frame_space> ();
330 space->next = crtl->frame_space_list;
331 crtl->frame_space_list = space;
332 space->start = start;
333 space->length = end - start;
336 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
337 with machine mode MODE.
339 ALIGN controls the amount of alignment for the address of the slot:
340 0 means according to MODE,
341 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
342 -2 means use BITS_PER_UNIT,
343 positive specifies alignment boundary in bits.
345 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
346 alignment and ASLK_RECORD_PAD bit set if we should remember
347 extra space we allocated for alignment purposes. When we are
348 called from assign_stack_temp_for_type, it is not set so we don't
349 track the same stack slot in two independent lists.
351 We do not round to stack_boundary here. */
354 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
355 int align, int kind)
357 rtx x, addr;
358 int bigend_correction = 0;
359 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
360 unsigned int alignment, alignment_in_bits;
362 if (align == 0)
364 alignment = get_stack_local_alignment (NULL, mode);
365 alignment /= BITS_PER_UNIT;
367 else if (align == -1)
369 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
370 size = CEIL_ROUND (size, alignment);
372 else if (align == -2)
373 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
374 else
375 alignment = align / BITS_PER_UNIT;
377 alignment_in_bits = alignment * BITS_PER_UNIT;
379 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
380 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
382 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
383 alignment = alignment_in_bits / BITS_PER_UNIT;
386 if (SUPPORTS_STACK_ALIGNMENT)
388 if (crtl->stack_alignment_estimated < alignment_in_bits)
390 if (!crtl->stack_realign_processed)
391 crtl->stack_alignment_estimated = alignment_in_bits;
392 else
394 /* If stack is realigned and stack alignment value
395 hasn't been finalized, it is OK not to increase
396 stack_alignment_estimated. The bigger alignment
397 requirement is recorded in stack_alignment_needed
398 below. */
399 gcc_assert (!crtl->stack_realign_finalized);
400 if (!crtl->stack_realign_needed)
402 /* It is OK to reduce the alignment as long as the
403 requested size is 0 or the estimated stack
404 alignment >= mode alignment. */
405 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
406 || size == 0
407 || (crtl->stack_alignment_estimated
408 >= GET_MODE_ALIGNMENT (mode)));
409 alignment_in_bits = crtl->stack_alignment_estimated;
410 alignment = alignment_in_bits / BITS_PER_UNIT;
416 if (crtl->stack_alignment_needed < alignment_in_bits)
417 crtl->stack_alignment_needed = alignment_in_bits;
418 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
419 crtl->max_used_stack_slot_alignment = alignment_in_bits;
421 if (mode != BLKmode || size != 0)
423 if (kind & ASLK_RECORD_PAD)
425 struct frame_space **psp;
427 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
429 struct frame_space *space = *psp;
430 if (!try_fit_stack_local (space->start, space->length, size,
431 alignment, &slot_offset))
432 continue;
433 *psp = space->next;
434 if (slot_offset > space->start)
435 add_frame_space (space->start, slot_offset);
436 if (slot_offset + size < space->start + space->length)
437 add_frame_space (slot_offset + size,
438 space->start + space->length);
439 goto found_space;
443 else if (!STACK_ALIGNMENT_NEEDED)
445 slot_offset = frame_offset;
446 goto found_space;
449 old_frame_offset = frame_offset;
451 if (FRAME_GROWS_DOWNWARD)
453 frame_offset -= size;
454 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
456 if (kind & ASLK_RECORD_PAD)
458 if (slot_offset > frame_offset)
459 add_frame_space (frame_offset, slot_offset);
460 if (slot_offset + size < old_frame_offset)
461 add_frame_space (slot_offset + size, old_frame_offset);
464 else
466 frame_offset += size;
467 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
469 if (kind & ASLK_RECORD_PAD)
471 if (slot_offset > old_frame_offset)
472 add_frame_space (old_frame_offset, slot_offset);
473 if (slot_offset + size < frame_offset)
474 add_frame_space (slot_offset + size, frame_offset);
478 found_space:
479 /* On a big-endian machine, if we are allocating more space than we will use,
480 use the least significant bytes of those that are allocated. */
481 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
482 bigend_correction = size - GET_MODE_SIZE (mode);
484 /* If we have already instantiated virtual registers, return the actual
485 address relative to the frame pointer. */
486 if (virtuals_instantiated)
487 addr = plus_constant (Pmode, frame_pointer_rtx,
488 trunc_int_for_mode
489 (slot_offset + bigend_correction
490 + STARTING_FRAME_OFFSET, Pmode));
491 else
492 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
493 trunc_int_for_mode
494 (slot_offset + bigend_correction,
495 Pmode));
497 x = gen_rtx_MEM (mode, addr);
498 set_mem_align (x, alignment_in_bits);
499 MEM_NOTRAP_P (x) = 1;
501 stack_slot_list
502 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
504 if (frame_offset_overflow (frame_offset, current_function_decl))
505 frame_offset = 0;
507 return x;
510 /* Wrap up assign_stack_local_1 with last parameter as false. */
513 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
515 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
518 /* In order to evaluate some expressions, such as function calls returning
519 structures in memory, we need to temporarily allocate stack locations.
520 We record each allocated temporary in the following structure.
522 Associated with each temporary slot is a nesting level. When we pop up
523 one level, all temporaries associated with the previous level are freed.
524 Normally, all temporaries are freed after the execution of the statement
525 in which they were created. However, if we are inside a ({...}) grouping,
526 the result may be in a temporary and hence must be preserved. If the
527 result could be in a temporary, we preserve it if we can determine which
528 one it is in. If we cannot determine which temporary may contain the
529 result, all temporaries are preserved. A temporary is preserved by
530 pretending it was allocated at the previous nesting level. */
532 struct GTY(()) temp_slot {
533 /* Points to next temporary slot. */
534 struct temp_slot *next;
535 /* Points to previous temporary slot. */
536 struct temp_slot *prev;
537 /* The rtx to used to reference the slot. */
538 rtx slot;
539 /* The size, in units, of the slot. */
540 HOST_WIDE_INT size;
541 /* The type of the object in the slot, or zero if it doesn't correspond
542 to a type. We use this to determine whether a slot can be reused.
543 It can be reused if objects of the type of the new slot will always
544 conflict with objects of the type of the old slot. */
545 tree type;
546 /* The alignment (in bits) of the slot. */
547 unsigned int align;
548 /* Nonzero if this temporary is currently in use. */
549 char in_use;
550 /* Nesting level at which this slot is being used. */
551 int level;
552 /* The offset of the slot from the frame_pointer, including extra space
553 for alignment. This info is for combine_temp_slots. */
554 HOST_WIDE_INT base_offset;
555 /* The size of the slot, including extra space for alignment. This
556 info is for combine_temp_slots. */
557 HOST_WIDE_INT full_size;
560 /* Entry for the below hash table. */
561 struct GTY((for_user)) temp_slot_address_entry {
562 hashval_t hash;
563 rtx address;
564 struct temp_slot *temp_slot;
567 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
569 static hashval_t hash (temp_slot_address_entry *);
570 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
573 /* A table of addresses that represent a stack slot. The table is a mapping
574 from address RTXen to a temp slot. */
575 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
576 static size_t n_temp_slots_in_use;
578 /* Removes temporary slot TEMP from LIST. */
580 static void
581 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
583 if (temp->next)
584 temp->next->prev = temp->prev;
585 if (temp->prev)
586 temp->prev->next = temp->next;
587 else
588 *list = temp->next;
590 temp->prev = temp->next = NULL;
593 /* Inserts temporary slot TEMP to LIST. */
595 static void
596 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
598 temp->next = *list;
599 if (*list)
600 (*list)->prev = temp;
601 temp->prev = NULL;
602 *list = temp;
605 /* Returns the list of used temp slots at LEVEL. */
607 static struct temp_slot **
608 temp_slots_at_level (int level)
610 if (level >= (int) vec_safe_length (used_temp_slots))
611 vec_safe_grow_cleared (used_temp_slots, level + 1);
613 return &(*used_temp_slots)[level];
616 /* Returns the maximal temporary slot level. */
618 static int
619 max_slot_level (void)
621 if (!used_temp_slots)
622 return -1;
624 return used_temp_slots->length () - 1;
627 /* Moves temporary slot TEMP to LEVEL. */
629 static void
630 move_slot_to_level (struct temp_slot *temp, int level)
632 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
633 insert_slot_to_list (temp, temp_slots_at_level (level));
634 temp->level = level;
637 /* Make temporary slot TEMP available. */
639 static void
640 make_slot_available (struct temp_slot *temp)
642 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
643 insert_slot_to_list (temp, &avail_temp_slots);
644 temp->in_use = 0;
645 temp->level = -1;
646 n_temp_slots_in_use--;
649 /* Compute the hash value for an address -> temp slot mapping.
650 The value is cached on the mapping entry. */
651 static hashval_t
652 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
654 int do_not_record = 0;
655 return hash_rtx (t->address, GET_MODE (t->address),
656 &do_not_record, NULL, false);
659 /* Return the hash value for an address -> temp slot mapping. */
660 hashval_t
661 temp_address_hasher::hash (temp_slot_address_entry *t)
663 return t->hash;
666 /* Compare two address -> temp slot mapping entries. */
667 bool
668 temp_address_hasher::equal (temp_slot_address_entry *t1,
669 temp_slot_address_entry *t2)
671 return exp_equiv_p (t1->address, t2->address, 0, true);
674 /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
675 static void
676 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
678 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
679 t->address = address;
680 t->temp_slot = temp_slot;
681 t->hash = temp_slot_address_compute_hash (t);
682 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
685 /* Remove an address -> temp slot mapping entry if the temp slot is
686 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
688 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
690 const struct temp_slot_address_entry *t = *slot;
691 if (! t->temp_slot->in_use)
692 temp_slot_address_table->clear_slot (slot);
693 return 1;
696 /* Remove all mappings of addresses to unused temp slots. */
697 static void
698 remove_unused_temp_slot_addresses (void)
700 /* Use quicker clearing if there aren't any active temp slots. */
701 if (n_temp_slots_in_use)
702 temp_slot_address_table->traverse
703 <void *, remove_unused_temp_slot_addresses_1> (NULL);
704 else
705 temp_slot_address_table->empty ();
708 /* Find the temp slot corresponding to the object at address X. */
710 static struct temp_slot *
711 find_temp_slot_from_address (rtx x)
713 struct temp_slot *p;
714 struct temp_slot_address_entry tmp, *t;
716 /* First try the easy way:
717 See if X exists in the address -> temp slot mapping. */
718 tmp.address = x;
719 tmp.temp_slot = NULL;
720 tmp.hash = temp_slot_address_compute_hash (&tmp);
721 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
722 if (t)
723 return t->temp_slot;
725 /* If we have a sum involving a register, see if it points to a temp
726 slot. */
727 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
728 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
729 return p;
730 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
731 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
732 return p;
734 /* Last resort: Address is a virtual stack var address. */
735 if (GET_CODE (x) == PLUS
736 && XEXP (x, 0) == virtual_stack_vars_rtx
737 && CONST_INT_P (XEXP (x, 1)))
739 int i;
740 for (i = max_slot_level (); i >= 0; i--)
741 for (p = *temp_slots_at_level (i); p; p = p->next)
743 if (INTVAL (XEXP (x, 1)) >= p->base_offset
744 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
745 return p;
749 return NULL;
752 /* Allocate a temporary stack slot and record it for possible later
753 reuse.
755 MODE is the machine mode to be given to the returned rtx.
757 SIZE is the size in units of the space required. We do no rounding here
758 since assign_stack_local will do any required rounding.
760 TYPE is the type that will be used for the stack slot. */
763 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
764 tree type)
766 unsigned int align;
767 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
768 rtx slot;
770 /* If SIZE is -1 it means that somebody tried to allocate a temporary
771 of a variable size. */
772 gcc_assert (size != -1);
774 align = get_stack_local_alignment (type, mode);
776 /* Try to find an available, already-allocated temporary of the proper
777 mode which meets the size and alignment requirements. Choose the
778 smallest one with the closest alignment.
780 If assign_stack_temp is called outside of the tree->rtl expansion,
781 we cannot reuse the stack slots (that may still refer to
782 VIRTUAL_STACK_VARS_REGNUM). */
783 if (!virtuals_instantiated)
785 for (p = avail_temp_slots; p; p = p->next)
787 if (p->align >= align && p->size >= size
788 && GET_MODE (p->slot) == mode
789 && objects_must_conflict_p (p->type, type)
790 && (best_p == 0 || best_p->size > p->size
791 || (best_p->size == p->size && best_p->align > p->align)))
793 if (p->align == align && p->size == size)
795 selected = p;
796 cut_slot_from_list (selected, &avail_temp_slots);
797 best_p = 0;
798 break;
800 best_p = p;
805 /* Make our best, if any, the one to use. */
806 if (best_p)
808 selected = best_p;
809 cut_slot_from_list (selected, &avail_temp_slots);
811 /* If there are enough aligned bytes left over, make them into a new
812 temp_slot so that the extra bytes don't get wasted. Do this only
813 for BLKmode slots, so that we can be sure of the alignment. */
814 if (GET_MODE (best_p->slot) == BLKmode)
816 int alignment = best_p->align / BITS_PER_UNIT;
817 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
819 if (best_p->size - rounded_size >= alignment)
821 p = ggc_alloc<temp_slot> ();
822 p->in_use = 0;
823 p->size = best_p->size - rounded_size;
824 p->base_offset = best_p->base_offset + rounded_size;
825 p->full_size = best_p->full_size - rounded_size;
826 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
827 p->align = best_p->align;
828 p->type = best_p->type;
829 insert_slot_to_list (p, &avail_temp_slots);
831 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
832 stack_slot_list);
834 best_p->size = rounded_size;
835 best_p->full_size = rounded_size;
840 /* If we still didn't find one, make a new temporary. */
841 if (selected == 0)
843 HOST_WIDE_INT frame_offset_old = frame_offset;
845 p = ggc_alloc<temp_slot> ();
847 /* We are passing an explicit alignment request to assign_stack_local.
848 One side effect of that is assign_stack_local will not round SIZE
849 to ensure the frame offset remains suitably aligned.
851 So for requests which depended on the rounding of SIZE, we go ahead
852 and round it now. We also make sure ALIGNMENT is at least
853 BIGGEST_ALIGNMENT. */
854 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
855 p->slot = assign_stack_local_1 (mode,
856 (mode == BLKmode
857 ? CEIL_ROUND (size,
858 (int) align
859 / BITS_PER_UNIT)
860 : size),
861 align, 0);
863 p->align = align;
865 /* The following slot size computation is necessary because we don't
866 know the actual size of the temporary slot until assign_stack_local
867 has performed all the frame alignment and size rounding for the
868 requested temporary. Note that extra space added for alignment
869 can be either above or below this stack slot depending on which
870 way the frame grows. We include the extra space if and only if it
871 is above this slot. */
872 if (FRAME_GROWS_DOWNWARD)
873 p->size = frame_offset_old - frame_offset;
874 else
875 p->size = size;
877 /* Now define the fields used by combine_temp_slots. */
878 if (FRAME_GROWS_DOWNWARD)
880 p->base_offset = frame_offset;
881 p->full_size = frame_offset_old - frame_offset;
883 else
885 p->base_offset = frame_offset_old;
886 p->full_size = frame_offset - frame_offset_old;
889 selected = p;
892 p = selected;
893 p->in_use = 1;
894 p->type = type;
895 p->level = temp_slot_level;
896 n_temp_slots_in_use++;
898 pp = temp_slots_at_level (p->level);
899 insert_slot_to_list (p, pp);
900 insert_temp_slot_address (XEXP (p->slot, 0), p);
902 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
903 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
904 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
906 /* If we know the alias set for the memory that will be used, use
907 it. If there's no TYPE, then we don't know anything about the
908 alias set for the memory. */
909 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
910 set_mem_align (slot, align);
912 /* If a type is specified, set the relevant flags. */
913 if (type != 0)
914 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
915 MEM_NOTRAP_P (slot) = 1;
917 return slot;
920 /* Allocate a temporary stack slot and record it for possible later
921 reuse. First two arguments are same as in preceding function. */
924 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
926 return assign_stack_temp_for_type (mode, size, NULL_TREE);
929 /* Assign a temporary.
930 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
931 and so that should be used in error messages. In either case, we
932 allocate of the given type.
933 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
934 it is 0 if a register is OK.
935 DONT_PROMOTE is 1 if we should not promote values in register
936 to wider modes. */
939 assign_temp (tree type_or_decl, int memory_required,
940 int dont_promote ATTRIBUTE_UNUSED)
942 tree type, decl;
943 machine_mode mode;
944 #ifdef PROMOTE_MODE
945 int unsignedp;
946 #endif
948 if (DECL_P (type_or_decl))
949 decl = type_or_decl, type = TREE_TYPE (decl);
950 else
951 decl = NULL, type = type_or_decl;
953 mode = TYPE_MODE (type);
954 #ifdef PROMOTE_MODE
955 unsignedp = TYPE_UNSIGNED (type);
956 #endif
958 if (mode == BLKmode || memory_required)
960 HOST_WIDE_INT size = int_size_in_bytes (type);
961 rtx tmp;
963 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
964 problems with allocating the stack space. */
965 if (size == 0)
966 size = 1;
968 /* Unfortunately, we don't yet know how to allocate variable-sized
969 temporaries. However, sometimes we can find a fixed upper limit on
970 the size, so try that instead. */
971 else if (size == -1)
972 size = max_int_size_in_bytes (type);
974 /* The size of the temporary may be too large to fit into an integer. */
975 /* ??? Not sure this should happen except for user silliness, so limit
976 this to things that aren't compiler-generated temporaries. The
977 rest of the time we'll die in assign_stack_temp_for_type. */
978 if (decl && size == -1
979 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
981 error ("size of variable %q+D is too large", decl);
982 size = 1;
985 tmp = assign_stack_temp_for_type (mode, size, type);
986 return tmp;
989 #ifdef PROMOTE_MODE
990 if (! dont_promote)
991 mode = promote_mode (type, mode, &unsignedp);
992 #endif
994 return gen_reg_rtx (mode);
997 /* Combine temporary stack slots which are adjacent on the stack.
999 This allows for better use of already allocated stack space. This is only
1000 done for BLKmode slots because we can be sure that we won't have alignment
1001 problems in this case. */
1003 static void
1004 combine_temp_slots (void)
1006 struct temp_slot *p, *q, *next, *next_q;
1007 int num_slots;
1009 /* We can't combine slots, because the information about which slot
1010 is in which alias set will be lost. */
1011 if (flag_strict_aliasing)
1012 return;
1014 /* If there are a lot of temp slots, don't do anything unless
1015 high levels of optimization. */
1016 if (! flag_expensive_optimizations)
1017 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1018 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1019 return;
1021 for (p = avail_temp_slots; p; p = next)
1023 int delete_p = 0;
1025 next = p->next;
1027 if (GET_MODE (p->slot) != BLKmode)
1028 continue;
1030 for (q = p->next; q; q = next_q)
1032 int delete_q = 0;
1034 next_q = q->next;
1036 if (GET_MODE (q->slot) != BLKmode)
1037 continue;
1039 if (p->base_offset + p->full_size == q->base_offset)
1041 /* Q comes after P; combine Q into P. */
1042 p->size += q->size;
1043 p->full_size += q->full_size;
1044 delete_q = 1;
1046 else if (q->base_offset + q->full_size == p->base_offset)
1048 /* P comes after Q; combine P into Q. */
1049 q->size += p->size;
1050 q->full_size += p->full_size;
1051 delete_p = 1;
1052 break;
1054 if (delete_q)
1055 cut_slot_from_list (q, &avail_temp_slots);
1058 /* Either delete P or advance past it. */
1059 if (delete_p)
1060 cut_slot_from_list (p, &avail_temp_slots);
1064 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1065 slot that previously was known by OLD_RTX. */
1067 void
1068 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1070 struct temp_slot *p;
1072 if (rtx_equal_p (old_rtx, new_rtx))
1073 return;
1075 p = find_temp_slot_from_address (old_rtx);
1077 /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1078 NEW_RTX is a register, see if one operand of the PLUS is a
1079 temporary location. If so, NEW_RTX points into it. Otherwise,
1080 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1081 in common between them. If so, try a recursive call on those
1082 values. */
1083 if (p == 0)
1085 if (GET_CODE (old_rtx) != PLUS)
1086 return;
1088 if (REG_P (new_rtx))
1090 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1091 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1092 return;
1094 else if (GET_CODE (new_rtx) != PLUS)
1095 return;
1097 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1098 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1099 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1100 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1101 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1102 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1103 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1104 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1106 return;
1109 /* Otherwise add an alias for the temp's address. */
1110 insert_temp_slot_address (new_rtx, p);
1113 /* If X could be a reference to a temporary slot, mark that slot as
1114 belonging to the to one level higher than the current level. If X
1115 matched one of our slots, just mark that one. Otherwise, we can't
1116 easily predict which it is, so upgrade all of them.
1118 This is called when an ({...}) construct occurs and a statement
1119 returns a value in memory. */
1121 void
1122 preserve_temp_slots (rtx x)
1124 struct temp_slot *p = 0, *next;
1126 if (x == 0)
1127 return;
1129 /* If X is a register that is being used as a pointer, see if we have
1130 a temporary slot we know it points to. */
1131 if (REG_P (x) && REG_POINTER (x))
1132 p = find_temp_slot_from_address (x);
1134 /* If X is not in memory or is at a constant address, it cannot be in
1135 a temporary slot. */
1136 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1137 return;
1139 /* First see if we can find a match. */
1140 if (p == 0)
1141 p = find_temp_slot_from_address (XEXP (x, 0));
1143 if (p != 0)
1145 if (p->level == temp_slot_level)
1146 move_slot_to_level (p, temp_slot_level - 1);
1147 return;
1150 /* Otherwise, preserve all non-kept slots at this level. */
1151 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1153 next = p->next;
1154 move_slot_to_level (p, temp_slot_level - 1);
1158 /* Free all temporaries used so far. This is normally called at the
1159 end of generating code for a statement. */
1161 void
1162 free_temp_slots (void)
1164 struct temp_slot *p, *next;
1165 bool some_available = false;
1167 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1169 next = p->next;
1170 make_slot_available (p);
1171 some_available = true;
1174 if (some_available)
1176 remove_unused_temp_slot_addresses ();
1177 combine_temp_slots ();
1181 /* Push deeper into the nesting level for stack temporaries. */
1183 void
1184 push_temp_slots (void)
1186 temp_slot_level++;
1189 /* Pop a temporary nesting level. All slots in use in the current level
1190 are freed. */
1192 void
1193 pop_temp_slots (void)
1195 free_temp_slots ();
1196 temp_slot_level--;
1199 /* Initialize temporary slots. */
1201 void
1202 init_temp_slots (void)
1204 /* We have not allocated any temporaries yet. */
1205 avail_temp_slots = 0;
1206 vec_alloc (used_temp_slots, 0);
1207 temp_slot_level = 0;
1208 n_temp_slots_in_use = 0;
1210 /* Set up the table to map addresses to temp slots. */
1211 if (! temp_slot_address_table)
1212 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1213 else
1214 temp_slot_address_table->empty ();
1217 /* Functions and data structures to keep track of the values hard regs
1218 had at the start of the function. */
1220 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1221 and has_hard_reg_initial_val.. */
1222 struct GTY(()) initial_value_pair {
1223 rtx hard_reg;
1224 rtx pseudo;
1226 /* ??? This could be a VEC but there is currently no way to define an
1227 opaque VEC type. This could be worked around by defining struct
1228 initial_value_pair in function.h. */
1229 struct GTY(()) initial_value_struct {
1230 int num_entries;
1231 int max_entries;
1232 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1235 /* If a pseudo represents an initial hard reg (or expression), return
1236 it, else return NULL_RTX. */
1239 get_hard_reg_initial_reg (rtx reg)
1241 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1242 int i;
1244 if (ivs == 0)
1245 return NULL_RTX;
1247 for (i = 0; i < ivs->num_entries; i++)
1248 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1249 return ivs->entries[i].hard_reg;
1251 return NULL_RTX;
1254 /* Make sure that there's a pseudo register of mode MODE that stores the
1255 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1258 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1260 struct initial_value_struct *ivs;
1261 rtx rv;
1263 rv = has_hard_reg_initial_val (mode, regno);
1264 if (rv)
1265 return rv;
1267 ivs = crtl->hard_reg_initial_vals;
1268 if (ivs == 0)
1270 ivs = ggc_alloc<initial_value_struct> ();
1271 ivs->num_entries = 0;
1272 ivs->max_entries = 5;
1273 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1274 crtl->hard_reg_initial_vals = ivs;
1277 if (ivs->num_entries >= ivs->max_entries)
1279 ivs->max_entries += 5;
1280 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1281 ivs->max_entries);
1284 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1285 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1287 return ivs->entries[ivs->num_entries++].pseudo;
1290 /* See if get_hard_reg_initial_val has been used to create a pseudo
1291 for the initial value of hard register REGNO in mode MODE. Return
1292 the associated pseudo if so, otherwise return NULL. */
1295 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1297 struct initial_value_struct *ivs;
1298 int i;
1300 ivs = crtl->hard_reg_initial_vals;
1301 if (ivs != 0)
1302 for (i = 0; i < ivs->num_entries; i++)
1303 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1304 && REGNO (ivs->entries[i].hard_reg) == regno)
1305 return ivs->entries[i].pseudo;
1307 return NULL_RTX;
1310 unsigned int
1311 emit_initial_value_sets (void)
1313 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1314 int i;
1315 rtx_insn *seq;
1317 if (ivs == 0)
1318 return 0;
1320 start_sequence ();
1321 for (i = 0; i < ivs->num_entries; i++)
1322 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1323 seq = get_insns ();
1324 end_sequence ();
1326 emit_insn_at_entry (seq);
1327 return 0;
1330 /* Return the hardreg-pseudoreg initial values pair entry I and
1331 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1332 bool
1333 initial_value_entry (int i, rtx *hreg, rtx *preg)
1335 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1336 if (!ivs || i >= ivs->num_entries)
1337 return false;
1339 *hreg = ivs->entries[i].hard_reg;
1340 *preg = ivs->entries[i].pseudo;
1341 return true;
1344 /* These routines are responsible for converting virtual register references
1345 to the actual hard register references once RTL generation is complete.
1347 The following four variables are used for communication between the
1348 routines. They contain the offsets of the virtual registers from their
1349 respective hard registers. */
1351 static int in_arg_offset;
1352 static int var_offset;
1353 static int dynamic_offset;
1354 static int out_arg_offset;
1355 static int cfa_offset;
1357 /* In most machines, the stack pointer register is equivalent to the bottom
1358 of the stack. */
1360 #ifndef STACK_POINTER_OFFSET
1361 #define STACK_POINTER_OFFSET 0
1362 #endif
1364 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1365 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1366 #endif
1368 /* If not defined, pick an appropriate default for the offset of dynamically
1369 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1370 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1372 #ifndef STACK_DYNAMIC_OFFSET
1374 /* The bottom of the stack points to the actual arguments. If
1375 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1376 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1377 stack space for register parameters is not pushed by the caller, but
1378 rather part of the fixed stack areas and hence not included in
1379 `crtl->outgoing_args_size'. Nevertheless, we must allow
1380 for it when allocating stack dynamic objects. */
1382 #ifdef INCOMING_REG_PARM_STACK_SPACE
1383 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1384 ((ACCUMULATE_OUTGOING_ARGS \
1385 ? (crtl->outgoing_args_size \
1386 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1387 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1388 : 0) + (STACK_POINTER_OFFSET))
1389 #else
1390 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1391 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1392 + (STACK_POINTER_OFFSET))
1393 #endif
1394 #endif
1397 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1398 is a virtual register, return the equivalent hard register and set the
1399 offset indirectly through the pointer. Otherwise, return 0. */
1401 static rtx
1402 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1404 rtx new_rtx;
1405 HOST_WIDE_INT offset;
1407 if (x == virtual_incoming_args_rtx)
1409 if (stack_realign_drap)
1411 /* Replace virtual_incoming_args_rtx with internal arg
1412 pointer if DRAP is used to realign stack. */
1413 new_rtx = crtl->args.internal_arg_pointer;
1414 offset = 0;
1416 else
1417 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1419 else if (x == virtual_stack_vars_rtx)
1420 new_rtx = frame_pointer_rtx, offset = var_offset;
1421 else if (x == virtual_stack_dynamic_rtx)
1422 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1423 else if (x == virtual_outgoing_args_rtx)
1424 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1425 else if (x == virtual_cfa_rtx)
1427 #ifdef FRAME_POINTER_CFA_OFFSET
1428 new_rtx = frame_pointer_rtx;
1429 #else
1430 new_rtx = arg_pointer_rtx;
1431 #endif
1432 offset = cfa_offset;
1434 else if (x == virtual_preferred_stack_boundary_rtx)
1436 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1437 offset = 0;
1439 else
1440 return NULL_RTX;
1442 *poffset = offset;
1443 return new_rtx;
1446 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1447 registers present inside of *LOC. The expression is simplified,
1448 as much as possible, but is not to be considered "valid" in any sense
1449 implied by the target. Return true if any change is made. */
1451 static bool
1452 instantiate_virtual_regs_in_rtx (rtx *loc)
1454 if (!*loc)
1455 return false;
1456 bool changed = false;
1457 subrtx_ptr_iterator::array_type array;
1458 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1460 rtx *loc = *iter;
1461 if (rtx x = *loc)
1463 rtx new_rtx;
1464 HOST_WIDE_INT offset;
1465 switch (GET_CODE (x))
1467 case REG:
1468 new_rtx = instantiate_new_reg (x, &offset);
1469 if (new_rtx)
1471 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1472 changed = true;
1474 iter.skip_subrtxes ();
1475 break;
1477 case PLUS:
1478 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1479 if (new_rtx)
1481 XEXP (x, 0) = new_rtx;
1482 *loc = plus_constant (GET_MODE (x), x, offset, true);
1483 changed = true;
1484 iter.skip_subrtxes ();
1485 break;
1488 /* FIXME -- from old code */
1489 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1490 we can commute the PLUS and SUBREG because pointers into the
1491 frame are well-behaved. */
1492 break;
1494 default:
1495 break;
1499 return changed;
1502 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1503 matches the predicate for insn CODE operand OPERAND. */
1505 static int
1506 safe_insn_predicate (int code, int operand, rtx x)
1508 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1511 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1512 registers present inside of insn. The result will be a valid insn. */
1514 static void
1515 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1517 HOST_WIDE_INT offset;
1518 int insn_code, i;
1519 bool any_change = false;
1520 rtx set, new_rtx, x;
1521 rtx_insn *seq;
1523 /* There are some special cases to be handled first. */
1524 set = single_set (insn);
1525 if (set)
1527 /* We're allowed to assign to a virtual register. This is interpreted
1528 to mean that the underlying register gets assigned the inverse
1529 transformation. This is used, for example, in the handling of
1530 non-local gotos. */
1531 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1532 if (new_rtx)
1534 start_sequence ();
1536 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1537 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1538 gen_int_mode (-offset, GET_MODE (new_rtx)));
1539 x = force_operand (x, new_rtx);
1540 if (x != new_rtx)
1541 emit_move_insn (new_rtx, x);
1543 seq = get_insns ();
1544 end_sequence ();
1546 emit_insn_before (seq, insn);
1547 delete_insn (insn);
1548 return;
1551 /* Handle a straight copy from a virtual register by generating a
1552 new add insn. The difference between this and falling through
1553 to the generic case is avoiding a new pseudo and eliminating a
1554 move insn in the initial rtl stream. */
1555 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1556 if (new_rtx && offset != 0
1557 && REG_P (SET_DEST (set))
1558 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1560 start_sequence ();
1562 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1563 gen_int_mode (offset,
1564 GET_MODE (SET_DEST (set))),
1565 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1566 if (x != SET_DEST (set))
1567 emit_move_insn (SET_DEST (set), x);
1569 seq = get_insns ();
1570 end_sequence ();
1572 emit_insn_before (seq, insn);
1573 delete_insn (insn);
1574 return;
1577 extract_insn (insn);
1578 insn_code = INSN_CODE (insn);
1580 /* Handle a plus involving a virtual register by determining if the
1581 operands remain valid if they're modified in place. */
1582 if (GET_CODE (SET_SRC (set)) == PLUS
1583 && recog_data.n_operands >= 3
1584 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1585 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1586 && CONST_INT_P (recog_data.operand[2])
1587 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1589 offset += INTVAL (recog_data.operand[2]);
1591 /* If the sum is zero, then replace with a plain move. */
1592 if (offset == 0
1593 && REG_P (SET_DEST (set))
1594 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1596 start_sequence ();
1597 emit_move_insn (SET_DEST (set), new_rtx);
1598 seq = get_insns ();
1599 end_sequence ();
1601 emit_insn_before (seq, insn);
1602 delete_insn (insn);
1603 return;
1606 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1608 /* Using validate_change and apply_change_group here leaves
1609 recog_data in an invalid state. Since we know exactly what
1610 we want to check, do those two by hand. */
1611 if (safe_insn_predicate (insn_code, 1, new_rtx)
1612 && safe_insn_predicate (insn_code, 2, x))
1614 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1615 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1616 any_change = true;
1618 /* Fall through into the regular operand fixup loop in
1619 order to take care of operands other than 1 and 2. */
1623 else
1625 extract_insn (insn);
1626 insn_code = INSN_CODE (insn);
1629 /* In the general case, we expect virtual registers to appear only in
1630 operands, and then only as either bare registers or inside memories. */
1631 for (i = 0; i < recog_data.n_operands; ++i)
1633 x = recog_data.operand[i];
1634 switch (GET_CODE (x))
1636 case MEM:
1638 rtx addr = XEXP (x, 0);
1640 if (!instantiate_virtual_regs_in_rtx (&addr))
1641 continue;
1643 start_sequence ();
1644 x = replace_equiv_address (x, addr, true);
1645 /* It may happen that the address with the virtual reg
1646 was valid (e.g. based on the virtual stack reg, which might
1647 be acceptable to the predicates with all offsets), whereas
1648 the address now isn't anymore, for instance when the address
1649 is still offsetted, but the base reg isn't virtual-stack-reg
1650 anymore. Below we would do a force_reg on the whole operand,
1651 but this insn might actually only accept memory. Hence,
1652 before doing that last resort, try to reload the address into
1653 a register, so this operand stays a MEM. */
1654 if (!safe_insn_predicate (insn_code, i, x))
1656 addr = force_reg (GET_MODE (addr), addr);
1657 x = replace_equiv_address (x, addr, true);
1659 seq = get_insns ();
1660 end_sequence ();
1661 if (seq)
1662 emit_insn_before (seq, insn);
1664 break;
1666 case REG:
1667 new_rtx = instantiate_new_reg (x, &offset);
1668 if (new_rtx == NULL)
1669 continue;
1670 if (offset == 0)
1671 x = new_rtx;
1672 else
1674 start_sequence ();
1676 /* Careful, special mode predicates may have stuff in
1677 insn_data[insn_code].operand[i].mode that isn't useful
1678 to us for computing a new value. */
1679 /* ??? Recognize address_operand and/or "p" constraints
1680 to see if (plus new offset) is a valid before we put
1681 this through expand_simple_binop. */
1682 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1683 gen_int_mode (offset, GET_MODE (x)),
1684 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1685 seq = get_insns ();
1686 end_sequence ();
1687 emit_insn_before (seq, insn);
1689 break;
1691 case SUBREG:
1692 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1693 if (new_rtx == NULL)
1694 continue;
1695 if (offset != 0)
1697 start_sequence ();
1698 new_rtx = expand_simple_binop
1699 (GET_MODE (new_rtx), PLUS, new_rtx,
1700 gen_int_mode (offset, GET_MODE (new_rtx)),
1701 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1702 seq = get_insns ();
1703 end_sequence ();
1704 emit_insn_before (seq, insn);
1706 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1707 GET_MODE (new_rtx), SUBREG_BYTE (x));
1708 gcc_assert (x);
1709 break;
1711 default:
1712 continue;
1715 /* At this point, X contains the new value for the operand.
1716 Validate the new value vs the insn predicate. Note that
1717 asm insns will have insn_code -1 here. */
1718 if (!safe_insn_predicate (insn_code, i, x))
1720 start_sequence ();
1721 if (REG_P (x))
1723 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1724 x = copy_to_reg (x);
1726 else
1727 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1728 seq = get_insns ();
1729 end_sequence ();
1730 if (seq)
1731 emit_insn_before (seq, insn);
1734 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1735 any_change = true;
1738 if (any_change)
1740 /* Propagate operand changes into the duplicates. */
1741 for (i = 0; i < recog_data.n_dups; ++i)
1742 *recog_data.dup_loc[i]
1743 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1745 /* Force re-recognition of the instruction for validation. */
1746 INSN_CODE (insn) = -1;
1749 if (asm_noperands (PATTERN (insn)) >= 0)
1751 if (!check_asm_operands (PATTERN (insn)))
1753 error_for_asm (insn, "impossible constraint in %<asm%>");
1754 /* For asm goto, instead of fixing up all the edges
1755 just clear the template and clear input operands
1756 (asm goto doesn't have any output operands). */
1757 if (JUMP_P (insn))
1759 rtx asm_op = extract_asm_operands (PATTERN (insn));
1760 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1761 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1762 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1764 else
1765 delete_insn (insn);
1768 else
1770 if (recog_memoized (insn) < 0)
1771 fatal_insn_not_found (insn);
1775 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1776 do any instantiation required. */
1778 void
1779 instantiate_decl_rtl (rtx x)
1781 rtx addr;
1783 if (x == 0)
1784 return;
1786 /* If this is a CONCAT, recurse for the pieces. */
1787 if (GET_CODE (x) == CONCAT)
1789 instantiate_decl_rtl (XEXP (x, 0));
1790 instantiate_decl_rtl (XEXP (x, 1));
1791 return;
1794 /* If this is not a MEM, no need to do anything. Similarly if the
1795 address is a constant or a register that is not a virtual register. */
1796 if (!MEM_P (x))
1797 return;
1799 addr = XEXP (x, 0);
1800 if (CONSTANT_P (addr)
1801 || (REG_P (addr)
1802 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1803 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1804 return;
1806 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1809 /* Helper for instantiate_decls called via walk_tree: Process all decls
1810 in the given DECL_VALUE_EXPR. */
1812 static tree
1813 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1815 tree t = *tp;
1816 if (! EXPR_P (t))
1818 *walk_subtrees = 0;
1819 if (DECL_P (t))
1821 if (DECL_RTL_SET_P (t))
1822 instantiate_decl_rtl (DECL_RTL (t));
1823 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1824 && DECL_INCOMING_RTL (t))
1825 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1826 if ((TREE_CODE (t) == VAR_DECL
1827 || TREE_CODE (t) == RESULT_DECL)
1828 && DECL_HAS_VALUE_EXPR_P (t))
1830 tree v = DECL_VALUE_EXPR (t);
1831 walk_tree (&v, instantiate_expr, NULL, NULL);
1835 return NULL;
1838 /* Subroutine of instantiate_decls: Process all decls in the given
1839 BLOCK node and all its subblocks. */
1841 static void
1842 instantiate_decls_1 (tree let)
1844 tree t;
1846 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1848 if (DECL_RTL_SET_P (t))
1849 instantiate_decl_rtl (DECL_RTL (t));
1850 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1852 tree v = DECL_VALUE_EXPR (t);
1853 walk_tree (&v, instantiate_expr, NULL, NULL);
1857 /* Process all subblocks. */
1858 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1859 instantiate_decls_1 (t);
1862 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1863 all virtual registers in their DECL_RTL's. */
1865 static void
1866 instantiate_decls (tree fndecl)
1868 tree decl;
1869 unsigned ix;
1871 /* Process all parameters of the function. */
1872 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1874 instantiate_decl_rtl (DECL_RTL (decl));
1875 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1876 if (DECL_HAS_VALUE_EXPR_P (decl))
1878 tree v = DECL_VALUE_EXPR (decl);
1879 walk_tree (&v, instantiate_expr, NULL, NULL);
1883 if ((decl = DECL_RESULT (fndecl))
1884 && TREE_CODE (decl) == RESULT_DECL)
1886 if (DECL_RTL_SET_P (decl))
1887 instantiate_decl_rtl (DECL_RTL (decl));
1888 if (DECL_HAS_VALUE_EXPR_P (decl))
1890 tree v = DECL_VALUE_EXPR (decl);
1891 walk_tree (&v, instantiate_expr, NULL, NULL);
1895 /* Process the saved static chain if it exists. */
1896 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1897 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1898 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1900 /* Now process all variables defined in the function or its subblocks. */
1901 instantiate_decls_1 (DECL_INITIAL (fndecl));
1903 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1904 if (DECL_RTL_SET_P (decl))
1905 instantiate_decl_rtl (DECL_RTL (decl));
1906 vec_free (cfun->local_decls);
1909 /* Pass through the INSNS of function FNDECL and convert virtual register
1910 references to hard register references. */
1912 static unsigned int
1913 instantiate_virtual_regs (void)
1915 rtx_insn *insn;
1917 /* Compute the offsets to use for this function. */
1918 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1919 var_offset = STARTING_FRAME_OFFSET;
1920 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1921 out_arg_offset = STACK_POINTER_OFFSET;
1922 #ifdef FRAME_POINTER_CFA_OFFSET
1923 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1924 #else
1925 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1926 #endif
1928 /* Initialize recognition, indicating that volatile is OK. */
1929 init_recog ();
1931 /* Scan through all the insns, instantiating every virtual register still
1932 present. */
1933 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1934 if (INSN_P (insn))
1936 /* These patterns in the instruction stream can never be recognized.
1937 Fortunately, they shouldn't contain virtual registers either. */
1938 if (GET_CODE (PATTERN (insn)) == USE
1939 || GET_CODE (PATTERN (insn)) == CLOBBER
1940 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1941 continue;
1942 else if (DEBUG_INSN_P (insn))
1943 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1944 else
1945 instantiate_virtual_regs_in_insn (insn);
1947 if (insn->deleted ())
1948 continue;
1950 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1952 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1953 if (CALL_P (insn))
1954 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1957 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1958 instantiate_decls (current_function_decl);
1960 targetm.instantiate_decls ();
1962 /* Indicate that, from now on, assign_stack_local should use
1963 frame_pointer_rtx. */
1964 virtuals_instantiated = 1;
1966 return 0;
1969 namespace {
1971 const pass_data pass_data_instantiate_virtual_regs =
1973 RTL_PASS, /* type */
1974 "vregs", /* name */
1975 OPTGROUP_NONE, /* optinfo_flags */
1976 TV_NONE, /* tv_id */
1977 0, /* properties_required */
1978 0, /* properties_provided */
1979 0, /* properties_destroyed */
1980 0, /* todo_flags_start */
1981 0, /* todo_flags_finish */
1984 class pass_instantiate_virtual_regs : public rtl_opt_pass
1986 public:
1987 pass_instantiate_virtual_regs (gcc::context *ctxt)
1988 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1991 /* opt_pass methods: */
1992 virtual unsigned int execute (function *)
1994 return instantiate_virtual_regs ();
1997 }; // class pass_instantiate_virtual_regs
1999 } // anon namespace
2001 rtl_opt_pass *
2002 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2004 return new pass_instantiate_virtual_regs (ctxt);
2008 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2009 This means a type for which function calls must pass an address to the
2010 function or get an address back from the function.
2011 EXP may be a type node or an expression (whose type is tested). */
2014 aggregate_value_p (const_tree exp, const_tree fntype)
2016 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2017 int i, regno, nregs;
2018 rtx reg;
2020 if (fntype)
2021 switch (TREE_CODE (fntype))
2023 case CALL_EXPR:
2025 tree fndecl = get_callee_fndecl (fntype);
2026 if (fndecl)
2027 fntype = TREE_TYPE (fndecl);
2028 else if (CALL_EXPR_FN (fntype))
2029 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2030 else
2031 /* For internal functions, assume nothing needs to be
2032 returned in memory. */
2033 return 0;
2035 break;
2036 case FUNCTION_DECL:
2037 fntype = TREE_TYPE (fntype);
2038 break;
2039 case FUNCTION_TYPE:
2040 case METHOD_TYPE:
2041 break;
2042 case IDENTIFIER_NODE:
2043 fntype = NULL_TREE;
2044 break;
2045 default:
2046 /* We don't expect other tree types here. */
2047 gcc_unreachable ();
2050 if (VOID_TYPE_P (type))
2051 return 0;
2053 /* If a record should be passed the same as its first (and only) member
2054 don't pass it as an aggregate. */
2055 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2056 return aggregate_value_p (first_field (type), fntype);
2058 /* If the front end has decided that this needs to be passed by
2059 reference, do so. */
2060 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2061 && DECL_BY_REFERENCE (exp))
2062 return 1;
2064 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2065 if (fntype && TREE_ADDRESSABLE (fntype))
2066 return 1;
2068 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2069 and thus can't be returned in registers. */
2070 if (TREE_ADDRESSABLE (type))
2071 return 1;
2073 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2074 return 1;
2076 if (targetm.calls.return_in_memory (type, fntype))
2077 return 1;
2079 /* Make sure we have suitable call-clobbered regs to return
2080 the value in; if not, we must return it in memory. */
2081 reg = hard_function_value (type, 0, fntype, 0);
2083 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2084 it is OK. */
2085 if (!REG_P (reg))
2086 return 0;
2088 regno = REGNO (reg);
2089 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2090 for (i = 0; i < nregs; i++)
2091 if (! call_used_regs[regno + i])
2092 return 1;
2094 return 0;
2097 /* Return true if we should assign DECL a pseudo register; false if it
2098 should live on the local stack. */
2100 bool
2101 use_register_for_decl (const_tree decl)
2103 if (TREE_CODE (decl) == SSA_NAME)
2105 /* We often try to use the SSA_NAME, instead of its underlying
2106 decl, to get type information and guide decisions, to avoid
2107 differences of behavior between anonymous and named
2108 variables, but in this one case we have to go for the actual
2109 variable if there is one. The main reason is that, at least
2110 at -O0, we want to place user variables on the stack, but we
2111 don't mind using pseudos for anonymous or ignored temps.
2112 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2113 should go in pseudos, whereas their corresponding variables
2114 might have to go on the stack. So, disregarding the decl
2115 here would negatively impact debug info at -O0, enable
2116 coalescing between SSA_NAMEs that ought to get different
2117 stack/pseudo assignments, and get the incoming argument
2118 processing thoroughly confused by PARM_DECLs expected to live
2119 in stack slots but assigned to pseudos. */
2120 if (!SSA_NAME_VAR (decl))
2121 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2122 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2124 decl = SSA_NAME_VAR (decl);
2127 /* Honor volatile. */
2128 if (TREE_SIDE_EFFECTS (decl))
2129 return false;
2131 /* Honor addressability. */
2132 if (TREE_ADDRESSABLE (decl))
2133 return false;
2135 /* RESULT_DECLs are a bit special in that they're assigned without
2136 regard to use_register_for_decl, but we generally only store in
2137 them. If we coalesce their SSA NAMEs, we'd better return a
2138 result that matches the assignment in expand_function_start. */
2139 if (TREE_CODE (decl) == RESULT_DECL)
2141 /* If it's not an aggregate, we're going to use a REG or a
2142 PARALLEL containing a REG. */
2143 if (!aggregate_value_p (decl, current_function_decl))
2144 return true;
2146 /* If expand_function_start determines the return value, we'll
2147 use MEM if it's not by reference. */
2148 if (cfun->returns_pcc_struct
2149 || (targetm.calls.struct_value_rtx
2150 (TREE_TYPE (current_function_decl), 1)))
2151 return DECL_BY_REFERENCE (decl);
2153 /* Otherwise, we're taking an extra all.function_result_decl
2154 argument. It's set up in assign_parms_augmented_arg_list,
2155 under the (negated) conditions above, and then it's used to
2156 set up the RESULT_DECL rtl in assign_params, after looping
2157 over all parameters. Now, if the RESULT_DECL is not by
2158 reference, we'll use a MEM either way. */
2159 if (!DECL_BY_REFERENCE (decl))
2160 return false;
2162 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2163 the function_result_decl's assignment. Since it's a pointer,
2164 we can short-circuit a number of the tests below, and we must
2165 duplicat e them because we don't have the
2166 function_result_decl to test. */
2167 if (!targetm.calls.allocate_stack_slots_for_args ())
2168 return true;
2169 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2170 if (optimize)
2171 return true;
2172 /* We don't set DECL_REGISTER for the function_result_decl. */
2173 return false;
2176 /* Decl is implicitly addressible by bound stores and loads
2177 if it is an aggregate holding bounds. */
2178 if (chkp_function_instrumented_p (current_function_decl)
2179 && TREE_TYPE (decl)
2180 && !BOUNDED_P (decl)
2181 && chkp_type_has_pointer (TREE_TYPE (decl)))
2182 return false;
2184 /* Only register-like things go in registers. */
2185 if (DECL_MODE (decl) == BLKmode)
2186 return false;
2188 /* If -ffloat-store specified, don't put explicit float variables
2189 into registers. */
2190 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2191 propagates values across these stores, and it probably shouldn't. */
2192 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2193 return false;
2195 if (!targetm.calls.allocate_stack_slots_for_args ())
2196 return true;
2198 /* If we're not interested in tracking debugging information for
2199 this decl, then we can certainly put it in a register. */
2200 if (DECL_IGNORED_P (decl))
2201 return true;
2203 if (optimize)
2204 return true;
2206 if (!DECL_REGISTER (decl))
2207 return false;
2209 switch (TREE_CODE (TREE_TYPE (decl)))
2211 case RECORD_TYPE:
2212 case UNION_TYPE:
2213 case QUAL_UNION_TYPE:
2214 /* When not optimizing, disregard register keyword for variables with
2215 types containing methods, otherwise the methods won't be callable
2216 from the debugger. */
2217 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2218 return false;
2219 break;
2220 default:
2221 break;
2224 return true;
2227 /* Structures to communicate between the subroutines of assign_parms.
2228 The first holds data persistent across all parameters, the second
2229 is cleared out for each parameter. */
2231 struct assign_parm_data_all
2233 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2234 should become a job of the target or otherwise encapsulated. */
2235 CUMULATIVE_ARGS args_so_far_v;
2236 cumulative_args_t args_so_far;
2237 struct args_size stack_args_size;
2238 tree function_result_decl;
2239 tree orig_fnargs;
2240 rtx_insn *first_conversion_insn;
2241 rtx_insn *last_conversion_insn;
2242 HOST_WIDE_INT pretend_args_size;
2243 HOST_WIDE_INT extra_pretend_bytes;
2244 int reg_parm_stack_space;
2247 struct assign_parm_data_one
2249 tree nominal_type;
2250 tree passed_type;
2251 rtx entry_parm;
2252 rtx stack_parm;
2253 machine_mode nominal_mode;
2254 machine_mode passed_mode;
2255 machine_mode promoted_mode;
2256 struct locate_and_pad_arg_data locate;
2257 int partial;
2258 BOOL_BITFIELD named_arg : 1;
2259 BOOL_BITFIELD passed_pointer : 1;
2260 BOOL_BITFIELD on_stack : 1;
2261 BOOL_BITFIELD loaded_in_reg : 1;
2264 struct bounds_parm_data
2266 assign_parm_data_one parm_data;
2267 tree bounds_parm;
2268 tree ptr_parm;
2269 rtx ptr_entry;
2270 int bound_no;
2273 /* A subroutine of assign_parms. Initialize ALL. */
2275 static void
2276 assign_parms_initialize_all (struct assign_parm_data_all *all)
2278 tree fntype ATTRIBUTE_UNUSED;
2280 memset (all, 0, sizeof (*all));
2282 fntype = TREE_TYPE (current_function_decl);
2284 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2285 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2286 #else
2287 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2288 current_function_decl, -1);
2289 #endif
2290 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2292 #ifdef INCOMING_REG_PARM_STACK_SPACE
2293 all->reg_parm_stack_space
2294 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2295 #endif
2298 /* If ARGS contains entries with complex types, split the entry into two
2299 entries of the component type. Return a new list of substitutions are
2300 needed, else the old list. */
2302 static void
2303 split_complex_args (vec<tree> *args)
2305 unsigned i;
2306 tree p;
2308 FOR_EACH_VEC_ELT (*args, i, p)
2310 tree type = TREE_TYPE (p);
2311 if (TREE_CODE (type) == COMPLEX_TYPE
2312 && targetm.calls.split_complex_arg (type))
2314 tree decl;
2315 tree subtype = TREE_TYPE (type);
2316 bool addressable = TREE_ADDRESSABLE (p);
2318 /* Rewrite the PARM_DECL's type with its component. */
2319 p = copy_node (p);
2320 TREE_TYPE (p) = subtype;
2321 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2322 DECL_MODE (p) = VOIDmode;
2323 DECL_SIZE (p) = NULL;
2324 DECL_SIZE_UNIT (p) = NULL;
2325 /* If this arg must go in memory, put it in a pseudo here.
2326 We can't allow it to go in memory as per normal parms,
2327 because the usual place might not have the imag part
2328 adjacent to the real part. */
2329 DECL_ARTIFICIAL (p) = addressable;
2330 DECL_IGNORED_P (p) = addressable;
2331 TREE_ADDRESSABLE (p) = 0;
2332 layout_decl (p, 0);
2333 (*args)[i] = p;
2335 /* Build a second synthetic decl. */
2336 decl = build_decl (EXPR_LOCATION (p),
2337 PARM_DECL, NULL_TREE, subtype);
2338 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2339 DECL_ARTIFICIAL (decl) = addressable;
2340 DECL_IGNORED_P (decl) = addressable;
2341 layout_decl (decl, 0);
2342 args->safe_insert (++i, decl);
2347 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2348 the hidden struct return argument, and (abi willing) complex args.
2349 Return the new parameter list. */
2351 static vec<tree>
2352 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2354 tree fndecl = current_function_decl;
2355 tree fntype = TREE_TYPE (fndecl);
2356 vec<tree> fnargs = vNULL;
2357 tree arg;
2359 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2360 fnargs.safe_push (arg);
2362 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2364 /* If struct value address is treated as the first argument, make it so. */
2365 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2366 && ! cfun->returns_pcc_struct
2367 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2369 tree type = build_pointer_type (TREE_TYPE (fntype));
2370 tree decl;
2372 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2373 PARM_DECL, get_identifier (".result_ptr"), type);
2374 DECL_ARG_TYPE (decl) = type;
2375 DECL_ARTIFICIAL (decl) = 1;
2376 DECL_NAMELESS (decl) = 1;
2377 TREE_CONSTANT (decl) = 1;
2378 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2379 changes, the end of the RESULT_DECL handling block in
2380 use_register_for_decl must be adjusted to match. */
2382 DECL_CHAIN (decl) = all->orig_fnargs;
2383 all->orig_fnargs = decl;
2384 fnargs.safe_insert (0, decl);
2386 all->function_result_decl = decl;
2388 /* If function is instrumented then bounds of the
2389 passed structure address is the second argument. */
2390 if (chkp_function_instrumented_p (fndecl))
2392 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2393 PARM_DECL, get_identifier (".result_bnd"),
2394 pointer_bounds_type_node);
2395 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2396 DECL_ARTIFICIAL (decl) = 1;
2397 DECL_NAMELESS (decl) = 1;
2398 TREE_CONSTANT (decl) = 1;
2400 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2401 DECL_CHAIN (all->orig_fnargs) = decl;
2402 fnargs.safe_insert (1, decl);
2406 /* If the target wants to split complex arguments into scalars, do so. */
2407 if (targetm.calls.split_complex_arg)
2408 split_complex_args (&fnargs);
2410 return fnargs;
2413 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2414 data for the parameter. Incorporate ABI specifics such as pass-by-
2415 reference and type promotion. */
2417 static void
2418 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2419 struct assign_parm_data_one *data)
2421 tree nominal_type, passed_type;
2422 machine_mode nominal_mode, passed_mode, promoted_mode;
2423 int unsignedp;
2425 memset (data, 0, sizeof (*data));
2427 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2428 if (!cfun->stdarg)
2429 data->named_arg = 1; /* No variadic parms. */
2430 else if (DECL_CHAIN (parm))
2431 data->named_arg = 1; /* Not the last non-variadic parm. */
2432 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2433 data->named_arg = 1; /* Only variadic ones are unnamed. */
2434 else
2435 data->named_arg = 0; /* Treat as variadic. */
2437 nominal_type = TREE_TYPE (parm);
2438 passed_type = DECL_ARG_TYPE (parm);
2440 /* Look out for errors propagating this far. Also, if the parameter's
2441 type is void then its value doesn't matter. */
2442 if (TREE_TYPE (parm) == error_mark_node
2443 /* This can happen after weird syntax errors
2444 or if an enum type is defined among the parms. */
2445 || TREE_CODE (parm) != PARM_DECL
2446 || passed_type == NULL
2447 || VOID_TYPE_P (nominal_type))
2449 nominal_type = passed_type = void_type_node;
2450 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2451 goto egress;
2454 /* Find mode of arg as it is passed, and mode of arg as it should be
2455 during execution of this function. */
2456 passed_mode = TYPE_MODE (passed_type);
2457 nominal_mode = TYPE_MODE (nominal_type);
2459 /* If the parm is to be passed as a transparent union or record, use the
2460 type of the first field for the tests below. We have already verified
2461 that the modes are the same. */
2462 if ((TREE_CODE (passed_type) == UNION_TYPE
2463 || TREE_CODE (passed_type) == RECORD_TYPE)
2464 && TYPE_TRANSPARENT_AGGR (passed_type))
2465 passed_type = TREE_TYPE (first_field (passed_type));
2467 /* See if this arg was passed by invisible reference. */
2468 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2469 passed_type, data->named_arg))
2471 passed_type = nominal_type = build_pointer_type (passed_type);
2472 data->passed_pointer = true;
2473 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2476 /* Find mode as it is passed by the ABI. */
2477 unsignedp = TYPE_UNSIGNED (passed_type);
2478 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2479 TREE_TYPE (current_function_decl), 0);
2481 egress:
2482 data->nominal_type = nominal_type;
2483 data->passed_type = passed_type;
2484 data->nominal_mode = nominal_mode;
2485 data->passed_mode = passed_mode;
2486 data->promoted_mode = promoted_mode;
2489 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2491 static void
2492 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2493 struct assign_parm_data_one *data, bool no_rtl)
2495 int varargs_pretend_bytes = 0;
2497 targetm.calls.setup_incoming_varargs (all->args_so_far,
2498 data->promoted_mode,
2499 data->passed_type,
2500 &varargs_pretend_bytes, no_rtl);
2502 /* If the back-end has requested extra stack space, record how much is
2503 needed. Do not change pretend_args_size otherwise since it may be
2504 nonzero from an earlier partial argument. */
2505 if (varargs_pretend_bytes > 0)
2506 all->pretend_args_size = varargs_pretend_bytes;
2509 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2510 the incoming location of the current parameter. */
2512 static void
2513 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2514 struct assign_parm_data_one *data)
2516 HOST_WIDE_INT pretend_bytes = 0;
2517 rtx entry_parm;
2518 bool in_regs;
2520 if (data->promoted_mode == VOIDmode)
2522 data->entry_parm = data->stack_parm = const0_rtx;
2523 return;
2526 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2527 data->promoted_mode,
2528 data->passed_type,
2529 data->named_arg);
2531 if (entry_parm == 0)
2532 data->promoted_mode = data->passed_mode;
2534 /* Determine parm's home in the stack, in case it arrives in the stack
2535 or we should pretend it did. Compute the stack position and rtx where
2536 the argument arrives and its size.
2538 There is one complexity here: If this was a parameter that would
2539 have been passed in registers, but wasn't only because it is
2540 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2541 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2542 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2543 as it was the previous time. */
2544 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2545 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2546 in_regs = true;
2547 #endif
2548 if (!in_regs && !data->named_arg)
2550 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2552 rtx tem;
2553 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2554 data->promoted_mode,
2555 data->passed_type, true);
2556 in_regs = tem != NULL;
2560 /* If this parameter was passed both in registers and in the stack, use
2561 the copy on the stack. */
2562 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2563 data->passed_type))
2564 entry_parm = 0;
2566 if (entry_parm)
2568 int partial;
2570 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2571 data->promoted_mode,
2572 data->passed_type,
2573 data->named_arg);
2574 data->partial = partial;
2576 /* The caller might already have allocated stack space for the
2577 register parameters. */
2578 if (partial != 0 && all->reg_parm_stack_space == 0)
2580 /* Part of this argument is passed in registers and part
2581 is passed on the stack. Ask the prologue code to extend
2582 the stack part so that we can recreate the full value.
2584 PRETEND_BYTES is the size of the registers we need to store.
2585 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2586 stack space that the prologue should allocate.
2588 Internally, gcc assumes that the argument pointer is aligned
2589 to STACK_BOUNDARY bits. This is used both for alignment
2590 optimizations (see init_emit) and to locate arguments that are
2591 aligned to more than PARM_BOUNDARY bits. We must preserve this
2592 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2593 a stack boundary. */
2595 /* We assume at most one partial arg, and it must be the first
2596 argument on the stack. */
2597 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2599 pretend_bytes = partial;
2600 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2602 /* We want to align relative to the actual stack pointer, so
2603 don't include this in the stack size until later. */
2604 all->extra_pretend_bytes = all->pretend_args_size;
2608 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2609 all->reg_parm_stack_space,
2610 entry_parm ? data->partial : 0, current_function_decl,
2611 &all->stack_args_size, &data->locate);
2613 /* Update parm_stack_boundary if this parameter is passed in the
2614 stack. */
2615 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2616 crtl->parm_stack_boundary = data->locate.boundary;
2618 /* Adjust offsets to include the pretend args. */
2619 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2620 data->locate.slot_offset.constant += pretend_bytes;
2621 data->locate.offset.constant += pretend_bytes;
2623 data->entry_parm = entry_parm;
2626 /* A subroutine of assign_parms. If there is actually space on the stack
2627 for this parm, count it in stack_args_size and return true. */
2629 static bool
2630 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2631 struct assign_parm_data_one *data)
2633 /* Bounds are never passed on the stack to keep compatibility
2634 with not instrumented code. */
2635 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2636 return false;
2637 /* Trivially true if we've no incoming register. */
2638 else if (data->entry_parm == NULL)
2640 /* Also true if we're partially in registers and partially not,
2641 since we've arranged to drop the entire argument on the stack. */
2642 else if (data->partial != 0)
2644 /* Also true if the target says that it's passed in both registers
2645 and on the stack. */
2646 else if (GET_CODE (data->entry_parm) == PARALLEL
2647 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2649 /* Also true if the target says that there's stack allocated for
2650 all register parameters. */
2651 else if (all->reg_parm_stack_space > 0)
2653 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2654 else
2655 return false;
2657 all->stack_args_size.constant += data->locate.size.constant;
2658 if (data->locate.size.var)
2659 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2661 return true;
2664 /* A subroutine of assign_parms. Given that this parameter is allocated
2665 stack space by the ABI, find it. */
2667 static void
2668 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2670 rtx offset_rtx, stack_parm;
2671 unsigned int align, boundary;
2673 /* If we're passing this arg using a reg, make its stack home the
2674 aligned stack slot. */
2675 if (data->entry_parm)
2676 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2677 else
2678 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2680 stack_parm = crtl->args.internal_arg_pointer;
2681 if (offset_rtx != const0_rtx)
2682 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2683 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2685 if (!data->passed_pointer)
2687 set_mem_attributes (stack_parm, parm, 1);
2688 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2689 while promoted mode's size is needed. */
2690 if (data->promoted_mode != BLKmode
2691 && data->promoted_mode != DECL_MODE (parm))
2693 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2694 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2696 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2697 data->promoted_mode);
2698 if (offset)
2699 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2704 boundary = data->locate.boundary;
2705 align = BITS_PER_UNIT;
2707 /* If we're padding upward, we know that the alignment of the slot
2708 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2709 intentionally forcing upward padding. Otherwise we have to come
2710 up with a guess at the alignment based on OFFSET_RTX. */
2711 if (data->locate.where_pad != downward || data->entry_parm)
2712 align = boundary;
2713 else if (CONST_INT_P (offset_rtx))
2715 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2716 align = align & -align;
2718 set_mem_align (stack_parm, align);
2720 if (data->entry_parm)
2721 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2723 data->stack_parm = stack_parm;
2726 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2727 always valid and contiguous. */
2729 static void
2730 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2732 rtx entry_parm = data->entry_parm;
2733 rtx stack_parm = data->stack_parm;
2735 /* If this parm was passed part in regs and part in memory, pretend it
2736 arrived entirely in memory by pushing the register-part onto the stack.
2737 In the special case of a DImode or DFmode that is split, we could put
2738 it together in a pseudoreg directly, but for now that's not worth
2739 bothering with. */
2740 if (data->partial != 0)
2742 /* Handle calls that pass values in multiple non-contiguous
2743 locations. The Irix 6 ABI has examples of this. */
2744 if (GET_CODE (entry_parm) == PARALLEL)
2745 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2746 data->passed_type,
2747 int_size_in_bytes (data->passed_type));
2748 else
2750 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2751 move_block_from_reg (REGNO (entry_parm),
2752 validize_mem (copy_rtx (stack_parm)),
2753 data->partial / UNITS_PER_WORD);
2756 entry_parm = stack_parm;
2759 /* If we didn't decide this parm came in a register, by default it came
2760 on the stack. */
2761 else if (entry_parm == NULL)
2762 entry_parm = stack_parm;
2764 /* When an argument is passed in multiple locations, we can't make use
2765 of this information, but we can save some copying if the whole argument
2766 is passed in a single register. */
2767 else if (GET_CODE (entry_parm) == PARALLEL
2768 && data->nominal_mode != BLKmode
2769 && data->passed_mode != BLKmode)
2771 size_t i, len = XVECLEN (entry_parm, 0);
2773 for (i = 0; i < len; i++)
2774 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2775 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2776 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2777 == data->passed_mode)
2778 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2780 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2781 break;
2785 data->entry_parm = entry_parm;
2788 /* A subroutine of assign_parms. Reconstitute any values which were
2789 passed in multiple registers and would fit in a single register. */
2791 static void
2792 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2794 rtx entry_parm = data->entry_parm;
2796 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2797 This can be done with register operations rather than on the
2798 stack, even if we will store the reconstituted parameter on the
2799 stack later. */
2800 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2802 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2803 emit_group_store (parmreg, entry_parm, data->passed_type,
2804 GET_MODE_SIZE (GET_MODE (entry_parm)));
2805 entry_parm = parmreg;
2808 data->entry_parm = entry_parm;
2811 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2812 always valid and properly aligned. */
2814 static void
2815 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2817 rtx stack_parm = data->stack_parm;
2819 /* If we can't trust the parm stack slot to be aligned enough for its
2820 ultimate type, don't use that slot after entry. We'll make another
2821 stack slot, if we need one. */
2822 if (stack_parm
2823 && ((STRICT_ALIGNMENT
2824 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2825 || (data->nominal_type
2826 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2827 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2828 stack_parm = NULL;
2830 /* If parm was passed in memory, and we need to convert it on entry,
2831 don't store it back in that same slot. */
2832 else if (data->entry_parm == stack_parm
2833 && data->nominal_mode != BLKmode
2834 && data->nominal_mode != data->passed_mode)
2835 stack_parm = NULL;
2837 /* If stack protection is in effect for this function, don't leave any
2838 pointers in their passed stack slots. */
2839 else if (crtl->stack_protect_guard
2840 && (flag_stack_protect == 2
2841 || data->passed_pointer
2842 || POINTER_TYPE_P (data->nominal_type)))
2843 stack_parm = NULL;
2845 data->stack_parm = stack_parm;
2848 /* A subroutine of assign_parms. Return true if the current parameter
2849 should be stored as a BLKmode in the current frame. */
2851 static bool
2852 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2854 if (data->nominal_mode == BLKmode)
2855 return true;
2856 if (GET_MODE (data->entry_parm) == BLKmode)
2857 return true;
2859 #ifdef BLOCK_REG_PADDING
2860 /* Only assign_parm_setup_block knows how to deal with register arguments
2861 that are padded at the least significant end. */
2862 if (REG_P (data->entry_parm)
2863 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2864 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2865 == (BYTES_BIG_ENDIAN ? upward : downward)))
2866 return true;
2867 #endif
2869 return false;
2872 /* A subroutine of assign_parms. Arrange for the parameter to be
2873 present and valid in DATA->STACK_RTL. */
2875 static void
2876 assign_parm_setup_block (struct assign_parm_data_all *all,
2877 tree parm, struct assign_parm_data_one *data)
2879 rtx entry_parm = data->entry_parm;
2880 rtx stack_parm = data->stack_parm;
2881 rtx target_reg = NULL_RTX;
2882 bool in_conversion_seq = false;
2883 HOST_WIDE_INT size;
2884 HOST_WIDE_INT size_stored;
2886 if (GET_CODE (entry_parm) == PARALLEL)
2887 entry_parm = emit_group_move_into_temps (entry_parm);
2889 /* If we want the parameter in a pseudo, don't use a stack slot. */
2890 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2892 tree def = ssa_default_def (cfun, parm);
2893 gcc_assert (def);
2894 machine_mode mode = promote_ssa_mode (def, NULL);
2895 rtx reg = gen_reg_rtx (mode);
2896 if (GET_CODE (reg) != CONCAT)
2897 stack_parm = reg;
2898 else
2900 target_reg = reg;
2901 /* Avoid allocating a stack slot, if there isn't one
2902 preallocated by the ABI. It might seem like we should
2903 always prefer a pseudo, but converting between
2904 floating-point and integer modes goes through the stack
2905 on various machines, so it's better to use the reserved
2906 stack slot than to risk wasting it and allocating more
2907 for the conversion. */
2908 if (stack_parm == NULL_RTX)
2910 int save = generating_concat_p;
2911 generating_concat_p = 0;
2912 stack_parm = gen_reg_rtx (mode);
2913 generating_concat_p = save;
2916 data->stack_parm = NULL;
2919 size = int_size_in_bytes (data->passed_type);
2920 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2921 if (stack_parm == 0)
2923 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2924 stack_parm = assign_stack_local (BLKmode, size_stored,
2925 DECL_ALIGN (parm));
2926 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2927 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2928 set_mem_attributes (stack_parm, parm, 1);
2931 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2932 calls that pass values in multiple non-contiguous locations. */
2933 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2935 rtx mem;
2937 /* Note that we will be storing an integral number of words.
2938 So we have to be careful to ensure that we allocate an
2939 integral number of words. We do this above when we call
2940 assign_stack_local if space was not allocated in the argument
2941 list. If it was, this will not work if PARM_BOUNDARY is not
2942 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2943 if it becomes a problem. Exception is when BLKmode arrives
2944 with arguments not conforming to word_mode. */
2946 if (data->stack_parm == 0)
2948 else if (GET_CODE (entry_parm) == PARALLEL)
2950 else
2951 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2953 mem = validize_mem (copy_rtx (stack_parm));
2955 /* Handle values in multiple non-contiguous locations. */
2956 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2957 emit_group_store (mem, entry_parm, data->passed_type, size);
2958 else if (GET_CODE (entry_parm) == PARALLEL)
2960 push_to_sequence2 (all->first_conversion_insn,
2961 all->last_conversion_insn);
2962 emit_group_store (mem, entry_parm, data->passed_type, size);
2963 all->first_conversion_insn = get_insns ();
2964 all->last_conversion_insn = get_last_insn ();
2965 end_sequence ();
2966 in_conversion_seq = true;
2969 else if (size == 0)
2972 /* If SIZE is that of a mode no bigger than a word, just use
2973 that mode's store operation. */
2974 else if (size <= UNITS_PER_WORD)
2976 machine_mode mode
2977 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2979 if (mode != BLKmode
2980 #ifdef BLOCK_REG_PADDING
2981 && (size == UNITS_PER_WORD
2982 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2983 != (BYTES_BIG_ENDIAN ? upward : downward)))
2984 #endif
2987 rtx reg;
2989 /* We are really truncating a word_mode value containing
2990 SIZE bytes into a value of mode MODE. If such an
2991 operation requires no actual instructions, we can refer
2992 to the value directly in mode MODE, otherwise we must
2993 start with the register in word_mode and explicitly
2994 convert it. */
2995 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2996 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2997 else
2999 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3000 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3002 emit_move_insn (change_address (mem, mode, 0), reg);
3005 #ifdef BLOCK_REG_PADDING
3006 /* Storing the register in memory as a full word, as
3007 move_block_from_reg below would do, and then using the
3008 MEM in a smaller mode, has the effect of shifting right
3009 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3010 shifting must be explicit. */
3011 else if (!MEM_P (mem))
3013 rtx x;
3015 /* If the assert below fails, we should have taken the
3016 mode != BLKmode path above, unless we have downward
3017 padding of smaller-than-word arguments on a machine
3018 with little-endian bytes, which would likely require
3019 additional changes to work correctly. */
3020 gcc_checking_assert (BYTES_BIG_ENDIAN
3021 && (BLOCK_REG_PADDING (mode,
3022 data->passed_type, 1)
3023 == upward));
3025 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3027 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3028 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3029 NULL_RTX, 1);
3030 x = force_reg (word_mode, x);
3031 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3033 emit_move_insn (mem, x);
3035 #endif
3037 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3038 machine must be aligned to the left before storing
3039 to memory. Note that the previous test doesn't
3040 handle all cases (e.g. SIZE == 3). */
3041 else if (size != UNITS_PER_WORD
3042 #ifdef BLOCK_REG_PADDING
3043 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3044 == downward)
3045 #else
3046 && BYTES_BIG_ENDIAN
3047 #endif
3050 rtx tem, x;
3051 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3052 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3054 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3055 tem = change_address (mem, word_mode, 0);
3056 emit_move_insn (tem, x);
3058 else
3059 move_block_from_reg (REGNO (entry_parm), mem,
3060 size_stored / UNITS_PER_WORD);
3062 else if (!MEM_P (mem))
3064 gcc_checking_assert (size > UNITS_PER_WORD);
3065 #ifdef BLOCK_REG_PADDING
3066 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3067 data->passed_type, 0)
3068 == upward);
3069 #endif
3070 emit_move_insn (mem, entry_parm);
3072 else
3073 move_block_from_reg (REGNO (entry_parm), mem,
3074 size_stored / UNITS_PER_WORD);
3076 else if (data->stack_parm == 0)
3078 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3079 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3080 BLOCK_OP_NORMAL);
3081 all->first_conversion_insn = get_insns ();
3082 all->last_conversion_insn = get_last_insn ();
3083 end_sequence ();
3084 in_conversion_seq = true;
3087 if (target_reg)
3089 if (!in_conversion_seq)
3090 emit_move_insn (target_reg, stack_parm);
3091 else
3093 push_to_sequence2 (all->first_conversion_insn,
3094 all->last_conversion_insn);
3095 emit_move_insn (target_reg, stack_parm);
3096 all->first_conversion_insn = get_insns ();
3097 all->last_conversion_insn = get_last_insn ();
3098 end_sequence ();
3100 stack_parm = target_reg;
3103 data->stack_parm = stack_parm;
3104 set_parm_rtl (parm, stack_parm);
3107 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3108 parameter. Get it there. Perform all ABI specified conversions. */
3110 static void
3111 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3112 struct assign_parm_data_one *data)
3114 rtx parmreg, validated_mem;
3115 rtx equiv_stack_parm;
3116 machine_mode promoted_nominal_mode;
3117 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3118 bool did_conversion = false;
3119 bool need_conversion, moved;
3120 rtx rtl;
3122 /* Store the parm in a pseudoregister during the function, but we may
3123 need to do it in a wider mode. Using 2 here makes the result
3124 consistent with promote_decl_mode and thus expand_expr_real_1. */
3125 promoted_nominal_mode
3126 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3127 TREE_TYPE (current_function_decl), 2);
3129 parmreg = gen_reg_rtx (promoted_nominal_mode);
3130 if (!DECL_ARTIFICIAL (parm))
3131 mark_user_reg (parmreg);
3133 /* If this was an item that we received a pointer to,
3134 set rtl appropriately. */
3135 if (data->passed_pointer)
3137 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3138 set_mem_attributes (rtl, parm, 1);
3140 else
3141 rtl = parmreg;
3143 assign_parm_remove_parallels (data);
3145 /* Copy the value into the register, thus bridging between
3146 assign_parm_find_data_types and expand_expr_real_1. */
3148 equiv_stack_parm = data->stack_parm;
3149 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3151 need_conversion = (data->nominal_mode != data->passed_mode
3152 || promoted_nominal_mode != data->promoted_mode);
3153 moved = false;
3155 if (need_conversion
3156 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3157 && data->nominal_mode == data->passed_mode
3158 && data->nominal_mode == GET_MODE (data->entry_parm))
3160 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3161 mode, by the caller. We now have to convert it to
3162 NOMINAL_MODE, if different. However, PARMREG may be in
3163 a different mode than NOMINAL_MODE if it is being stored
3164 promoted.
3166 If ENTRY_PARM is a hard register, it might be in a register
3167 not valid for operating in its mode (e.g., an odd-numbered
3168 register for a DFmode). In that case, moves are the only
3169 thing valid, so we can't do a convert from there. This
3170 occurs when the calling sequence allow such misaligned
3171 usages.
3173 In addition, the conversion may involve a call, which could
3174 clobber parameters which haven't been copied to pseudo
3175 registers yet.
3177 First, we try to emit an insn which performs the necessary
3178 conversion. We verify that this insn does not clobber any
3179 hard registers. */
3181 enum insn_code icode;
3182 rtx op0, op1;
3184 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3185 unsignedp);
3187 op0 = parmreg;
3188 op1 = validated_mem;
3189 if (icode != CODE_FOR_nothing
3190 && insn_operand_matches (icode, 0, op0)
3191 && insn_operand_matches (icode, 1, op1))
3193 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3194 rtx_insn *insn, *insns;
3195 rtx t = op1;
3196 HARD_REG_SET hardregs;
3198 start_sequence ();
3199 /* If op1 is a hard register that is likely spilled, first
3200 force it into a pseudo, otherwise combiner might extend
3201 its lifetime too much. */
3202 if (GET_CODE (t) == SUBREG)
3203 t = SUBREG_REG (t);
3204 if (REG_P (t)
3205 && HARD_REGISTER_P (t)
3206 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3207 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3209 t = gen_reg_rtx (GET_MODE (op1));
3210 emit_move_insn (t, op1);
3212 else
3213 t = op1;
3214 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3215 data->passed_mode, unsignedp);
3216 emit_insn (pat);
3217 insns = get_insns ();
3219 moved = true;
3220 CLEAR_HARD_REG_SET (hardregs);
3221 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3223 if (INSN_P (insn))
3224 note_stores (PATTERN (insn), record_hard_reg_sets,
3225 &hardregs);
3226 if (!hard_reg_set_empty_p (hardregs))
3227 moved = false;
3230 end_sequence ();
3232 if (moved)
3234 emit_insn (insns);
3235 if (equiv_stack_parm != NULL_RTX)
3236 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3237 equiv_stack_parm);
3242 if (moved)
3243 /* Nothing to do. */
3245 else if (need_conversion)
3247 /* We did not have an insn to convert directly, or the sequence
3248 generated appeared unsafe. We must first copy the parm to a
3249 pseudo reg, and save the conversion until after all
3250 parameters have been moved. */
3252 int save_tree_used;
3253 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3255 emit_move_insn (tempreg, validated_mem);
3257 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3258 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3260 if (GET_CODE (tempreg) == SUBREG
3261 && GET_MODE (tempreg) == data->nominal_mode
3262 && REG_P (SUBREG_REG (tempreg))
3263 && data->nominal_mode == data->passed_mode
3264 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3265 && GET_MODE_SIZE (GET_MODE (tempreg))
3266 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3268 /* The argument is already sign/zero extended, so note it
3269 into the subreg. */
3270 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3271 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3274 /* TREE_USED gets set erroneously during expand_assignment. */
3275 save_tree_used = TREE_USED (parm);
3276 SET_DECL_RTL (parm, rtl);
3277 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3278 SET_DECL_RTL (parm, NULL_RTX);
3279 TREE_USED (parm) = save_tree_used;
3280 all->first_conversion_insn = get_insns ();
3281 all->last_conversion_insn = get_last_insn ();
3282 end_sequence ();
3284 did_conversion = true;
3286 else
3287 emit_move_insn (parmreg, validated_mem);
3289 /* If we were passed a pointer but the actual value can safely live
3290 in a register, retrieve it and use it directly. */
3291 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3293 /* We can't use nominal_mode, because it will have been set to
3294 Pmode above. We must use the actual mode of the parm. */
3295 if (use_register_for_decl (parm))
3297 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3298 mark_user_reg (parmreg);
3300 else
3302 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3303 TYPE_MODE (TREE_TYPE (parm)),
3304 TYPE_ALIGN (TREE_TYPE (parm)));
3305 parmreg
3306 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3307 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3308 align);
3309 set_mem_attributes (parmreg, parm, 1);
3312 if (GET_MODE (parmreg) != GET_MODE (rtl))
3314 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3315 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3317 push_to_sequence2 (all->first_conversion_insn,
3318 all->last_conversion_insn);
3319 emit_move_insn (tempreg, rtl);
3320 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3321 emit_move_insn (parmreg, tempreg);
3322 all->first_conversion_insn = get_insns ();
3323 all->last_conversion_insn = get_last_insn ();
3324 end_sequence ();
3326 did_conversion = true;
3328 else
3329 emit_move_insn (parmreg, rtl);
3331 rtl = parmreg;
3333 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3334 now the parm. */
3335 data->stack_parm = NULL;
3338 set_parm_rtl (parm, rtl);
3340 /* Mark the register as eliminable if we did no conversion and it was
3341 copied from memory at a fixed offset, and the arg pointer was not
3342 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3343 offset formed an invalid address, such memory-equivalences as we
3344 make here would screw up life analysis for it. */
3345 if (data->nominal_mode == data->passed_mode
3346 && !did_conversion
3347 && data->stack_parm != 0
3348 && MEM_P (data->stack_parm)
3349 && data->locate.offset.var == 0
3350 && reg_mentioned_p (virtual_incoming_args_rtx,
3351 XEXP (data->stack_parm, 0)))
3353 rtx_insn *linsn = get_last_insn ();
3354 rtx_insn *sinsn;
3355 rtx set;
3357 /* Mark complex types separately. */
3358 if (GET_CODE (parmreg) == CONCAT)
3360 machine_mode submode
3361 = GET_MODE_INNER (GET_MODE (parmreg));
3362 int regnor = REGNO (XEXP (parmreg, 0));
3363 int regnoi = REGNO (XEXP (parmreg, 1));
3364 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3365 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3366 GET_MODE_SIZE (submode));
3368 /* Scan backwards for the set of the real and
3369 imaginary parts. */
3370 for (sinsn = linsn; sinsn != 0;
3371 sinsn = prev_nonnote_insn (sinsn))
3373 set = single_set (sinsn);
3374 if (set == 0)
3375 continue;
3377 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3378 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3379 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3380 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3383 else
3384 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3387 /* For pointer data type, suggest pointer register. */
3388 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3389 mark_reg_pointer (parmreg,
3390 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3393 /* A subroutine of assign_parms. Allocate stack space to hold the current
3394 parameter. Get it there. Perform all ABI specified conversions. */
3396 static void
3397 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3398 struct assign_parm_data_one *data)
3400 /* Value must be stored in the stack slot STACK_PARM during function
3401 execution. */
3402 bool to_conversion = false;
3404 assign_parm_remove_parallels (data);
3406 if (data->promoted_mode != data->nominal_mode)
3408 /* Conversion is required. */
3409 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3411 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3413 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3414 to_conversion = true;
3416 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3417 TYPE_UNSIGNED (TREE_TYPE (parm)));
3419 if (data->stack_parm)
3421 int offset = subreg_lowpart_offset (data->nominal_mode,
3422 GET_MODE (data->stack_parm));
3423 /* ??? This may need a big-endian conversion on sparc64. */
3424 data->stack_parm
3425 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3426 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3427 set_mem_offset (data->stack_parm,
3428 MEM_OFFSET (data->stack_parm) + offset);
3432 if (data->entry_parm != data->stack_parm)
3434 rtx src, dest;
3436 if (data->stack_parm == 0)
3438 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3439 GET_MODE (data->entry_parm),
3440 TYPE_ALIGN (data->passed_type));
3441 data->stack_parm
3442 = assign_stack_local (GET_MODE (data->entry_parm),
3443 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3444 align);
3445 set_mem_attributes (data->stack_parm, parm, 1);
3448 dest = validize_mem (copy_rtx (data->stack_parm));
3449 src = validize_mem (copy_rtx (data->entry_parm));
3451 if (MEM_P (src))
3453 /* Use a block move to handle potentially misaligned entry_parm. */
3454 if (!to_conversion)
3455 push_to_sequence2 (all->first_conversion_insn,
3456 all->last_conversion_insn);
3457 to_conversion = true;
3459 emit_block_move (dest, src,
3460 GEN_INT (int_size_in_bytes (data->passed_type)),
3461 BLOCK_OP_NORMAL);
3463 else
3464 emit_move_insn (dest, src);
3467 if (to_conversion)
3469 all->first_conversion_insn = get_insns ();
3470 all->last_conversion_insn = get_last_insn ();
3471 end_sequence ();
3474 set_parm_rtl (parm, data->stack_parm);
3477 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3478 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3480 static void
3481 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3482 vec<tree> fnargs)
3484 tree parm;
3485 tree orig_fnargs = all->orig_fnargs;
3486 unsigned i = 0;
3488 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3490 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3491 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3493 rtx tmp, real, imag;
3494 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3496 real = DECL_RTL (fnargs[i]);
3497 imag = DECL_RTL (fnargs[i + 1]);
3498 if (inner != GET_MODE (real))
3500 real = gen_lowpart_SUBREG (inner, real);
3501 imag = gen_lowpart_SUBREG (inner, imag);
3504 if (TREE_ADDRESSABLE (parm))
3506 rtx rmem, imem;
3507 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3508 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3509 DECL_MODE (parm),
3510 TYPE_ALIGN (TREE_TYPE (parm)));
3512 /* split_complex_arg put the real and imag parts in
3513 pseudos. Move them to memory. */
3514 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3515 set_mem_attributes (tmp, parm, 1);
3516 rmem = adjust_address_nv (tmp, inner, 0);
3517 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3518 push_to_sequence2 (all->first_conversion_insn,
3519 all->last_conversion_insn);
3520 emit_move_insn (rmem, real);
3521 emit_move_insn (imem, imag);
3522 all->first_conversion_insn = get_insns ();
3523 all->last_conversion_insn = get_last_insn ();
3524 end_sequence ();
3526 else
3527 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3528 set_parm_rtl (parm, tmp);
3530 real = DECL_INCOMING_RTL (fnargs[i]);
3531 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3532 if (inner != GET_MODE (real))
3534 real = gen_lowpart_SUBREG (inner, real);
3535 imag = gen_lowpart_SUBREG (inner, imag);
3537 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3538 set_decl_incoming_rtl (parm, tmp, false);
3539 i++;
3544 /* Load bounds of PARM from bounds table. */
3545 static void
3546 assign_parm_load_bounds (struct assign_parm_data_one *data,
3547 tree parm,
3548 rtx entry,
3549 unsigned bound_no)
3551 bitmap_iterator bi;
3552 unsigned i, offs = 0;
3553 int bnd_no = -1;
3554 rtx slot = NULL, ptr = NULL;
3556 if (parm)
3558 bitmap slots;
3559 bitmap_obstack_initialize (NULL);
3560 slots = BITMAP_ALLOC (NULL);
3561 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3562 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3564 if (bound_no)
3565 bound_no--;
3566 else
3568 bnd_no = i;
3569 break;
3572 BITMAP_FREE (slots);
3573 bitmap_obstack_release (NULL);
3576 /* We may have bounds not associated with any pointer. */
3577 if (bnd_no != -1)
3578 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3580 /* Find associated pointer. */
3581 if (bnd_no == -1)
3583 /* If bounds are not associated with any bounds,
3584 then it is passed in a register or special slot. */
3585 gcc_assert (data->entry_parm);
3586 ptr = const0_rtx;
3588 else if (MEM_P (entry))
3589 slot = adjust_address (entry, Pmode, offs);
3590 else if (REG_P (entry))
3591 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3592 else if (GET_CODE (entry) == PARALLEL)
3593 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3594 else
3595 gcc_unreachable ();
3596 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3597 data->entry_parm);
3600 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3602 static void
3603 assign_bounds (vec<bounds_parm_data> &bndargs,
3604 struct assign_parm_data_all &all,
3605 bool assign_regs, bool assign_special,
3606 bool assign_bt)
3608 unsigned i, pass;
3609 bounds_parm_data *pbdata;
3611 if (!bndargs.exists ())
3612 return;
3614 /* We make few passes to store input bounds. Firstly handle bounds
3615 passed in registers. After that we load bounds passed in special
3616 slots. Finally we load bounds from Bounds Table. */
3617 for (pass = 0; pass < 3; pass++)
3618 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3620 /* Pass 0 => regs only. */
3621 if (pass == 0
3622 && (!assign_regs
3623 ||(!pbdata->parm_data.entry_parm
3624 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
3625 continue;
3626 /* Pass 1 => slots only. */
3627 else if (pass == 1
3628 && (!assign_special
3629 || (!pbdata->parm_data.entry_parm
3630 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
3631 continue;
3632 /* Pass 2 => BT only. */
3633 else if (pass == 2
3634 && (!assign_bt
3635 || pbdata->parm_data.entry_parm))
3636 continue;
3638 if (!pbdata->parm_data.entry_parm
3639 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3640 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3641 pbdata->ptr_entry, pbdata->bound_no);
3643 set_decl_incoming_rtl (pbdata->bounds_parm,
3644 pbdata->parm_data.entry_parm, false);
3646 if (assign_parm_setup_block_p (&pbdata->parm_data))
3647 assign_parm_setup_block (&all, pbdata->bounds_parm,
3648 &pbdata->parm_data);
3649 else if (pbdata->parm_data.passed_pointer
3650 || use_register_for_decl (pbdata->bounds_parm))
3651 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3652 &pbdata->parm_data);
3653 else
3654 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3655 &pbdata->parm_data);
3659 /* Assign RTL expressions to the function's parameters. This may involve
3660 copying them into registers and using those registers as the DECL_RTL. */
3662 static void
3663 assign_parms (tree fndecl)
3665 struct assign_parm_data_all all;
3666 tree parm;
3667 vec<tree> fnargs;
3668 unsigned i, bound_no = 0;
3669 tree last_arg = NULL;
3670 rtx last_arg_entry = NULL;
3671 vec<bounds_parm_data> bndargs = vNULL;
3672 bounds_parm_data bdata;
3674 crtl->args.internal_arg_pointer
3675 = targetm.calls.internal_arg_pointer ();
3677 assign_parms_initialize_all (&all);
3678 fnargs = assign_parms_augmented_arg_list (&all);
3680 FOR_EACH_VEC_ELT (fnargs, i, parm)
3682 struct assign_parm_data_one data;
3684 /* Extract the type of PARM; adjust it according to ABI. */
3685 assign_parm_find_data_types (&all, parm, &data);
3687 /* Early out for errors and void parameters. */
3688 if (data.passed_mode == VOIDmode)
3690 SET_DECL_RTL (parm, const0_rtx);
3691 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3692 continue;
3695 /* Estimate stack alignment from parameter alignment. */
3696 if (SUPPORTS_STACK_ALIGNMENT)
3698 unsigned int align
3699 = targetm.calls.function_arg_boundary (data.promoted_mode,
3700 data.passed_type);
3701 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3702 align);
3703 if (TYPE_ALIGN (data.nominal_type) > align)
3704 align = MINIMUM_ALIGNMENT (data.nominal_type,
3705 TYPE_MODE (data.nominal_type),
3706 TYPE_ALIGN (data.nominal_type));
3707 if (crtl->stack_alignment_estimated < align)
3709 gcc_assert (!crtl->stack_realign_processed);
3710 crtl->stack_alignment_estimated = align;
3714 /* Find out where the parameter arrives in this function. */
3715 assign_parm_find_entry_rtl (&all, &data);
3717 /* Find out where stack space for this parameter might be. */
3718 if (assign_parm_is_stack_parm (&all, &data))
3720 assign_parm_find_stack_rtl (parm, &data);
3721 assign_parm_adjust_entry_rtl (&data);
3723 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3725 /* Remember where last non bounds arg was passed in case
3726 we have to load associated bounds for it from Bounds
3727 Table. */
3728 last_arg = parm;
3729 last_arg_entry = data.entry_parm;
3730 bound_no = 0;
3732 /* Record permanently how this parm was passed. */
3733 if (data.passed_pointer)
3735 rtx incoming_rtl
3736 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3737 data.entry_parm);
3738 set_decl_incoming_rtl (parm, incoming_rtl, true);
3740 else
3741 set_decl_incoming_rtl (parm, data.entry_parm, false);
3743 assign_parm_adjust_stack_rtl (&data);
3745 /* Bounds should be loaded in the particular order to
3746 have registers allocated correctly. Collect info about
3747 input bounds and load them later. */
3748 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3750 /* Expect bounds in instrumented functions only. */
3751 gcc_assert (chkp_function_instrumented_p (fndecl));
3753 bdata.parm_data = data;
3754 bdata.bounds_parm = parm;
3755 bdata.ptr_parm = last_arg;
3756 bdata.ptr_entry = last_arg_entry;
3757 bdata.bound_no = bound_no;
3758 bndargs.safe_push (bdata);
3760 else
3762 if (assign_parm_setup_block_p (&data))
3763 assign_parm_setup_block (&all, parm, &data);
3764 else if (data.passed_pointer || use_register_for_decl (parm))
3765 assign_parm_setup_reg (&all, parm, &data);
3766 else
3767 assign_parm_setup_stack (&all, parm, &data);
3770 if (cfun->stdarg && !DECL_CHAIN (parm))
3772 int pretend_bytes = 0;
3774 assign_parms_setup_varargs (&all, &data, false);
3776 if (chkp_function_instrumented_p (fndecl))
3778 /* We expect this is the last parm. Otherwise it is wrong
3779 to assign bounds right now. */
3780 gcc_assert (i == (fnargs.length () - 1));
3781 assign_bounds (bndargs, all, true, false, false);
3782 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3783 data.promoted_mode,
3784 data.passed_type,
3785 &pretend_bytes,
3786 false);
3787 assign_bounds (bndargs, all, false, true, true);
3788 bndargs.release ();
3792 /* Update info on where next arg arrives in registers. */
3793 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3794 data.passed_type, data.named_arg);
3796 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3797 bound_no++;
3800 assign_bounds (bndargs, all, true, true, true);
3801 bndargs.release ();
3803 if (targetm.calls.split_complex_arg)
3804 assign_parms_unsplit_complex (&all, fnargs);
3806 fnargs.release ();
3808 /* Output all parameter conversion instructions (possibly including calls)
3809 now that all parameters have been copied out of hard registers. */
3810 emit_insn (all.first_conversion_insn);
3812 /* Estimate reload stack alignment from scalar return mode. */
3813 if (SUPPORTS_STACK_ALIGNMENT)
3815 if (DECL_RESULT (fndecl))
3817 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3818 machine_mode mode = TYPE_MODE (type);
3820 if (mode != BLKmode
3821 && mode != VOIDmode
3822 && !AGGREGATE_TYPE_P (type))
3824 unsigned int align = GET_MODE_ALIGNMENT (mode);
3825 if (crtl->stack_alignment_estimated < align)
3827 gcc_assert (!crtl->stack_realign_processed);
3828 crtl->stack_alignment_estimated = align;
3834 /* If we are receiving a struct value address as the first argument, set up
3835 the RTL for the function result. As this might require code to convert
3836 the transmitted address to Pmode, we do this here to ensure that possible
3837 preliminary conversions of the address have been emitted already. */
3838 if (all.function_result_decl)
3840 tree result = DECL_RESULT (current_function_decl);
3841 rtx addr = DECL_RTL (all.function_result_decl);
3842 rtx x;
3844 if (DECL_BY_REFERENCE (result))
3846 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3847 x = addr;
3849 else
3851 SET_DECL_VALUE_EXPR (result,
3852 build1 (INDIRECT_REF, TREE_TYPE (result),
3853 all.function_result_decl));
3854 addr = convert_memory_address (Pmode, addr);
3855 x = gen_rtx_MEM (DECL_MODE (result), addr);
3856 set_mem_attributes (x, result, 1);
3859 DECL_HAS_VALUE_EXPR_P (result) = 1;
3861 set_parm_rtl (result, x);
3864 /* We have aligned all the args, so add space for the pretend args. */
3865 crtl->args.pretend_args_size = all.pretend_args_size;
3866 all.stack_args_size.constant += all.extra_pretend_bytes;
3867 crtl->args.size = all.stack_args_size.constant;
3869 /* Adjust function incoming argument size for alignment and
3870 minimum length. */
3872 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3873 crtl->args.size = CEIL_ROUND (crtl->args.size,
3874 PARM_BOUNDARY / BITS_PER_UNIT);
3876 if (ARGS_GROW_DOWNWARD)
3878 crtl->args.arg_offset_rtx
3879 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3880 : expand_expr (size_diffop (all.stack_args_size.var,
3881 size_int (-all.stack_args_size.constant)),
3882 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3884 else
3885 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3887 /* See how many bytes, if any, of its args a function should try to pop
3888 on return. */
3890 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3891 TREE_TYPE (fndecl),
3892 crtl->args.size);
3894 /* For stdarg.h function, save info about
3895 regs and stack space used by the named args. */
3897 crtl->args.info = all.args_so_far_v;
3899 /* Set the rtx used for the function return value. Put this in its
3900 own variable so any optimizers that need this information don't have
3901 to include tree.h. Do this here so it gets done when an inlined
3902 function gets output. */
3904 crtl->return_rtx
3905 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3906 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3908 /* If scalar return value was computed in a pseudo-reg, or was a named
3909 return value that got dumped to the stack, copy that to the hard
3910 return register. */
3911 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3913 tree decl_result = DECL_RESULT (fndecl);
3914 rtx decl_rtl = DECL_RTL (decl_result);
3916 if (REG_P (decl_rtl)
3917 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3918 : DECL_REGISTER (decl_result))
3920 rtx real_decl_rtl;
3922 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3923 fndecl, true);
3924 if (chkp_function_instrumented_p (fndecl))
3925 crtl->return_bnd
3926 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3927 fndecl, true);
3928 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3929 /* The delay slot scheduler assumes that crtl->return_rtx
3930 holds the hard register containing the return value, not a
3931 temporary pseudo. */
3932 crtl->return_rtx = real_decl_rtl;
3937 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3938 For all seen types, gimplify their sizes. */
3940 static tree
3941 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3943 tree t = *tp;
3945 *walk_subtrees = 0;
3946 if (TYPE_P (t))
3948 if (POINTER_TYPE_P (t))
3949 *walk_subtrees = 1;
3950 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3951 && !TYPE_SIZES_GIMPLIFIED (t))
3953 gimplify_type_sizes (t, (gimple_seq *) data);
3954 *walk_subtrees = 1;
3958 return NULL;
3961 /* Gimplify the parameter list for current_function_decl. This involves
3962 evaluating SAVE_EXPRs of variable sized parameters and generating code
3963 to implement callee-copies reference parameters. Returns a sequence of
3964 statements to add to the beginning of the function. */
3966 gimple_seq
3967 gimplify_parameters (void)
3969 struct assign_parm_data_all all;
3970 tree parm;
3971 gimple_seq stmts = NULL;
3972 vec<tree> fnargs;
3973 unsigned i;
3975 assign_parms_initialize_all (&all);
3976 fnargs = assign_parms_augmented_arg_list (&all);
3978 FOR_EACH_VEC_ELT (fnargs, i, parm)
3980 struct assign_parm_data_one data;
3982 /* Extract the type of PARM; adjust it according to ABI. */
3983 assign_parm_find_data_types (&all, parm, &data);
3985 /* Early out for errors and void parameters. */
3986 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3987 continue;
3989 /* Update info on where next arg arrives in registers. */
3990 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3991 data.passed_type, data.named_arg);
3993 /* ??? Once upon a time variable_size stuffed parameter list
3994 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3995 turned out to be less than manageable in the gimple world.
3996 Now we have to hunt them down ourselves. */
3997 walk_tree_without_duplicates (&data.passed_type,
3998 gimplify_parm_type, &stmts);
4000 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4002 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4003 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4006 if (data.passed_pointer)
4008 tree type = TREE_TYPE (data.passed_type);
4009 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4010 type, data.named_arg))
4012 tree local, t;
4014 /* For constant-sized objects, this is trivial; for
4015 variable-sized objects, we have to play games. */
4016 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4017 && !(flag_stack_check == GENERIC_STACK_CHECK
4018 && compare_tree_int (DECL_SIZE_UNIT (parm),
4019 STACK_CHECK_MAX_VAR_SIZE) > 0))
4021 local = create_tmp_var (type, get_name (parm));
4022 DECL_IGNORED_P (local) = 0;
4023 /* If PARM was addressable, move that flag over
4024 to the local copy, as its address will be taken,
4025 not the PARMs. Keep the parms address taken
4026 as we'll query that flag during gimplification. */
4027 if (TREE_ADDRESSABLE (parm))
4028 TREE_ADDRESSABLE (local) = 1;
4029 else if (TREE_CODE (type) == COMPLEX_TYPE
4030 || TREE_CODE (type) == VECTOR_TYPE)
4031 DECL_GIMPLE_REG_P (local) = 1;
4033 else
4035 tree ptr_type, addr;
4037 ptr_type = build_pointer_type (type);
4038 addr = create_tmp_reg (ptr_type, get_name (parm));
4039 DECL_IGNORED_P (addr) = 0;
4040 local = build_fold_indirect_ref (addr);
4042 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4043 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
4044 size_int (DECL_ALIGN (parm)));
4046 /* The call has been built for a variable-sized object. */
4047 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4048 t = fold_convert (ptr_type, t);
4049 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4050 gimplify_and_add (t, &stmts);
4053 gimplify_assign (local, parm, &stmts);
4055 SET_DECL_VALUE_EXPR (parm, local);
4056 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4061 fnargs.release ();
4063 return stmts;
4066 /* Compute the size and offset from the start of the stacked arguments for a
4067 parm passed in mode PASSED_MODE and with type TYPE.
4069 INITIAL_OFFSET_PTR points to the current offset into the stacked
4070 arguments.
4072 The starting offset and size for this parm are returned in
4073 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
4074 nonzero, the offset is that of stack slot, which is returned in
4075 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4076 padding required from the initial offset ptr to the stack slot.
4078 IN_REGS is nonzero if the argument will be passed in registers. It will
4079 never be set if REG_PARM_STACK_SPACE is not defined.
4081 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4082 for arguments which are passed in registers.
4084 FNDECL is the function in which the argument was defined.
4086 There are two types of rounding that are done. The first, controlled by
4087 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4088 argument list to be aligned to the specific boundary (in bits). This
4089 rounding affects the initial and starting offsets, but not the argument
4090 size.
4092 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4093 optionally rounds the size of the parm to PARM_BOUNDARY. The
4094 initial offset is not affected by this rounding, while the size always
4095 is and the starting offset may be. */
4097 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4098 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4099 callers pass in the total size of args so far as
4100 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4102 void
4103 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4104 int reg_parm_stack_space, int partial,
4105 tree fndecl ATTRIBUTE_UNUSED,
4106 struct args_size *initial_offset_ptr,
4107 struct locate_and_pad_arg_data *locate)
4109 tree sizetree;
4110 enum direction where_pad;
4111 unsigned int boundary, round_boundary;
4112 int part_size_in_regs;
4114 /* If we have found a stack parm before we reach the end of the
4115 area reserved for registers, skip that area. */
4116 if (! in_regs)
4118 if (reg_parm_stack_space > 0)
4120 if (initial_offset_ptr->var)
4122 initial_offset_ptr->var
4123 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4124 ssize_int (reg_parm_stack_space));
4125 initial_offset_ptr->constant = 0;
4127 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4128 initial_offset_ptr->constant = reg_parm_stack_space;
4132 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4134 sizetree
4135 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4136 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4137 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4138 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4139 type);
4140 locate->where_pad = where_pad;
4142 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4143 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4144 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4146 locate->boundary = boundary;
4148 if (SUPPORTS_STACK_ALIGNMENT)
4150 /* stack_alignment_estimated can't change after stack has been
4151 realigned. */
4152 if (crtl->stack_alignment_estimated < boundary)
4154 if (!crtl->stack_realign_processed)
4155 crtl->stack_alignment_estimated = boundary;
4156 else
4158 /* If stack is realigned and stack alignment value
4159 hasn't been finalized, it is OK not to increase
4160 stack_alignment_estimated. The bigger alignment
4161 requirement is recorded in stack_alignment_needed
4162 below. */
4163 gcc_assert (!crtl->stack_realign_finalized
4164 && crtl->stack_realign_needed);
4169 /* Remember if the outgoing parameter requires extra alignment on the
4170 calling function side. */
4171 if (crtl->stack_alignment_needed < boundary)
4172 crtl->stack_alignment_needed = boundary;
4173 if (crtl->preferred_stack_boundary < boundary)
4174 crtl->preferred_stack_boundary = boundary;
4176 if (ARGS_GROW_DOWNWARD)
4178 locate->slot_offset.constant = -initial_offset_ptr->constant;
4179 if (initial_offset_ptr->var)
4180 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4181 initial_offset_ptr->var);
4184 tree s2 = sizetree;
4185 if (where_pad != none
4186 && (!tree_fits_uhwi_p (sizetree)
4187 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4188 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4189 SUB_PARM_SIZE (locate->slot_offset, s2);
4192 locate->slot_offset.constant += part_size_in_regs;
4194 if (!in_regs || reg_parm_stack_space > 0)
4195 pad_to_arg_alignment (&locate->slot_offset, boundary,
4196 &locate->alignment_pad);
4198 locate->size.constant = (-initial_offset_ptr->constant
4199 - locate->slot_offset.constant);
4200 if (initial_offset_ptr->var)
4201 locate->size.var = size_binop (MINUS_EXPR,
4202 size_binop (MINUS_EXPR,
4203 ssize_int (0),
4204 initial_offset_ptr->var),
4205 locate->slot_offset.var);
4207 /* Pad_below needs the pre-rounded size to know how much to pad
4208 below. */
4209 locate->offset = locate->slot_offset;
4210 if (where_pad == downward)
4211 pad_below (&locate->offset, passed_mode, sizetree);
4214 else
4216 if (!in_regs || reg_parm_stack_space > 0)
4217 pad_to_arg_alignment (initial_offset_ptr, boundary,
4218 &locate->alignment_pad);
4219 locate->slot_offset = *initial_offset_ptr;
4221 #ifdef PUSH_ROUNDING
4222 if (passed_mode != BLKmode)
4223 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4224 #endif
4226 /* Pad_below needs the pre-rounded size to know how much to pad below
4227 so this must be done before rounding up. */
4228 locate->offset = locate->slot_offset;
4229 if (where_pad == downward)
4230 pad_below (&locate->offset, passed_mode, sizetree);
4232 if (where_pad != none
4233 && (!tree_fits_uhwi_p (sizetree)
4234 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4235 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4237 ADD_PARM_SIZE (locate->size, sizetree);
4239 locate->size.constant -= part_size_in_regs;
4242 #ifdef FUNCTION_ARG_OFFSET
4243 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4244 #endif
4247 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4248 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4250 static void
4251 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4252 struct args_size *alignment_pad)
4254 tree save_var = NULL_TREE;
4255 HOST_WIDE_INT save_constant = 0;
4256 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4257 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4259 #ifdef SPARC_STACK_BOUNDARY_HACK
4260 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4261 the real alignment of %sp. However, when it does this, the
4262 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4263 if (SPARC_STACK_BOUNDARY_HACK)
4264 sp_offset = 0;
4265 #endif
4267 if (boundary > PARM_BOUNDARY)
4269 save_var = offset_ptr->var;
4270 save_constant = offset_ptr->constant;
4273 alignment_pad->var = NULL_TREE;
4274 alignment_pad->constant = 0;
4276 if (boundary > BITS_PER_UNIT)
4278 if (offset_ptr->var)
4280 tree sp_offset_tree = ssize_int (sp_offset);
4281 tree offset = size_binop (PLUS_EXPR,
4282 ARGS_SIZE_TREE (*offset_ptr),
4283 sp_offset_tree);
4284 tree rounded;
4285 if (ARGS_GROW_DOWNWARD)
4286 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4287 else
4288 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4290 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4291 /* ARGS_SIZE_TREE includes constant term. */
4292 offset_ptr->constant = 0;
4293 if (boundary > PARM_BOUNDARY)
4294 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4295 save_var);
4297 else
4299 offset_ptr->constant = -sp_offset +
4300 (ARGS_GROW_DOWNWARD
4301 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4302 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4304 if (boundary > PARM_BOUNDARY)
4305 alignment_pad->constant = offset_ptr->constant - save_constant;
4310 static void
4311 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4313 if (passed_mode != BLKmode)
4315 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4316 offset_ptr->constant
4317 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4318 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4319 - GET_MODE_SIZE (passed_mode));
4321 else
4323 if (TREE_CODE (sizetree) != INTEGER_CST
4324 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4326 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4327 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4328 /* Add it in. */
4329 ADD_PARM_SIZE (*offset_ptr, s2);
4330 SUB_PARM_SIZE (*offset_ptr, sizetree);
4336 /* True if register REGNO was alive at a place where `setjmp' was
4337 called and was set more than once or is an argument. Such regs may
4338 be clobbered by `longjmp'. */
4340 static bool
4341 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4343 /* There appear to be cases where some local vars never reach the
4344 backend but have bogus regnos. */
4345 if (regno >= max_reg_num ())
4346 return false;
4348 return ((REG_N_SETS (regno) > 1
4349 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4350 regno))
4351 && REGNO_REG_SET_P (setjmp_crosses, regno));
4354 /* Walk the tree of blocks describing the binding levels within a
4355 function and warn about variables the might be killed by setjmp or
4356 vfork. This is done after calling flow_analysis before register
4357 allocation since that will clobber the pseudo-regs to hard
4358 regs. */
4360 static void
4361 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4363 tree decl, sub;
4365 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4367 if (TREE_CODE (decl) == VAR_DECL
4368 && DECL_RTL_SET_P (decl)
4369 && REG_P (DECL_RTL (decl))
4370 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4371 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4372 " %<longjmp%> or %<vfork%>", decl);
4375 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4376 setjmp_vars_warning (setjmp_crosses, sub);
4379 /* Do the appropriate part of setjmp_vars_warning
4380 but for arguments instead of local variables. */
4382 static void
4383 setjmp_args_warning (bitmap setjmp_crosses)
4385 tree decl;
4386 for (decl = DECL_ARGUMENTS (current_function_decl);
4387 decl; decl = DECL_CHAIN (decl))
4388 if (DECL_RTL (decl) != 0
4389 && REG_P (DECL_RTL (decl))
4390 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4391 warning (OPT_Wclobbered,
4392 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4393 decl);
4396 /* Generate warning messages for variables live across setjmp. */
4398 void
4399 generate_setjmp_warnings (void)
4401 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4403 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4404 || bitmap_empty_p (setjmp_crosses))
4405 return;
4407 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4408 setjmp_args_warning (setjmp_crosses);
4412 /* Reverse the order of elements in the fragment chain T of blocks,
4413 and return the new head of the chain (old last element).
4414 In addition to that clear BLOCK_SAME_RANGE flags when needed
4415 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4416 its super fragment origin. */
4418 static tree
4419 block_fragments_nreverse (tree t)
4421 tree prev = 0, block, next, prev_super = 0;
4422 tree super = BLOCK_SUPERCONTEXT (t);
4423 if (BLOCK_FRAGMENT_ORIGIN (super))
4424 super = BLOCK_FRAGMENT_ORIGIN (super);
4425 for (block = t; block; block = next)
4427 next = BLOCK_FRAGMENT_CHAIN (block);
4428 BLOCK_FRAGMENT_CHAIN (block) = prev;
4429 if ((prev && !BLOCK_SAME_RANGE (prev))
4430 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4431 != prev_super))
4432 BLOCK_SAME_RANGE (block) = 0;
4433 prev_super = BLOCK_SUPERCONTEXT (block);
4434 BLOCK_SUPERCONTEXT (block) = super;
4435 prev = block;
4437 t = BLOCK_FRAGMENT_ORIGIN (t);
4438 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4439 != prev_super)
4440 BLOCK_SAME_RANGE (t) = 0;
4441 BLOCK_SUPERCONTEXT (t) = super;
4442 return prev;
4445 /* Reverse the order of elements in the chain T of blocks,
4446 and return the new head of the chain (old last element).
4447 Also do the same on subblocks and reverse the order of elements
4448 in BLOCK_FRAGMENT_CHAIN as well. */
4450 static tree
4451 blocks_nreverse_all (tree t)
4453 tree prev = 0, block, next;
4454 for (block = t; block; block = next)
4456 next = BLOCK_CHAIN (block);
4457 BLOCK_CHAIN (block) = prev;
4458 if (BLOCK_FRAGMENT_CHAIN (block)
4459 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4461 BLOCK_FRAGMENT_CHAIN (block)
4462 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4463 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4464 BLOCK_SAME_RANGE (block) = 0;
4466 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4467 prev = block;
4469 return prev;
4473 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4474 and create duplicate blocks. */
4475 /* ??? Need an option to either create block fragments or to create
4476 abstract origin duplicates of a source block. It really depends
4477 on what optimization has been performed. */
4479 void
4480 reorder_blocks (void)
4482 tree block = DECL_INITIAL (current_function_decl);
4484 if (block == NULL_TREE)
4485 return;
4487 auto_vec<tree, 10> block_stack;
4489 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4490 clear_block_marks (block);
4492 /* Prune the old trees away, so that they don't get in the way. */
4493 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4494 BLOCK_CHAIN (block) = NULL_TREE;
4496 /* Recreate the block tree from the note nesting. */
4497 reorder_blocks_1 (get_insns (), block, &block_stack);
4498 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4501 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4503 void
4504 clear_block_marks (tree block)
4506 while (block)
4508 TREE_ASM_WRITTEN (block) = 0;
4509 clear_block_marks (BLOCK_SUBBLOCKS (block));
4510 block = BLOCK_CHAIN (block);
4514 static void
4515 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4516 vec<tree> *p_block_stack)
4518 rtx_insn *insn;
4519 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4521 for (insn = insns; insn; insn = NEXT_INSN (insn))
4523 if (NOTE_P (insn))
4525 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4527 tree block = NOTE_BLOCK (insn);
4528 tree origin;
4530 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4531 origin = block;
4533 if (prev_end)
4534 BLOCK_SAME_RANGE (prev_end) = 0;
4535 prev_end = NULL_TREE;
4537 /* If we have seen this block before, that means it now
4538 spans multiple address regions. Create a new fragment. */
4539 if (TREE_ASM_WRITTEN (block))
4541 tree new_block = copy_node (block);
4543 BLOCK_SAME_RANGE (new_block) = 0;
4544 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4545 BLOCK_FRAGMENT_CHAIN (new_block)
4546 = BLOCK_FRAGMENT_CHAIN (origin);
4547 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4549 NOTE_BLOCK (insn) = new_block;
4550 block = new_block;
4553 if (prev_beg == current_block && prev_beg)
4554 BLOCK_SAME_RANGE (block) = 1;
4556 prev_beg = origin;
4558 BLOCK_SUBBLOCKS (block) = 0;
4559 TREE_ASM_WRITTEN (block) = 1;
4560 /* When there's only one block for the entire function,
4561 current_block == block and we mustn't do this, it
4562 will cause infinite recursion. */
4563 if (block != current_block)
4565 tree super;
4566 if (block != origin)
4567 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4568 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4569 (origin))
4570 == current_block);
4571 if (p_block_stack->is_empty ())
4572 super = current_block;
4573 else
4575 super = p_block_stack->last ();
4576 gcc_assert (super == current_block
4577 || BLOCK_FRAGMENT_ORIGIN (super)
4578 == current_block);
4580 BLOCK_SUPERCONTEXT (block) = super;
4581 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4582 BLOCK_SUBBLOCKS (current_block) = block;
4583 current_block = origin;
4585 p_block_stack->safe_push (block);
4587 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4589 NOTE_BLOCK (insn) = p_block_stack->pop ();
4590 current_block = BLOCK_SUPERCONTEXT (current_block);
4591 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4592 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4593 prev_beg = NULL_TREE;
4594 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4595 ? NOTE_BLOCK (insn) : NULL_TREE;
4598 else
4600 prev_beg = NULL_TREE;
4601 if (prev_end)
4602 BLOCK_SAME_RANGE (prev_end) = 0;
4603 prev_end = NULL_TREE;
4608 /* Reverse the order of elements in the chain T of blocks,
4609 and return the new head of the chain (old last element). */
4611 tree
4612 blocks_nreverse (tree t)
4614 tree prev = 0, block, next;
4615 for (block = t; block; block = next)
4617 next = BLOCK_CHAIN (block);
4618 BLOCK_CHAIN (block) = prev;
4619 prev = block;
4621 return prev;
4624 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4625 by modifying the last node in chain 1 to point to chain 2. */
4627 tree
4628 block_chainon (tree op1, tree op2)
4630 tree t1;
4632 if (!op1)
4633 return op2;
4634 if (!op2)
4635 return op1;
4637 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4638 continue;
4639 BLOCK_CHAIN (t1) = op2;
4641 #ifdef ENABLE_TREE_CHECKING
4643 tree t2;
4644 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4645 gcc_assert (t2 != t1);
4647 #endif
4649 return op1;
4652 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4653 non-NULL, list them all into VECTOR, in a depth-first preorder
4654 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4655 blocks. */
4657 static int
4658 all_blocks (tree block, tree *vector)
4660 int n_blocks = 0;
4662 while (block)
4664 TREE_ASM_WRITTEN (block) = 0;
4666 /* Record this block. */
4667 if (vector)
4668 vector[n_blocks] = block;
4670 ++n_blocks;
4672 /* Record the subblocks, and their subblocks... */
4673 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4674 vector ? vector + n_blocks : 0);
4675 block = BLOCK_CHAIN (block);
4678 return n_blocks;
4681 /* Return a vector containing all the blocks rooted at BLOCK. The
4682 number of elements in the vector is stored in N_BLOCKS_P. The
4683 vector is dynamically allocated; it is the caller's responsibility
4684 to call `free' on the pointer returned. */
4686 static tree *
4687 get_block_vector (tree block, int *n_blocks_p)
4689 tree *block_vector;
4691 *n_blocks_p = all_blocks (block, NULL);
4692 block_vector = XNEWVEC (tree, *n_blocks_p);
4693 all_blocks (block, block_vector);
4695 return block_vector;
4698 static GTY(()) int next_block_index = 2;
4700 /* Set BLOCK_NUMBER for all the blocks in FN. */
4702 void
4703 number_blocks (tree fn)
4705 int i;
4706 int n_blocks;
4707 tree *block_vector;
4709 /* For SDB and XCOFF debugging output, we start numbering the blocks
4710 from 1 within each function, rather than keeping a running
4711 count. */
4712 #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
4713 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4714 next_block_index = 1;
4715 #endif
4717 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4719 /* The top-level BLOCK isn't numbered at all. */
4720 for (i = 1; i < n_blocks; ++i)
4721 /* We number the blocks from two. */
4722 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4724 free (block_vector);
4726 return;
4729 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4731 DEBUG_FUNCTION tree
4732 debug_find_var_in_block_tree (tree var, tree block)
4734 tree t;
4736 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4737 if (t == var)
4738 return block;
4740 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4742 tree ret = debug_find_var_in_block_tree (var, t);
4743 if (ret)
4744 return ret;
4747 return NULL_TREE;
4750 /* Keep track of whether we're in a dummy function context. If we are,
4751 we don't want to invoke the set_current_function hook, because we'll
4752 get into trouble if the hook calls target_reinit () recursively or
4753 when the initial initialization is not yet complete. */
4755 static bool in_dummy_function;
4757 /* Invoke the target hook when setting cfun. Update the optimization options
4758 if the function uses different options than the default. */
4760 static void
4761 invoke_set_current_function_hook (tree fndecl)
4763 if (!in_dummy_function)
4765 tree opts = ((fndecl)
4766 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4767 : optimization_default_node);
4769 if (!opts)
4770 opts = optimization_default_node;
4772 /* Change optimization options if needed. */
4773 if (optimization_current_node != opts)
4775 optimization_current_node = opts;
4776 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4779 targetm.set_current_function (fndecl);
4780 this_fn_optabs = this_target_optabs;
4782 if (opts != optimization_default_node)
4784 init_tree_optimization_optabs (opts);
4785 if (TREE_OPTIMIZATION_OPTABS (opts))
4786 this_fn_optabs = (struct target_optabs *)
4787 TREE_OPTIMIZATION_OPTABS (opts);
4792 /* cfun should never be set directly; use this function. */
4794 void
4795 set_cfun (struct function *new_cfun)
4797 if (cfun != new_cfun)
4799 cfun = new_cfun;
4800 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4804 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4806 static vec<function *> cfun_stack;
4808 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4809 current_function_decl accordingly. */
4811 void
4812 push_cfun (struct function *new_cfun)
4814 gcc_assert ((!cfun && !current_function_decl)
4815 || (cfun && current_function_decl == cfun->decl));
4816 cfun_stack.safe_push (cfun);
4817 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4818 set_cfun (new_cfun);
4821 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4823 void
4824 pop_cfun (void)
4826 struct function *new_cfun = cfun_stack.pop ();
4827 /* When in_dummy_function, we do have a cfun but current_function_decl is
4828 NULL. We also allow pushing NULL cfun and subsequently changing
4829 current_function_decl to something else and have both restored by
4830 pop_cfun. */
4831 gcc_checking_assert (in_dummy_function
4832 || !cfun
4833 || current_function_decl == cfun->decl);
4834 set_cfun (new_cfun);
4835 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4838 /* Return value of funcdef and increase it. */
4840 get_next_funcdef_no (void)
4842 return funcdef_no++;
4845 /* Return value of funcdef. */
4847 get_last_funcdef_no (void)
4849 return funcdef_no;
4852 /* Allocate a function structure for FNDECL and set its contents
4853 to the defaults. Set cfun to the newly-allocated object.
4854 Some of the helper functions invoked during initialization assume
4855 that cfun has already been set. Therefore, assign the new object
4856 directly into cfun and invoke the back end hook explicitly at the
4857 very end, rather than initializing a temporary and calling set_cfun
4858 on it.
4860 ABSTRACT_P is true if this is a function that will never be seen by
4861 the middle-end. Such functions are front-end concepts (like C++
4862 function templates) that do not correspond directly to functions
4863 placed in object files. */
4865 void
4866 allocate_struct_function (tree fndecl, bool abstract_p)
4868 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4870 cfun = ggc_cleared_alloc<function> ();
4872 init_eh_for_function ();
4874 if (init_machine_status)
4875 cfun->machine = (*init_machine_status) ();
4877 #ifdef OVERRIDE_ABI_FORMAT
4878 OVERRIDE_ABI_FORMAT (fndecl);
4879 #endif
4881 if (fndecl != NULL_TREE)
4883 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4884 cfun->decl = fndecl;
4885 current_function_funcdef_no = get_next_funcdef_no ();
4888 invoke_set_current_function_hook (fndecl);
4890 if (fndecl != NULL_TREE)
4892 tree result = DECL_RESULT (fndecl);
4894 if (!abstract_p)
4896 /* Now that we have activated any function-specific attributes
4897 that might affect layout, particularly vector modes, relayout
4898 each of the parameters and the result. */
4899 relayout_decl (result);
4900 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4901 parm = DECL_CHAIN (parm))
4902 relayout_decl (parm);
4904 /* Similarly relayout the function decl. */
4905 targetm.target_option.relayout_function (fndecl);
4908 if (!abstract_p && aggregate_value_p (result, fndecl))
4910 #ifdef PCC_STATIC_STRUCT_RETURN
4911 cfun->returns_pcc_struct = 1;
4912 #endif
4913 cfun->returns_struct = 1;
4916 cfun->stdarg = stdarg_p (fntype);
4918 /* Assume all registers in stdarg functions need to be saved. */
4919 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4920 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4922 /* ??? This could be set on a per-function basis by the front-end
4923 but is this worth the hassle? */
4924 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4925 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4927 if (!profile_flag && !flag_instrument_function_entry_exit)
4928 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4932 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4933 instead of just setting it. */
4935 void
4936 push_struct_function (tree fndecl)
4938 /* When in_dummy_function we might be in the middle of a pop_cfun and
4939 current_function_decl and cfun may not match. */
4940 gcc_assert (in_dummy_function
4941 || (!cfun && !current_function_decl)
4942 || (cfun && current_function_decl == cfun->decl));
4943 cfun_stack.safe_push (cfun);
4944 current_function_decl = fndecl;
4945 allocate_struct_function (fndecl, false);
4948 /* Reset crtl and other non-struct-function variables to defaults as
4949 appropriate for emitting rtl at the start of a function. */
4951 static void
4952 prepare_function_start (void)
4954 gcc_assert (!get_last_insn ());
4955 init_temp_slots ();
4956 init_emit ();
4957 init_varasm_status ();
4958 init_expr ();
4959 default_rtl_profile ();
4961 if (flag_stack_usage_info)
4963 cfun->su = ggc_cleared_alloc<stack_usage> ();
4964 cfun->su->static_stack_size = -1;
4967 cse_not_expected = ! optimize;
4969 /* Caller save not needed yet. */
4970 caller_save_needed = 0;
4972 /* We haven't done register allocation yet. */
4973 reg_renumber = 0;
4975 /* Indicate that we have not instantiated virtual registers yet. */
4976 virtuals_instantiated = 0;
4978 /* Indicate that we want CONCATs now. */
4979 generating_concat_p = 1;
4981 /* Indicate we have no need of a frame pointer yet. */
4982 frame_pointer_needed = 0;
4985 void
4986 push_dummy_function (bool with_decl)
4988 tree fn_decl, fn_type, fn_result_decl;
4990 gcc_assert (!in_dummy_function);
4991 in_dummy_function = true;
4993 if (with_decl)
4995 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4996 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4997 fn_type);
4998 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4999 NULL_TREE, void_type_node);
5000 DECL_RESULT (fn_decl) = fn_result_decl;
5002 else
5003 fn_decl = NULL_TREE;
5005 push_struct_function (fn_decl);
5008 /* Initialize the rtl expansion mechanism so that we can do simple things
5009 like generate sequences. This is used to provide a context during global
5010 initialization of some passes. You must call expand_dummy_function_end
5011 to exit this context. */
5013 void
5014 init_dummy_function_start (void)
5016 push_dummy_function (false);
5017 prepare_function_start ();
5020 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5021 and initialize static variables for generating RTL for the statements
5022 of the function. */
5024 void
5025 init_function_start (tree subr)
5027 /* Initialize backend, if needed. */
5028 initialize_rtl ();
5030 prepare_function_start ();
5031 decide_function_section (subr);
5033 /* Warn if this value is an aggregate type,
5034 regardless of which calling convention we are using for it. */
5035 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5036 warning (OPT_Waggregate_return, "function returns an aggregate");
5039 /* Expand code to verify the stack_protect_guard. This is invoked at
5040 the end of a function to be protected. */
5042 void
5043 stack_protect_epilogue (void)
5045 tree guard_decl = targetm.stack_protect_guard ();
5046 rtx_code_label *label = gen_label_rtx ();
5047 rtx x, y;
5048 rtx_insn *seq;
5050 x = expand_normal (crtl->stack_protect_guard);
5051 y = expand_normal (guard_decl);
5053 /* Allow the target to compare Y with X without leaking either into
5054 a register. */
5055 if (targetm.have_stack_protect_test ()
5056 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5057 emit_insn (seq);
5058 else
5059 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5061 /* The noreturn predictor has been moved to the tree level. The rtl-level
5062 predictors estimate this branch about 20%, which isn't enough to get
5063 things moved out of line. Since this is the only extant case of adding
5064 a noreturn function at the rtl level, it doesn't seem worth doing ought
5065 except adding the prediction by hand. */
5066 rtx_insn *tmp = get_last_insn ();
5067 if (JUMP_P (tmp))
5068 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5070 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5071 free_temp_slots ();
5072 emit_label (label);
5075 /* Start the RTL for a new function, and set variables used for
5076 emitting RTL.
5077 SUBR is the FUNCTION_DECL node.
5078 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5079 the function's parameters, which must be run at any return statement. */
5081 void
5082 expand_function_start (tree subr)
5084 /* Make sure volatile mem refs aren't considered
5085 valid operands of arithmetic insns. */
5086 init_recog_no_volatile ();
5088 crtl->profile
5089 = (profile_flag
5090 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5092 crtl->limit_stack
5093 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5095 /* Make the label for return statements to jump to. Do not special
5096 case machines with special return instructions -- they will be
5097 handled later during jump, ifcvt, or epilogue creation. */
5098 return_label = gen_label_rtx ();
5100 /* Initialize rtx used to return the value. */
5101 /* Do this before assign_parms so that we copy the struct value address
5102 before any library calls that assign parms might generate. */
5104 /* Decide whether to return the value in memory or in a register. */
5105 tree res = DECL_RESULT (subr);
5106 if (aggregate_value_p (res, subr))
5108 /* Returning something that won't go in a register. */
5109 rtx value_address = 0;
5111 #ifdef PCC_STATIC_STRUCT_RETURN
5112 if (cfun->returns_pcc_struct)
5114 int size = int_size_in_bytes (TREE_TYPE (res));
5115 value_address = assemble_static_space (size);
5117 else
5118 #endif
5120 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5121 /* Expect to be passed the address of a place to store the value.
5122 If it is passed as an argument, assign_parms will take care of
5123 it. */
5124 if (sv)
5126 value_address = gen_reg_rtx (Pmode);
5127 emit_move_insn (value_address, sv);
5130 if (value_address)
5132 rtx x = value_address;
5133 if (!DECL_BY_REFERENCE (res))
5135 x = gen_rtx_MEM (DECL_MODE (res), x);
5136 set_mem_attributes (x, res, 1);
5138 set_parm_rtl (res, x);
5141 else if (DECL_MODE (res) == VOIDmode)
5142 /* If return mode is void, this decl rtl should not be used. */
5143 set_parm_rtl (res, NULL_RTX);
5144 else
5146 /* Compute the return values into a pseudo reg, which we will copy
5147 into the true return register after the cleanups are done. */
5148 tree return_type = TREE_TYPE (res);
5149 /* If we may coalesce this result, make sure it has the expected
5150 mode. */
5151 if (flag_tree_coalesce_vars && is_gimple_reg (res))
5153 tree def = ssa_default_def (cfun, res);
5154 gcc_assert (def);
5155 machine_mode mode = promote_ssa_mode (def, NULL);
5156 set_parm_rtl (res, gen_reg_rtx (mode));
5158 else if (TYPE_MODE (return_type) != BLKmode
5159 && targetm.calls.return_in_msb (return_type))
5160 /* expand_function_end will insert the appropriate padding in
5161 this case. Use the return value's natural (unpadded) mode
5162 within the function proper. */
5163 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5164 else
5166 /* In order to figure out what mode to use for the pseudo, we
5167 figure out what the mode of the eventual return register will
5168 actually be, and use that. */
5169 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5171 /* Structures that are returned in registers are not
5172 aggregate_value_p, so we may see a PARALLEL or a REG. */
5173 if (REG_P (hard_reg))
5174 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5175 else
5177 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5178 set_parm_rtl (res, gen_group_rtx (hard_reg));
5182 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5183 result to the real return register(s). */
5184 DECL_REGISTER (res) = 1;
5186 if (chkp_function_instrumented_p (current_function_decl))
5188 tree return_type = TREE_TYPE (res);
5189 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5190 subr, 1);
5191 SET_DECL_BOUNDS_RTL (res, bounds);
5195 /* Initialize rtx for parameters and local variables.
5196 In some cases this requires emitting insns. */
5197 assign_parms (subr);
5199 /* If function gets a static chain arg, store it. */
5200 if (cfun->static_chain_decl)
5202 tree parm = cfun->static_chain_decl;
5203 rtx local, chain;
5204 rtx_insn *insn;
5205 int unsignedp;
5207 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5208 chain = targetm.calls.static_chain (current_function_decl, true);
5210 set_decl_incoming_rtl (parm, chain, false);
5211 set_parm_rtl (parm, local);
5212 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5214 if (GET_MODE (local) != GET_MODE (chain))
5216 convert_move (local, chain, unsignedp);
5217 insn = get_last_insn ();
5219 else
5220 insn = emit_move_insn (local, chain);
5222 /* Mark the register as eliminable, similar to parameters. */
5223 if (MEM_P (chain)
5224 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5225 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5227 /* If we aren't optimizing, save the static chain onto the stack. */
5228 if (!optimize)
5230 tree saved_static_chain_decl
5231 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5232 DECL_NAME (parm), TREE_TYPE (parm));
5233 rtx saved_static_chain_rtx
5234 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5235 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5236 emit_move_insn (saved_static_chain_rtx, chain);
5237 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5238 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5242 /* If the function receives a non-local goto, then store the
5243 bits we need to restore the frame pointer. */
5244 if (cfun->nonlocal_goto_save_area)
5246 tree t_save;
5247 rtx r_save;
5249 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5250 gcc_assert (DECL_RTL_SET_P (var));
5252 t_save = build4 (ARRAY_REF,
5253 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5254 cfun->nonlocal_goto_save_area,
5255 integer_zero_node, NULL_TREE, NULL_TREE);
5256 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5257 gcc_assert (GET_MODE (r_save) == Pmode);
5259 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5260 update_nonlocal_goto_save_area ();
5263 /* The following was moved from init_function_start.
5264 The move is supposed to make sdb output more accurate. */
5265 /* Indicate the beginning of the function body,
5266 as opposed to parm setup. */
5267 emit_note (NOTE_INSN_FUNCTION_BEG);
5269 gcc_assert (NOTE_P (get_last_insn ()));
5271 parm_birth_insn = get_last_insn ();
5273 if (crtl->profile)
5275 #ifdef PROFILE_HOOK
5276 PROFILE_HOOK (current_function_funcdef_no);
5277 #endif
5280 /* If we are doing generic stack checking, the probe should go here. */
5281 if (flag_stack_check == GENERIC_STACK_CHECK)
5282 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5285 void
5286 pop_dummy_function (void)
5288 pop_cfun ();
5289 in_dummy_function = false;
5292 /* Undo the effects of init_dummy_function_start. */
5293 void
5294 expand_dummy_function_end (void)
5296 gcc_assert (in_dummy_function);
5298 /* End any sequences that failed to be closed due to syntax errors. */
5299 while (in_sequence_p ())
5300 end_sequence ();
5302 /* Outside function body, can't compute type's actual size
5303 until next function's body starts. */
5305 free_after_parsing (cfun);
5306 free_after_compilation (cfun);
5307 pop_dummy_function ();
5310 /* Helper for diddle_return_value. */
5312 void
5313 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5315 if (! outgoing)
5316 return;
5318 if (REG_P (outgoing))
5319 (*doit) (outgoing, arg);
5320 else if (GET_CODE (outgoing) == PARALLEL)
5322 int i;
5324 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5326 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5328 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5329 (*doit) (x, arg);
5334 /* Call DOIT for each hard register used as a return value from
5335 the current function. */
5337 void
5338 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5340 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5341 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5344 static void
5345 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5347 emit_clobber (reg);
5350 void
5351 clobber_return_register (void)
5353 diddle_return_value (do_clobber_return_reg, NULL);
5355 /* In case we do use pseudo to return value, clobber it too. */
5356 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5358 tree decl_result = DECL_RESULT (current_function_decl);
5359 rtx decl_rtl = DECL_RTL (decl_result);
5360 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5362 do_clobber_return_reg (decl_rtl, NULL);
5367 static void
5368 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5370 emit_use (reg);
5373 static void
5374 use_return_register (void)
5376 diddle_return_value (do_use_return_reg, NULL);
5379 /* Set the location of the insn chain starting at INSN to LOC. */
5381 static void
5382 set_insn_locations (rtx_insn *insn, int loc)
5384 while (insn != NULL)
5386 if (INSN_P (insn))
5387 INSN_LOCATION (insn) = loc;
5388 insn = NEXT_INSN (insn);
5392 /* Generate RTL for the end of the current function. */
5394 void
5395 expand_function_end (void)
5397 /* If arg_pointer_save_area was referenced only from a nested
5398 function, we will not have initialized it yet. Do that now. */
5399 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5400 get_arg_pointer_save_area ();
5402 /* If we are doing generic stack checking and this function makes calls,
5403 do a stack probe at the start of the function to ensure we have enough
5404 space for another stack frame. */
5405 if (flag_stack_check == GENERIC_STACK_CHECK)
5407 rtx_insn *insn, *seq;
5409 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5410 if (CALL_P (insn))
5412 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5413 start_sequence ();
5414 if (STACK_CHECK_MOVING_SP)
5415 anti_adjust_stack_and_probe (max_frame_size, true);
5416 else
5417 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5418 seq = get_insns ();
5419 end_sequence ();
5420 set_insn_locations (seq, prologue_location);
5421 emit_insn_before (seq, stack_check_probe_note);
5422 break;
5426 /* End any sequences that failed to be closed due to syntax errors. */
5427 while (in_sequence_p ())
5428 end_sequence ();
5430 clear_pending_stack_adjust ();
5431 do_pending_stack_adjust ();
5433 /* Output a linenumber for the end of the function.
5434 SDB depends on this. */
5435 set_curr_insn_location (input_location);
5437 /* Before the return label (if any), clobber the return
5438 registers so that they are not propagated live to the rest of
5439 the function. This can only happen with functions that drop
5440 through; if there had been a return statement, there would
5441 have either been a return rtx, or a jump to the return label.
5443 We delay actual code generation after the current_function_value_rtx
5444 is computed. */
5445 rtx_insn *clobber_after = get_last_insn ();
5447 /* Output the label for the actual return from the function. */
5448 emit_label (return_label);
5450 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5452 /* Let except.c know where it should emit the call to unregister
5453 the function context for sjlj exceptions. */
5454 if (flag_exceptions)
5455 sjlj_emit_function_exit_after (get_last_insn ());
5457 else
5459 /* We want to ensure that instructions that may trap are not
5460 moved into the epilogue by scheduling, because we don't
5461 always emit unwind information for the epilogue. */
5462 if (cfun->can_throw_non_call_exceptions)
5463 emit_insn (gen_blockage ());
5466 /* If this is an implementation of throw, do what's necessary to
5467 communicate between __builtin_eh_return and the epilogue. */
5468 expand_eh_return ();
5470 /* If scalar return value was computed in a pseudo-reg, or was a named
5471 return value that got dumped to the stack, copy that to the hard
5472 return register. */
5473 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5475 tree decl_result = DECL_RESULT (current_function_decl);
5476 rtx decl_rtl = DECL_RTL (decl_result);
5478 if (REG_P (decl_rtl)
5479 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5480 : DECL_REGISTER (decl_result))
5482 rtx real_decl_rtl = crtl->return_rtx;
5484 /* This should be set in assign_parms. */
5485 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5487 /* If this is a BLKmode structure being returned in registers,
5488 then use the mode computed in expand_return. Note that if
5489 decl_rtl is memory, then its mode may have been changed,
5490 but that crtl->return_rtx has not. */
5491 if (GET_MODE (real_decl_rtl) == BLKmode)
5492 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5494 /* If a non-BLKmode return value should be padded at the least
5495 significant end of the register, shift it left by the appropriate
5496 amount. BLKmode results are handled using the group load/store
5497 machinery. */
5498 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5499 && REG_P (real_decl_rtl)
5500 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5502 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5503 REGNO (real_decl_rtl)),
5504 decl_rtl);
5505 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5507 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5509 /* If expand_function_start has created a PARALLEL for decl_rtl,
5510 move the result to the real return registers. Otherwise, do
5511 a group load from decl_rtl for a named return. */
5512 if (GET_CODE (decl_rtl) == PARALLEL)
5513 emit_group_move (real_decl_rtl, decl_rtl);
5514 else
5515 emit_group_load (real_decl_rtl, decl_rtl,
5516 TREE_TYPE (decl_result),
5517 int_size_in_bytes (TREE_TYPE (decl_result)));
5519 /* In the case of complex integer modes smaller than a word, we'll
5520 need to generate some non-trivial bitfield insertions. Do that
5521 on a pseudo and not the hard register. */
5522 else if (GET_CODE (decl_rtl) == CONCAT
5523 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5524 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5526 int old_generating_concat_p;
5527 rtx tmp;
5529 old_generating_concat_p = generating_concat_p;
5530 generating_concat_p = 0;
5531 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5532 generating_concat_p = old_generating_concat_p;
5534 emit_move_insn (tmp, decl_rtl);
5535 emit_move_insn (real_decl_rtl, tmp);
5537 /* If a named return value dumped decl_return to memory, then
5538 we may need to re-do the PROMOTE_MODE signed/unsigned
5539 extension. */
5540 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5542 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5543 promote_function_mode (TREE_TYPE (decl_result),
5544 GET_MODE (decl_rtl), &unsignedp,
5545 TREE_TYPE (current_function_decl), 1);
5547 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5549 else
5550 emit_move_insn (real_decl_rtl, decl_rtl);
5554 /* If returning a structure, arrange to return the address of the value
5555 in a place where debuggers expect to find it.
5557 If returning a structure PCC style,
5558 the caller also depends on this value.
5559 And cfun->returns_pcc_struct is not necessarily set. */
5560 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5561 && !targetm.calls.omit_struct_return_reg)
5563 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5564 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5565 rtx outgoing;
5567 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5568 type = TREE_TYPE (type);
5569 else
5570 value_address = XEXP (value_address, 0);
5572 outgoing = targetm.calls.function_value (build_pointer_type (type),
5573 current_function_decl, true);
5575 /* Mark this as a function return value so integrate will delete the
5576 assignment and USE below when inlining this function. */
5577 REG_FUNCTION_VALUE_P (outgoing) = 1;
5579 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5580 value_address = convert_memory_address (GET_MODE (outgoing),
5581 value_address);
5583 emit_move_insn (outgoing, value_address);
5585 /* Show return register used to hold result (in this case the address
5586 of the result. */
5587 crtl->return_rtx = outgoing;
5590 /* Emit the actual code to clobber return register. Don't emit
5591 it if clobber_after is a barrier, then the previous basic block
5592 certainly doesn't fall thru into the exit block. */
5593 if (!BARRIER_P (clobber_after))
5595 start_sequence ();
5596 clobber_return_register ();
5597 rtx_insn *seq = get_insns ();
5598 end_sequence ();
5600 emit_insn_after (seq, clobber_after);
5603 /* Output the label for the naked return from the function. */
5604 if (naked_return_label)
5605 emit_label (naked_return_label);
5607 /* @@@ This is a kludge. We want to ensure that instructions that
5608 may trap are not moved into the epilogue by scheduling, because
5609 we don't always emit unwind information for the epilogue. */
5610 if (cfun->can_throw_non_call_exceptions
5611 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5612 emit_insn (gen_blockage ());
5614 /* If stack protection is enabled for this function, check the guard. */
5615 if (crtl->stack_protect_guard)
5616 stack_protect_epilogue ();
5618 /* If we had calls to alloca, and this machine needs
5619 an accurate stack pointer to exit the function,
5620 insert some code to save and restore the stack pointer. */
5621 if (! EXIT_IGNORE_STACK
5622 && cfun->calls_alloca)
5624 rtx tem = 0;
5626 start_sequence ();
5627 emit_stack_save (SAVE_FUNCTION, &tem);
5628 rtx_insn *seq = get_insns ();
5629 end_sequence ();
5630 emit_insn_before (seq, parm_birth_insn);
5632 emit_stack_restore (SAVE_FUNCTION, tem);
5635 /* ??? This should no longer be necessary since stupid is no longer with
5636 us, but there are some parts of the compiler (eg reload_combine, and
5637 sh mach_dep_reorg) that still try and compute their own lifetime info
5638 instead of using the general framework. */
5639 use_return_register ();
5643 get_arg_pointer_save_area (void)
5645 rtx ret = arg_pointer_save_area;
5647 if (! ret)
5649 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5650 arg_pointer_save_area = ret;
5653 if (! crtl->arg_pointer_save_area_init)
5655 /* Save the arg pointer at the beginning of the function. The
5656 generated stack slot may not be a valid memory address, so we
5657 have to check it and fix it if necessary. */
5658 start_sequence ();
5659 emit_move_insn (validize_mem (copy_rtx (ret)),
5660 crtl->args.internal_arg_pointer);
5661 rtx_insn *seq = get_insns ();
5662 end_sequence ();
5664 push_topmost_sequence ();
5665 emit_insn_after (seq, entry_of_function ());
5666 pop_topmost_sequence ();
5668 crtl->arg_pointer_save_area_init = true;
5671 return ret;
5674 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5675 for the first time. */
5677 static void
5678 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5680 rtx_insn *tmp;
5681 hash_table<insn_cache_hasher> *hash = *hashp;
5683 if (hash == NULL)
5684 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5686 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5688 rtx *slot = hash->find_slot (tmp, INSERT);
5689 gcc_assert (*slot == NULL);
5690 *slot = tmp;
5694 /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5695 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5696 insn, then record COPY as well. */
5698 void
5699 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5701 hash_table<insn_cache_hasher> *hash;
5702 rtx *slot;
5704 hash = epilogue_insn_hash;
5705 if (!hash || !hash->find (insn))
5707 hash = prologue_insn_hash;
5708 if (!hash || !hash->find (insn))
5709 return;
5712 slot = hash->find_slot (copy, INSERT);
5713 gcc_assert (*slot == NULL);
5714 *slot = copy;
5717 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5718 we can be running after reorg, SEQUENCE rtl is possible. */
5720 static bool
5721 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5723 if (hash == NULL)
5724 return false;
5726 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5728 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5729 int i;
5730 for (i = seq->len () - 1; i >= 0; i--)
5731 if (hash->find (seq->element (i)))
5732 return true;
5733 return false;
5736 return hash->find (const_cast<rtx> (insn)) != NULL;
5740 prologue_epilogue_contains (const_rtx insn)
5742 if (contains (insn, prologue_insn_hash))
5743 return 1;
5744 if (contains (insn, epilogue_insn_hash))
5745 return 1;
5746 return 0;
5749 /* Insert use of return register before the end of BB. */
5751 static void
5752 emit_use_return_register_into_block (basic_block bb)
5754 start_sequence ();
5755 use_return_register ();
5756 rtx_insn *seq = get_insns ();
5757 end_sequence ();
5758 rtx_insn *insn = BB_END (bb);
5759 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5760 insn = prev_cc0_setter (insn);
5762 emit_insn_before (seq, insn);
5766 /* Create a return pattern, either simple_return or return, depending on
5767 simple_p. */
5769 static rtx_insn *
5770 gen_return_pattern (bool simple_p)
5772 return (simple_p
5773 ? targetm.gen_simple_return ()
5774 : targetm.gen_return ());
5777 /* Insert an appropriate return pattern at the end of block BB. This
5778 also means updating block_for_insn appropriately. SIMPLE_P is
5779 the same as in gen_return_pattern and passed to it. */
5781 void
5782 emit_return_into_block (bool simple_p, basic_block bb)
5784 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5785 BB_END (bb));
5786 rtx pat = PATTERN (jump);
5787 if (GET_CODE (pat) == PARALLEL)
5788 pat = XVECEXP (pat, 0, 0);
5789 gcc_assert (ANY_RETURN_P (pat));
5790 JUMP_LABEL (jump) = pat;
5793 /* Set JUMP_LABEL for a return insn. */
5795 void
5796 set_return_jump_label (rtx_insn *returnjump)
5798 rtx pat = PATTERN (returnjump);
5799 if (GET_CODE (pat) == PARALLEL)
5800 pat = XVECEXP (pat, 0, 0);
5801 if (ANY_RETURN_P (pat))
5802 JUMP_LABEL (returnjump) = pat;
5803 else
5804 JUMP_LABEL (returnjump) = ret_rtx;
5807 /* Return true if there are any active insns between HEAD and TAIL. */
5808 bool
5809 active_insn_between (rtx_insn *head, rtx_insn *tail)
5811 while (tail)
5813 if (active_insn_p (tail))
5814 return true;
5815 if (tail == head)
5816 return false;
5817 tail = PREV_INSN (tail);
5819 return false;
5822 /* LAST_BB is a block that exits, and empty of active instructions.
5823 Examine its predecessors for jumps that can be converted to
5824 (conditional) returns. */
5825 vec<edge>
5826 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5827 vec<edge> unconverted ATTRIBUTE_UNUSED)
5829 int i;
5830 basic_block bb;
5831 edge_iterator ei;
5832 edge e;
5833 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5835 FOR_EACH_EDGE (e, ei, last_bb->preds)
5836 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5837 src_bbs.quick_push (e->src);
5839 rtx_insn *label = BB_HEAD (last_bb);
5841 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5843 rtx_insn *jump = BB_END (bb);
5845 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5846 continue;
5848 e = find_edge (bb, last_bb);
5850 /* If we have an unconditional jump, we can replace that
5851 with a simple return instruction. */
5852 if (simplejump_p (jump))
5854 /* The use of the return register might be present in the exit
5855 fallthru block. Either:
5856 - removing the use is safe, and we should remove the use in
5857 the exit fallthru block, or
5858 - removing the use is not safe, and we should add it here.
5859 For now, we conservatively choose the latter. Either of the
5860 2 helps in crossjumping. */
5861 emit_use_return_register_into_block (bb);
5863 emit_return_into_block (simple_p, bb);
5864 delete_insn (jump);
5867 /* If we have a conditional jump branching to the last
5868 block, we can try to replace that with a conditional
5869 return instruction. */
5870 else if (condjump_p (jump))
5872 rtx dest;
5874 if (simple_p)
5875 dest = simple_return_rtx;
5876 else
5877 dest = ret_rtx;
5878 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5880 if (targetm.have_simple_return () && simple_p)
5882 if (dump_file)
5883 fprintf (dump_file,
5884 "Failed to redirect bb %d branch.\n", bb->index);
5885 unconverted.safe_push (e);
5887 continue;
5890 /* See comment in simplejump_p case above. */
5891 emit_use_return_register_into_block (bb);
5893 /* If this block has only one successor, it both jumps
5894 and falls through to the fallthru block, so we can't
5895 delete the edge. */
5896 if (single_succ_p (bb))
5897 continue;
5899 else
5901 if (targetm.have_simple_return () && simple_p)
5903 if (dump_file)
5904 fprintf (dump_file,
5905 "Failed to redirect bb %d branch.\n", bb->index);
5906 unconverted.safe_push (e);
5908 continue;
5911 /* Fix up the CFG for the successful change we just made. */
5912 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5913 e->flags &= ~EDGE_CROSSING;
5915 src_bbs.release ();
5916 return unconverted;
5919 /* Emit a return insn for the exit fallthru block. */
5920 basic_block
5921 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5923 basic_block last_bb = exit_fallthru_edge->src;
5925 if (JUMP_P (BB_END (last_bb)))
5927 last_bb = split_edge (exit_fallthru_edge);
5928 exit_fallthru_edge = single_succ_edge (last_bb);
5930 emit_barrier_after (BB_END (last_bb));
5931 emit_return_into_block (simple_p, last_bb);
5932 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5933 return last_bb;
5937 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5938 this into place with notes indicating where the prologue ends and where
5939 the epilogue begins. Update the basic block information when possible.
5941 Notes on epilogue placement:
5942 There are several kinds of edges to the exit block:
5943 * a single fallthru edge from LAST_BB
5944 * possibly, edges from blocks containing sibcalls
5945 * possibly, fake edges from infinite loops
5947 The epilogue is always emitted on the fallthru edge from the last basic
5948 block in the function, LAST_BB, into the exit block.
5950 If LAST_BB is empty except for a label, it is the target of every
5951 other basic block in the function that ends in a return. If a
5952 target has a return or simple_return pattern (possibly with
5953 conditional variants), these basic blocks can be changed so that a
5954 return insn is emitted into them, and their target is adjusted to
5955 the real exit block.
5957 Notes on shrink wrapping: We implement a fairly conservative
5958 version of shrink-wrapping rather than the textbook one. We only
5959 generate a single prologue and a single epilogue. This is
5960 sufficient to catch a number of interesting cases involving early
5961 exits.
5963 First, we identify the blocks that require the prologue to occur before
5964 them. These are the ones that modify a call-saved register, or reference
5965 any of the stack or frame pointer registers. To simplify things, we then
5966 mark everything reachable from these blocks as also requiring a prologue.
5967 This takes care of loops automatically, and avoids the need to examine
5968 whether MEMs reference the frame, since it is sufficient to check for
5969 occurrences of the stack or frame pointer.
5971 We then compute the set of blocks for which the need for a prologue
5972 is anticipatable (borrowing terminology from the shrink-wrapping
5973 description in Muchnick's book). These are the blocks which either
5974 require a prologue themselves, or those that have only successors
5975 where the prologue is anticipatable. The prologue needs to be
5976 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5977 is not. For the moment, we ensure that only one such edge exists.
5979 The epilogue is placed as described above, but we make a
5980 distinction between inserting return and simple_return patterns
5981 when modifying other blocks that end in a return. Blocks that end
5982 in a sibcall omit the sibcall_epilogue if the block is not in
5983 ANTIC. */
5985 void
5986 thread_prologue_and_epilogue_insns (void)
5988 bool inserted;
5989 vec<edge> unconverted_simple_returns = vNULL;
5990 bitmap_head bb_flags;
5991 rtx_insn *returnjump;
5992 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5993 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5994 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5995 edge_iterator ei;
5997 df_analyze ();
5999 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6001 inserted = false;
6002 epilogue_end = NULL;
6003 returnjump = NULL;
6005 /* Can't deal with multiple successors of the entry block at the
6006 moment. Function should always have at least one entry
6007 point. */
6008 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6009 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6010 orig_entry_edge = entry_edge;
6012 split_prologue_seq = NULL;
6013 if (flag_split_stack
6014 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
6015 == NULL))
6017 start_sequence ();
6018 emit_insn (targetm.gen_split_stack_prologue ());
6019 split_prologue_seq = get_insns ();
6020 end_sequence ();
6022 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
6023 set_insn_locations (split_prologue_seq, prologue_location);
6026 prologue_seq = NULL;
6027 if (targetm.have_prologue ())
6029 start_sequence ();
6030 rtx_insn *seq = targetm.gen_prologue ();
6031 emit_insn (seq);
6033 /* Insert an explicit USE for the frame pointer
6034 if the profiling is on and the frame pointer is required. */
6035 if (crtl->profile && frame_pointer_needed)
6036 emit_use (hard_frame_pointer_rtx);
6038 /* Retain a map of the prologue insns. */
6039 record_insns (seq, NULL, &prologue_insn_hash);
6040 emit_note (NOTE_INSN_PROLOGUE_END);
6042 /* Ensure that instructions are not moved into the prologue when
6043 profiling is on. The call to the profiling routine can be
6044 emitted within the live range of a call-clobbered register. */
6045 if (!targetm.profile_before_prologue () && crtl->profile)
6046 emit_insn (gen_blockage ());
6048 prologue_seq = get_insns ();
6049 end_sequence ();
6050 set_insn_locations (prologue_seq, prologue_location);
6053 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
6055 /* Try to perform a kind of shrink-wrapping, making sure the
6056 prologue/epilogue is emitted only around those parts of the
6057 function that require it. */
6059 try_shrink_wrapping (&entry_edge, &bb_flags, prologue_seq);
6061 if (split_prologue_seq != NULL_RTX)
6063 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6064 inserted = true;
6066 if (prologue_seq != NULL_RTX)
6068 insert_insn_on_edge (prologue_seq, entry_edge);
6069 inserted = true;
6072 /* If the exit block has no non-fake predecessors, we don't need
6073 an epilogue. */
6074 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6075 if ((e->flags & EDGE_FAKE) == 0)
6076 break;
6077 if (e == NULL)
6078 goto epilogue_done;
6080 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6082 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6084 if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
6085 exit_fallthru_edge
6086 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
6087 &unconverted_simple_returns,
6088 &returnjump);
6089 if (targetm.have_return ())
6091 if (exit_fallthru_edge == NULL)
6092 goto epilogue_done;
6094 if (optimize)
6096 basic_block last_bb = exit_fallthru_edge->src;
6098 if (LABEL_P (BB_HEAD (last_bb))
6099 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6100 convert_jumps_to_returns (last_bb, false, vNULL);
6102 if (EDGE_COUNT (last_bb->preds) != 0
6103 && single_succ_p (last_bb))
6105 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6106 epilogue_end = returnjump = BB_END (last_bb);
6108 /* Emitting the return may add a basic block.
6109 Fix bb_flags for the added block. */
6110 if (targetm.have_simple_return ()
6111 && last_bb != exit_fallthru_edge->src)
6112 bitmap_set_bit (&bb_flags, last_bb->index);
6114 goto epilogue_done;
6119 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6120 this marker for the splits of EH_RETURN patterns, and nothing else
6121 uses the flag in the meantime. */
6122 epilogue_completed = 1;
6124 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6125 some targets, these get split to a special version of the epilogue
6126 code. In order to be able to properly annotate these with unwind
6127 info, try to split them now. If we get a valid split, drop an
6128 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6129 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6131 rtx_insn *prev, *last, *trial;
6133 if (e->flags & EDGE_FALLTHRU)
6134 continue;
6135 last = BB_END (e->src);
6136 if (!eh_returnjump_p (last))
6137 continue;
6139 prev = PREV_INSN (last);
6140 trial = try_split (PATTERN (last), last, 1);
6141 if (trial == last)
6142 continue;
6144 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6145 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6148 /* If nothing falls through into the exit block, we don't need an
6149 epilogue. */
6151 if (exit_fallthru_edge == NULL)
6152 goto epilogue_done;
6154 if (targetm.have_epilogue ())
6156 start_sequence ();
6157 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6158 rtx_insn *seq = targetm.gen_epilogue ();
6159 if (seq)
6160 emit_jump_insn (seq);
6162 /* Retain a map of the epilogue insns. */
6163 record_insns (seq, NULL, &epilogue_insn_hash);
6164 set_insn_locations (seq, epilogue_location);
6166 seq = get_insns ();
6167 returnjump = get_last_insn ();
6168 end_sequence ();
6170 insert_insn_on_edge (seq, exit_fallthru_edge);
6171 inserted = true;
6173 if (JUMP_P (returnjump))
6174 set_return_jump_label (returnjump);
6176 else
6178 basic_block cur_bb;
6180 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6181 goto epilogue_done;
6182 /* We have a fall-through edge to the exit block, the source is not
6183 at the end of the function, and there will be an assembler epilogue
6184 at the end of the function.
6185 We can't use force_nonfallthru here, because that would try to
6186 use return. Inserting a jump 'by hand' is extremely messy, so
6187 we take advantage of cfg_layout_finalize using
6188 fixup_fallthru_exit_predecessor. */
6189 cfg_layout_initialize (0);
6190 FOR_EACH_BB_FN (cur_bb, cfun)
6191 if (cur_bb->index >= NUM_FIXED_BLOCKS
6192 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6193 cur_bb->aux = cur_bb->next_bb;
6194 cfg_layout_finalize ();
6197 epilogue_done:
6199 default_rtl_profile ();
6201 if (inserted)
6203 sbitmap blocks;
6205 commit_edge_insertions ();
6207 /* Look for basic blocks within the prologue insns. */
6208 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6209 bitmap_clear (blocks);
6210 bitmap_set_bit (blocks, entry_edge->dest->index);
6211 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6212 find_many_sub_basic_blocks (blocks);
6213 sbitmap_free (blocks);
6215 /* The epilogue insns we inserted may cause the exit edge to no longer
6216 be fallthru. */
6217 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6219 if (((e->flags & EDGE_FALLTHRU) != 0)
6220 && returnjump_p (BB_END (e->src)))
6221 e->flags &= ~EDGE_FALLTHRU;
6225 if (targetm.have_simple_return ())
6226 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6227 returnjump, unconverted_simple_returns);
6229 /* Emit sibling epilogues before any sibling call sites. */
6230 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6231 ei_safe_edge (ei));
6234 basic_block bb = e->src;
6235 rtx_insn *insn = BB_END (bb);
6237 if (!CALL_P (insn)
6238 || ! SIBLING_CALL_P (insn)
6239 || (targetm.have_simple_return ()
6240 && entry_edge != orig_entry_edge
6241 && !bitmap_bit_p (&bb_flags, bb->index)))
6243 ei_next (&ei);
6244 continue;
6247 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6249 start_sequence ();
6250 emit_note (NOTE_INSN_EPILOGUE_BEG);
6251 emit_insn (ep_seq);
6252 rtx_insn *seq = get_insns ();
6253 end_sequence ();
6255 /* Retain a map of the epilogue insns. Used in life analysis to
6256 avoid getting rid of sibcall epilogue insns. Do this before we
6257 actually emit the sequence. */
6258 record_insns (seq, NULL, &epilogue_insn_hash);
6259 set_insn_locations (seq, epilogue_location);
6261 emit_insn_before (seq, insn);
6263 ei_next (&ei);
6266 if (epilogue_end)
6268 rtx_insn *insn, *next;
6270 /* Similarly, move any line notes that appear after the epilogue.
6271 There is no need, however, to be quite so anal about the existence
6272 of such a note. Also possibly move
6273 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6274 info generation. */
6275 for (insn = epilogue_end; insn; insn = next)
6277 next = NEXT_INSN (insn);
6278 if (NOTE_P (insn)
6279 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6280 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6284 bitmap_clear (&bb_flags);
6286 /* Threading the prologue and epilogue changes the artificial refs
6287 in the entry and exit blocks. */
6288 epilogue_completed = 1;
6289 df_update_entry_exit_and_calls ();
6292 /* Reposition the prologue-end and epilogue-begin notes after
6293 instruction scheduling. */
6295 void
6296 reposition_prologue_and_epilogue_notes (void)
6298 if (!targetm.have_prologue ()
6299 && !targetm.have_epilogue ()
6300 && !targetm.have_sibcall_epilogue ())
6301 return;
6303 /* Since the hash table is created on demand, the fact that it is
6304 non-null is a signal that it is non-empty. */
6305 if (prologue_insn_hash != NULL)
6307 size_t len = prologue_insn_hash->elements ();
6308 rtx_insn *insn, *last = NULL, *note = NULL;
6310 /* Scan from the beginning until we reach the last prologue insn. */
6311 /* ??? While we do have the CFG intact, there are two problems:
6312 (1) The prologue can contain loops (typically probing the stack),
6313 which means that the end of the prologue isn't in the first bb.
6314 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6315 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6317 if (NOTE_P (insn))
6319 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6320 note = insn;
6322 else if (contains (insn, prologue_insn_hash))
6324 last = insn;
6325 if (--len == 0)
6326 break;
6330 if (last)
6332 if (note == NULL)
6334 /* Scan forward looking for the PROLOGUE_END note. It should
6335 be right at the beginning of the block, possibly with other
6336 insn notes that got moved there. */
6337 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6339 if (NOTE_P (note)
6340 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6341 break;
6345 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6346 if (LABEL_P (last))
6347 last = NEXT_INSN (last);
6348 reorder_insns (note, note, last);
6352 if (epilogue_insn_hash != NULL)
6354 edge_iterator ei;
6355 edge e;
6357 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6359 rtx_insn *insn, *first = NULL, *note = NULL;
6360 basic_block bb = e->src;
6362 /* Scan from the beginning until we reach the first epilogue insn. */
6363 FOR_BB_INSNS (bb, insn)
6365 if (NOTE_P (insn))
6367 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6369 note = insn;
6370 if (first != NULL)
6371 break;
6374 else if (first == NULL && contains (insn, epilogue_insn_hash))
6376 first = insn;
6377 if (note != NULL)
6378 break;
6382 if (note)
6384 /* If the function has a single basic block, and no real
6385 epilogue insns (e.g. sibcall with no cleanup), the
6386 epilogue note can get scheduled before the prologue
6387 note. If we have frame related prologue insns, having
6388 them scanned during the epilogue will result in a crash.
6389 In this case re-order the epilogue note to just before
6390 the last insn in the block. */
6391 if (first == NULL)
6392 first = BB_END (bb);
6394 if (PREV_INSN (first) != note)
6395 reorder_insns (note, note, PREV_INSN (first));
6401 /* Returns the name of function declared by FNDECL. */
6402 const char *
6403 fndecl_name (tree fndecl)
6405 if (fndecl == NULL)
6406 return "(nofn)";
6407 return lang_hooks.decl_printable_name (fndecl, 2);
6410 /* Returns the name of function FN. */
6411 const char *
6412 function_name (struct function *fn)
6414 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6415 return fndecl_name (fndecl);
6418 /* Returns the name of the current function. */
6419 const char *
6420 current_function_name (void)
6422 return function_name (cfun);
6426 static unsigned int
6427 rest_of_handle_check_leaf_regs (void)
6429 #ifdef LEAF_REGISTERS
6430 crtl->uses_only_leaf_regs
6431 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6432 #endif
6433 return 0;
6436 /* Insert a TYPE into the used types hash table of CFUN. */
6438 static void
6439 used_types_insert_helper (tree type, struct function *func)
6441 if (type != NULL && func != NULL)
6443 if (func->used_types_hash == NULL)
6444 func->used_types_hash = hash_set<tree>::create_ggc (37);
6446 func->used_types_hash->add (type);
6450 /* Given a type, insert it into the used hash table in cfun. */
6451 void
6452 used_types_insert (tree t)
6454 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6455 if (TYPE_NAME (t))
6456 break;
6457 else
6458 t = TREE_TYPE (t);
6459 if (TREE_CODE (t) == ERROR_MARK)
6460 return;
6461 if (TYPE_NAME (t) == NULL_TREE
6462 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6463 t = TYPE_MAIN_VARIANT (t);
6464 if (debug_info_level > DINFO_LEVEL_NONE)
6466 if (cfun)
6467 used_types_insert_helper (t, cfun);
6468 else
6470 /* So this might be a type referenced by a global variable.
6471 Record that type so that we can later decide to emit its
6472 debug information. */
6473 vec_safe_push (types_used_by_cur_var_decl, t);
6478 /* Helper to Hash a struct types_used_by_vars_entry. */
6480 static hashval_t
6481 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6483 gcc_assert (entry && entry->var_decl && entry->type);
6485 return iterative_hash_object (entry->type,
6486 iterative_hash_object (entry->var_decl, 0));
6489 /* Hash function of the types_used_by_vars_entry hash table. */
6491 hashval_t
6492 used_type_hasher::hash (types_used_by_vars_entry *entry)
6494 return hash_types_used_by_vars_entry (entry);
6497 /*Equality function of the types_used_by_vars_entry hash table. */
6499 bool
6500 used_type_hasher::equal (types_used_by_vars_entry *e1,
6501 types_used_by_vars_entry *e2)
6503 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6506 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6508 void
6509 types_used_by_var_decl_insert (tree type, tree var_decl)
6511 if (type != NULL && var_decl != NULL)
6513 types_used_by_vars_entry **slot;
6514 struct types_used_by_vars_entry e;
6515 e.var_decl = var_decl;
6516 e.type = type;
6517 if (types_used_by_vars_hash == NULL)
6518 types_used_by_vars_hash
6519 = hash_table<used_type_hasher>::create_ggc (37);
6521 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6522 if (*slot == NULL)
6524 struct types_used_by_vars_entry *entry;
6525 entry = ggc_alloc<types_used_by_vars_entry> ();
6526 entry->type = type;
6527 entry->var_decl = var_decl;
6528 *slot = entry;
6533 namespace {
6535 const pass_data pass_data_leaf_regs =
6537 RTL_PASS, /* type */
6538 "*leaf_regs", /* name */
6539 OPTGROUP_NONE, /* optinfo_flags */
6540 TV_NONE, /* tv_id */
6541 0, /* properties_required */
6542 0, /* properties_provided */
6543 0, /* properties_destroyed */
6544 0, /* todo_flags_start */
6545 0, /* todo_flags_finish */
6548 class pass_leaf_regs : public rtl_opt_pass
6550 public:
6551 pass_leaf_regs (gcc::context *ctxt)
6552 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6555 /* opt_pass methods: */
6556 virtual unsigned int execute (function *)
6558 return rest_of_handle_check_leaf_regs ();
6561 }; // class pass_leaf_regs
6563 } // anon namespace
6565 rtl_opt_pass *
6566 make_pass_leaf_regs (gcc::context *ctxt)
6568 return new pass_leaf_regs (ctxt);
6571 static unsigned int
6572 rest_of_handle_thread_prologue_and_epilogue (void)
6574 if (optimize)
6575 cleanup_cfg (CLEANUP_EXPENSIVE);
6577 /* On some machines, the prologue and epilogue code, or parts thereof,
6578 can be represented as RTL. Doing so lets us schedule insns between
6579 it and the rest of the code and also allows delayed branch
6580 scheduling to operate in the epilogue. */
6581 thread_prologue_and_epilogue_insns ();
6583 /* Some non-cold blocks may now be only reachable from cold blocks.
6584 Fix that up. */
6585 fixup_partitions ();
6587 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6588 see PR57320. */
6589 cleanup_cfg (0);
6591 /* The stack usage info is finalized during prologue expansion. */
6592 if (flag_stack_usage_info)
6593 output_stack_usage ();
6595 return 0;
6598 namespace {
6600 const pass_data pass_data_thread_prologue_and_epilogue =
6602 RTL_PASS, /* type */
6603 "pro_and_epilogue", /* name */
6604 OPTGROUP_NONE, /* optinfo_flags */
6605 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6606 0, /* properties_required */
6607 0, /* properties_provided */
6608 0, /* properties_destroyed */
6609 0, /* todo_flags_start */
6610 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6613 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6615 public:
6616 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6617 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6620 /* opt_pass methods: */
6621 virtual unsigned int execute (function *)
6623 return rest_of_handle_thread_prologue_and_epilogue ();
6626 }; // class pass_thread_prologue_and_epilogue
6628 } // anon namespace
6630 rtl_opt_pass *
6631 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6633 return new pass_thread_prologue_and_epilogue (ctxt);
6637 /* This mini-pass fixes fall-out from SSA in asm statements that have
6638 in-out constraints. Say you start with
6640 orig = inout;
6641 asm ("": "+mr" (inout));
6642 use (orig);
6644 which is transformed very early to use explicit output and match operands:
6646 orig = inout;
6647 asm ("": "=mr" (inout) : "0" (inout));
6648 use (orig);
6650 Or, after SSA and copyprop,
6652 asm ("": "=mr" (inout_2) : "0" (inout_1));
6653 use (inout_1);
6655 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6656 they represent two separate values, so they will get different pseudo
6657 registers during expansion. Then, since the two operands need to match
6658 per the constraints, but use different pseudo registers, reload can
6659 only register a reload for these operands. But reloads can only be
6660 satisfied by hardregs, not by memory, so we need a register for this
6661 reload, just because we are presented with non-matching operands.
6662 So, even though we allow memory for this operand, no memory can be
6663 used for it, just because the two operands don't match. This can
6664 cause reload failures on register-starved targets.
6666 So it's a symptom of reload not being able to use memory for reloads
6667 or, alternatively it's also a symptom of both operands not coming into
6668 reload as matching (in which case the pseudo could go to memory just
6669 fine, as the alternative allows it, and no reload would be necessary).
6670 We fix the latter problem here, by transforming
6672 asm ("": "=mr" (inout_2) : "0" (inout_1));
6674 back to
6676 inout_2 = inout_1;
6677 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6679 static void
6680 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6682 int i;
6683 bool changed = false;
6684 rtx op = SET_SRC (p_sets[0]);
6685 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6686 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6687 bool *output_matched = XALLOCAVEC (bool, noutputs);
6689 memset (output_matched, 0, noutputs * sizeof (bool));
6690 for (i = 0; i < ninputs; i++)
6692 rtx input, output;
6693 rtx_insn *insns;
6694 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6695 char *end;
6696 int match, j;
6698 if (*constraint == '%')
6699 constraint++;
6701 match = strtoul (constraint, &end, 10);
6702 if (end == constraint)
6703 continue;
6705 gcc_assert (match < noutputs);
6706 output = SET_DEST (p_sets[match]);
6707 input = RTVEC_ELT (inputs, i);
6708 /* Only do the transformation for pseudos. */
6709 if (! REG_P (output)
6710 || rtx_equal_p (output, input)
6711 || (GET_MODE (input) != VOIDmode
6712 && GET_MODE (input) != GET_MODE (output)))
6713 continue;
6715 /* We can't do anything if the output is also used as input,
6716 as we're going to overwrite it. */
6717 for (j = 0; j < ninputs; j++)
6718 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6719 break;
6720 if (j != ninputs)
6721 continue;
6723 /* Avoid changing the same input several times. For
6724 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6725 only change in once (to out1), rather than changing it
6726 first to out1 and afterwards to out2. */
6727 if (i > 0)
6729 for (j = 0; j < noutputs; j++)
6730 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6731 break;
6732 if (j != noutputs)
6733 continue;
6735 output_matched[match] = true;
6737 start_sequence ();
6738 emit_move_insn (output, input);
6739 insns = get_insns ();
6740 end_sequence ();
6741 emit_insn_before (insns, insn);
6743 /* Now replace all mentions of the input with output. We can't
6744 just replace the occurrence in inputs[i], as the register might
6745 also be used in some other input (or even in an address of an
6746 output), which would mean possibly increasing the number of
6747 inputs by one (namely 'output' in addition), which might pose
6748 a too complicated problem for reload to solve. E.g. this situation:
6750 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6752 Here 'input' is used in two occurrences as input (once for the
6753 input operand, once for the address in the second output operand).
6754 If we would replace only the occurrence of the input operand (to
6755 make the matching) we would be left with this:
6757 output = input
6758 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6760 Now we suddenly have two different input values (containing the same
6761 value, but different pseudos) where we formerly had only one.
6762 With more complicated asms this might lead to reload failures
6763 which wouldn't have happen without this pass. So, iterate over
6764 all operands and replace all occurrences of the register used. */
6765 for (j = 0; j < noutputs; j++)
6766 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6767 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6768 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6769 input, output);
6770 for (j = 0; j < ninputs; j++)
6771 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6772 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6773 input, output);
6775 changed = true;
6778 if (changed)
6779 df_insn_rescan (insn);
6782 /* Add the decl D to the local_decls list of FUN. */
6784 void
6785 add_local_decl (struct function *fun, tree d)
6787 gcc_assert (TREE_CODE (d) == VAR_DECL);
6788 vec_safe_push (fun->local_decls, d);
6791 namespace {
6793 const pass_data pass_data_match_asm_constraints =
6795 RTL_PASS, /* type */
6796 "asmcons", /* name */
6797 OPTGROUP_NONE, /* optinfo_flags */
6798 TV_NONE, /* tv_id */
6799 0, /* properties_required */
6800 0, /* properties_provided */
6801 0, /* properties_destroyed */
6802 0, /* todo_flags_start */
6803 0, /* todo_flags_finish */
6806 class pass_match_asm_constraints : public rtl_opt_pass
6808 public:
6809 pass_match_asm_constraints (gcc::context *ctxt)
6810 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6813 /* opt_pass methods: */
6814 virtual unsigned int execute (function *);
6816 }; // class pass_match_asm_constraints
6818 unsigned
6819 pass_match_asm_constraints::execute (function *fun)
6821 basic_block bb;
6822 rtx_insn *insn;
6823 rtx pat, *p_sets;
6824 int noutputs;
6826 if (!crtl->has_asm_statement)
6827 return 0;
6829 df_set_flags (DF_DEFER_INSN_RESCAN);
6830 FOR_EACH_BB_FN (bb, fun)
6832 FOR_BB_INSNS (bb, insn)
6834 if (!INSN_P (insn))
6835 continue;
6837 pat = PATTERN (insn);
6838 if (GET_CODE (pat) == PARALLEL)
6839 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6840 else if (GET_CODE (pat) == SET)
6841 p_sets = &PATTERN (insn), noutputs = 1;
6842 else
6843 continue;
6845 if (GET_CODE (*p_sets) == SET
6846 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6847 match_asm_constraints_1 (insn, p_sets, noutputs);
6851 return TODO_df_finish;
6854 } // anon namespace
6856 rtl_opt_pass *
6857 make_pass_match_asm_constraints (gcc::context *ctxt)
6859 return new pass_match_asm_constraints (ctxt);
6863 #include "gt-function.h"