/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"
#include "params.h"
#include "bb-reorder.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
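
/* For illustration only (not part of the original source): with
   ALIGN == 16,

     FLOOR_ROUND (-24, 16) == -32    (-24 & ~15)
     CEIL_ROUND  (-24, 16) == -16    ((-24 + 15) & ~15)

   so both macros remain correct for the negative offsets used when
   FRAME_GROWS_DOWNWARD.  */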
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */
void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
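
/* For illustration only (an assumption, not from the original source):
   with a 32-bit Pmode and UNITS_PER_WORD == 4, the test above rejects
   any frame larger than 2^31 - 64*4 = 2147483392 bytes, reserving the
   topmost 64 words for the fixed part of the frame.  */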
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with the KIND parameter set to
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
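
/* A hedged usage sketch (not part of the original source): a caller
   needing a word-sized scratch slot with the mode's natural alignment
   could write the following; `spill_example' is a hypothetical helper,
   not a real GCC function.  */
#if 0
static rtx
spill_example (rtx src)
{
  /* ALIGN == 0: align according to word_mode.  */
  rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);
  emit_move_insn (slot, src);
  return slot;
}
#endif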
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
                 remove_unused_temp_slot_addresses_1,
                 NULL);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
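
/* For illustration only (not from the original source): an address such
   as (plus (reg virtual-stack-vars) (const_int 16)) that was never
   recorded in the hash table is still matched by the last-resort case
   above, provided 16 falls inside some slot's
   [base_offset, base_offset + full_size) range.  */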
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
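
/* A hedged usage sketch (not from the original source): forcing an
   aggregate value into addressable stack memory; `struct_type' stands
   for some aggregate tree type and is hypothetical.  */
#if 0
  rtx mem = assign_temp (struct_type, /*keep=*/0, /*memory_required=*/1,
                         /*dont_promote=*/0);
#endif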
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }
        }

      move_slot_to_level (p, temp_slot_level - 1);
      p->addr_taken = 0;

      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        {
          make_slot_available (p);
          some_available = true;
        }
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
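
/* A hedged sketch of the temp-slot discipline described above (not part
   of the original source); `expand_example_stmt' is a hypothetical
   caller, not a real GCC function.  */
#if 0
static void
expand_example_stmt (void)
{
  rtx tmp;

  push_temp_slots ();
  /* KEEP == 0, so free_temp_slots may recycle the slot.  */
  tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit RTL that writes and reads TMP ...  */
  free_temp_slots ();
  pop_temp_slots ();
}
#endif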
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
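
/* For illustration only (an assumption, not from the original source):
   on a port with ACCUMULATE_OUTGOING_ARGS, no REG_PARM_STACK_SPACE,
   crtl->outgoing_args_size == 32 and STACK_POINTER_OFFSET == 8, the
   default above gives STACK_DYNAMIC_OFFSET == 32 + 8 == 40, so
   dynamically allocated objects start 40 bytes from the stack
   pointer.  */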
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
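
/* For illustration only (not from the original source): with
   var_offset == STARTING_FRAME_OFFSET, the routine above rewrites

     (mem (plus (reg virtual-stack-vars) (const_int 8)))

   into

     (mem (plus (reg frame-pointer) (const_int STARTING_FRAME_OFFSET + 8)))

   folding the two constants where simplify_gen_binary can.  */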
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   GEN_INT (-offset));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
                                   new_rtx, GEN_INT (offset), SET_DEST (set),
                                   1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && CONST_INT_P (recog_data.operand[2])
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += INTVAL (recog_data.operand[2]);

          /* If the sum is zero, then replace with a plain move.  */
          if (offset == 0
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);
            bool changed = false;

            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
            if (!changed)
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               still has an offset, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (offset == 0)
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn_and_edges (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((TREE_CODE (t) == VAR_DECL
               || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
1936 struct rtl_opt_pass pass_instantiate_virtual_regs =
1939 RTL_PASS,
1940 "vregs", /* name */
1941 NULL, /* gate */
1942 instantiate_virtual_regs, /* execute */
1943 NULL, /* sub */
1944 NULL, /* next */
1945 0, /* static_pass_number */
1946 TV_NONE, /* tv_id */
1947 0, /* properties_required */
1948 0, /* properties_provided */
1949 0, /* properties_destroyed */
1950 0, /* todo_flags_start */
1951 0 /* todo_flags_finish */
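/* [Editorial aside -- illustration only, not part of function.c.]
   A minimal standalone sketch of what this pass does: every use of a
   virtual register is rewritten as its hard base register plus the
   per-function offset computed in instantiate_virtual_regs above.
   Real RTL is far richer; the bases and displacements below are
   made-up placeholder values, not real target numbers.  */

#include <stdio.h>

enum base { VIRT_STACK_VARS, VIRT_INCOMING_ARGS, HARD_FRAME, HARD_ARG };
struct addr { enum base base; long offset; };

static struct addr
instantiate (struct addr a, long var_offset, long in_arg_offset)
{
  switch (a.base)
    {
    case VIRT_STACK_VARS:              /* -> frame pointer + var_offset */
      return (struct addr) { HARD_FRAME, a.offset + var_offset };
    case VIRT_INCOMING_ARGS:           /* -> arg pointer + in_arg_offset */
      return (struct addr) { HARD_ARG, a.offset + in_arg_offset };
    default:
      return a;                        /* already a hard base */
    }
}

int
main (void)
{
  struct addr a = { VIRT_STACK_VARS, 12 };
  struct addr b = instantiate (a, -16, 8);
  printf ("base=%d offset=%ld\n", b.base, b.offset);  /* frame base, -4 */
  return 0;
}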
1956 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1957 This means a type for which function calls must pass an address to the
1958 function or get an address back from the function.
1959 EXP may be a type node or an expression (whose type is tested). */
1961 int
1962 aggregate_value_p (const_tree exp, const_tree fntype)
1964 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1965 int i, regno, nregs;
1966 rtx reg;
1968 if (fntype)
1969 switch (TREE_CODE (fntype))
1971 case CALL_EXPR:
1973 tree fndecl = get_callee_fndecl (fntype);
1974 fntype = (fndecl
1975 ? TREE_TYPE (fndecl)
1976 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1978 break;
1979 case FUNCTION_DECL:
1980 fntype = TREE_TYPE (fntype);
1981 break;
1982 case FUNCTION_TYPE:
1983 case METHOD_TYPE:
1984 break;
1985 case IDENTIFIER_NODE:
1986 fntype = NULL_TREE;
1987 break;
1988 default:
1989 /* We don't expect other tree types here. */
1990 gcc_unreachable ();
1993 if (VOID_TYPE_P (type))
1994 return 0;
1996 /* If a record should be passed the same as its first (and only) member,
1997 don't pass it as an aggregate. */
1998 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
1999 return aggregate_value_p (first_field (type), fntype);
2001 /* If the front end has decided that this needs to be passed by
2002 reference, do so. */
2003 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2004 && DECL_BY_REFERENCE (exp))
2005 return 1;
2007 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2008 if (fntype && TREE_ADDRESSABLE (fntype))
2009 return 1;
2011 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2012 and thus can't be returned in registers. */
2013 if (TREE_ADDRESSABLE (type))
2014 return 1;
2016 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2017 return 1;
2019 if (targetm.calls.return_in_memory (type, fntype))
2020 return 1;
2022 /* Make sure we have suitable call-clobbered regs to return
2023 the value in; if not, we must return it in memory. */
2024 reg = hard_function_value (type, 0, fntype, 0);
2026 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2027 it is OK. */
2028 if (!REG_P (reg))
2029 return 0;
2031 regno = REGNO (reg);
2032 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2033 for (i = 0; i < nregs; i++)
2034 if (! call_used_regs[regno + i])
2035 return 1;
2037 return 0;
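/* [Editorial aside -- illustration only, not part of function.c.]
   A compilable, flattened sketch of the decision chain above.  The
   fields are hypothetical stand-ins for the tree flags, the
   return_in_memory target hook, and the closing call_used_regs scan;
   the real predicate consults all of those through GCC's own APIs.  */

#include <stdbool.h>
#include <stdio.h>

struct toy_type
{
  bool is_void;
  bool addressable;            /* TREE_ADDRESSABLE                           */
  bool pcc_aggregate;          /* flag_pcc_struct_return && AGGREGATE_TYPE_P */
  bool target_wants_memory;    /* targetm.calls.return_in_memory             */
  bool return_regs_clobbered;  /* every return reg is call-clobbered         */
};

static bool
toy_aggregate_value_p (const struct toy_type *t)
{
  if (t->is_void)
    return false;
  if (t->addressable || t->pcc_aggregate || t->target_wants_memory)
    return true;
  /* Mirrors the final hard_regno_nregs/call_used_regs loop: a value
     that cannot live in call-clobbered regs must return in memory.  */
  return !t->return_regs_clobbered;
}

int
main (void)
{
  struct toy_type t = { false, true, false, false, true };
  printf ("%d\n", toy_aggregate_value_p (&t));  /* addressable -> 1 */
  return 0;
}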
2040 /* Return true if we should assign DECL a pseudo register; false if it
2041 should live on the local stack. */
2043 bool
2044 use_register_for_decl (const_tree decl)
2046 if (!targetm.calls.allocate_stack_slots_for_args())
2047 return true;
2049 /* Honor volatile. */
2050 if (TREE_SIDE_EFFECTS (decl))
2051 return false;
2053 /* Honor addressability. */
2054 if (TREE_ADDRESSABLE (decl))
2055 return false;
2057 /* Only register-like things go in registers. */
2058 if (DECL_MODE (decl) == BLKmode)
2059 return false;
2061 /* If -ffloat-store specified, don't put explicit float variables
2062 into registers. */
2063 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2064 propagates values across these stores, and it probably shouldn't. */
2065 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2066 return false;
2068 /* If we're not interested in tracking debugging information for
2069 this decl, then we can certainly put it in a register. */
2070 if (DECL_IGNORED_P (decl))
2071 return true;
2073 if (optimize)
2074 return true;
2076 if (!DECL_REGISTER (decl))
2077 return false;
2079 switch (TREE_CODE (TREE_TYPE (decl)))
2081 case RECORD_TYPE:
2082 case UNION_TYPE:
2083 case QUAL_UNION_TYPE:
2084 /* When not optimizing, disregard register keyword for variables with
2085 types containing methods, otherwise the methods won't be callable
2086 from the debugger. */
2087 if (TYPE_METHODS (TREE_TYPE (decl)))
2088 return false;
2089 break;
2090 default:
2091 break;
2094 return true;
2097 /* Return true if TYPE should be passed by invisible reference. */
2099 bool
2100 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2101 tree type, bool named_arg)
2103 if (type)
2105 /* If this type contains non-trivial constructors, then it is
2106 forbidden for the middle-end to create any new copies. */
2107 if (TREE_ADDRESSABLE (type))
2108 return true;
2110 /* GCC post 3.4 passes *all* variable sized types by reference. */
2111 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2112 return true;
2114 /* If a record type should be passed the same as its first (and only)
2115 member, use the type and mode of that member. */
2116 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2118 type = TREE_TYPE (first_field (type));
2119 mode = TYPE_MODE (type);
2123 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2124 type, named_arg);
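/* [Editorial aside -- illustration only, not part of function.c.]
   The same two middle-end checks, sketched over a hypothetical type
   descriptor: addressable types and variable-sized types always go by
   reference, a transparent record defers to its first field, and
   everything else would fall through to the target hook.  */

#include <stdbool.h>
#include <stddef.h>

struct toy_type
{
  bool addressable;                           /* TREE_ADDRESSABLE            */
  bool size_is_constant;                      /* TYPE_SIZE is an INTEGER_CST */
  const struct toy_type *transparent_member;  /* first field, or NULL        */
};

static bool
toy_pass_by_reference (const struct toy_type *t)
{
  if (t->addressable)              /* middle-end may not copy it */
    return true;
  if (!t->size_is_constant)        /* variable-sized: by reference */
    return true;
  if (t->transparent_member)       /* look through transparent aggregate */
    return toy_pass_by_reference (t->transparent_member);
  return false;   /* really: defer to targetm.calls.pass_by_reference */
}

int
main (void)
{
  struct toy_type vla_like = { false, false, NULL };
  return toy_pass_by_reference (&vla_like);  /* 1: no constant size */
}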
2127 /* Return true if TYPE, which is passed by reference, should be callee
2128 copied instead of caller copied. */
2130 bool
2131 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2132 tree type, bool named_arg)
2134 if (type && TREE_ADDRESSABLE (type))
2135 return false;
2136 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2137 named_arg);
2140 /* Structures to communicate between the subroutines of assign_parms.
2141 The first holds data persistent across all parameters, the second
2142 is cleared out for each parameter. */
2144 struct assign_parm_data_all
2146 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2147 should become a job of the target or otherwise encapsulated. */
2148 CUMULATIVE_ARGS args_so_far_v;
2149 cumulative_args_t args_so_far;
2150 struct args_size stack_args_size;
2151 tree function_result_decl;
2152 tree orig_fnargs;
2153 rtx first_conversion_insn;
2154 rtx last_conversion_insn;
2155 HOST_WIDE_INT pretend_args_size;
2156 HOST_WIDE_INT extra_pretend_bytes;
2157 int reg_parm_stack_space;
2160 struct assign_parm_data_one
2162 tree nominal_type;
2163 tree passed_type;
2164 rtx entry_parm;
2165 rtx stack_parm;
2166 enum machine_mode nominal_mode;
2167 enum machine_mode passed_mode;
2168 enum machine_mode promoted_mode;
2169 struct locate_and_pad_arg_data locate;
2170 int partial;
2171 BOOL_BITFIELD named_arg : 1;
2172 BOOL_BITFIELD passed_pointer : 1;
2173 BOOL_BITFIELD on_stack : 1;
2174 BOOL_BITFIELD loaded_in_reg : 1;
2177 /* A subroutine of assign_parms. Initialize ALL. */
2179 static void
2180 assign_parms_initialize_all (struct assign_parm_data_all *all)
2182 tree fntype ATTRIBUTE_UNUSED;
2184 memset (all, 0, sizeof (*all));
2186 fntype = TREE_TYPE (current_function_decl);
2188 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2189 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2190 #else
2191 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2192 current_function_decl, -1);
2193 #endif
2194 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2196 #ifdef REG_PARM_STACK_SPACE
2197 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2198 #endif
2201 /* If ARGS contains entries with complex types, split each entry into
2202 two entries of the component type. The substitution is made in place
2203 in *ARGS; no new list is returned. */
2205 static void
2206 split_complex_args (VEC(tree, heap) **args)
2208 unsigned i;
2209 tree p;
2211 FOR_EACH_VEC_ELT (tree, *args, i, p)
2213 tree type = TREE_TYPE (p);
2214 if (TREE_CODE (type) == COMPLEX_TYPE
2215 && targetm.calls.split_complex_arg (type))
2217 tree decl;
2218 tree subtype = TREE_TYPE (type);
2219 bool addressable = TREE_ADDRESSABLE (p);
2221 /* Rewrite the PARM_DECL's type with its component. */
2222 p = copy_node (p);
2223 TREE_TYPE (p) = subtype;
2224 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2225 DECL_MODE (p) = VOIDmode;
2226 DECL_SIZE (p) = NULL;
2227 DECL_SIZE_UNIT (p) = NULL;
2228 /* If this arg must go in memory, put it in a pseudo here.
2229 We can't allow it to go in memory as per normal parms,
2230 because the usual place might not have the imag part
2231 adjacent to the real part. */
2232 DECL_ARTIFICIAL (p) = addressable;
2233 DECL_IGNORED_P (p) = addressable;
2234 TREE_ADDRESSABLE (p) = 0;
2235 layout_decl (p, 0);
2236 VEC_replace (tree, *args, i, p);
2238 /* Build a second synthetic decl. */
2239 decl = build_decl (EXPR_LOCATION (p),
2240 PARM_DECL, NULL_TREE, subtype);
2241 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2242 DECL_ARTIFICIAL (decl) = addressable;
2243 DECL_IGNORED_P (decl) = addressable;
2244 layout_decl (decl, 0);
2245 VEC_safe_insert (tree, heap, *args, ++i, decl);
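/* [Editorial aside -- illustration only, not part of function.c.]
   The source-level effect of the splitting above: when the target's
   split_complex_arg hook fires, one complex parameter becomes two
   scalar parameters of the component type (reassembled later by
   assign_parms_unsplit_complex).  These prototypes are illustrative;
   GCC performs the rewrite on PARM_DECLs, not on source code.  */

/* As the user wrote it: */
double magnitude (double _Complex z);

/* As effectively lowered when the hook says to split: */
double magnitude_lowered (double z_real, double z_imag);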
2250 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2251 the hidden struct return argument, and (abi willing) complex args.
2252 Return the new parameter list. */
2254 static VEC(tree, heap) *
2255 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2257 tree fndecl = current_function_decl;
2258 tree fntype = TREE_TYPE (fndecl);
2259 VEC(tree, heap) *fnargs = NULL;
2260 tree arg;
2262 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2263 VEC_safe_push (tree, heap, fnargs, arg);
2265 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2267 /* If struct value address is treated as the first argument, make it so. */
2268 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2269 && ! cfun->returns_pcc_struct
2270 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2272 tree type = build_pointer_type (TREE_TYPE (fntype));
2273 tree decl;
2275 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2276 PARM_DECL, get_identifier (".result_ptr"), type);
2277 DECL_ARG_TYPE (decl) = type;
2278 DECL_ARTIFICIAL (decl) = 1;
2279 DECL_NAMELESS (decl) = 1;
2280 TREE_CONSTANT (decl) = 1;
2282 DECL_CHAIN (decl) = all->orig_fnargs;
2283 all->orig_fnargs = decl;
2284 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2286 all->function_result_decl = decl;
2289 /* If the target wants to split complex arguments into scalars, do so. */
2290 if (targetm.calls.split_complex_arg)
2291 split_complex_args (&fnargs);
2293 return fnargs;
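/* [Editorial aside -- illustration only, not part of function.c.]
   What the synthetic ".result_ptr" parameter above means at the source
   level: when aggregate_value_p holds and the struct-value address is
   passed like a first argument, an aggregate-returning function is in
   effect called with a hidden pointer to the caller's result slot.
   Illustrative prototypes, not GCC output.  */

struct big { long a[8]; };

/* As the user wrote it: */
struct big make_big (int x);

/* As effectively lowered: */
void make_big_lowered (struct big *hidden_result_ptr, int x);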
2296 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2297 data for the parameter. Incorporate ABI specifics such as pass-by-
2298 reference and type promotion. */
2300 static void
2301 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2302 struct assign_parm_data_one *data)
2304 tree nominal_type, passed_type;
2305 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2306 int unsignedp;
2308 memset (data, 0, sizeof (*data));
2310 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2311 if (!cfun->stdarg)
2312 data->named_arg = 1; /* No variadic parms. */
2313 else if (DECL_CHAIN (parm))
2314 data->named_arg = 1; /* Not the last non-variadic parm. */
2315 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2316 data->named_arg = 1; /* Only variadic ones are unnamed. */
2317 else
2318 data->named_arg = 0; /* Treat as variadic. */
2320 nominal_type = TREE_TYPE (parm);
2321 passed_type = DECL_ARG_TYPE (parm);
2323 /* Look out for errors propagating this far. Also, if the parameter's
2324 type is void then its value doesn't matter. */
2325 if (TREE_TYPE (parm) == error_mark_node
2326 /* This can happen after weird syntax errors
2327 or if an enum type is defined among the parms. */
2328 || TREE_CODE (parm) != PARM_DECL
2329 || passed_type == NULL
2330 || VOID_TYPE_P (nominal_type))
2332 nominal_type = passed_type = void_type_node;
2333 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2334 goto egress;
2337 /* Find mode of arg as it is passed, and mode of arg as it should be
2338 during execution of this function. */
2339 passed_mode = TYPE_MODE (passed_type);
2340 nominal_mode = TYPE_MODE (nominal_type);
2342 /* If the parm is to be passed as a transparent union or record, use the
2343 type of the first field for the tests below. We have already verified
2344 that the modes are the same. */
2345 if ((TREE_CODE (passed_type) == UNION_TYPE
2346 || TREE_CODE (passed_type) == RECORD_TYPE)
2347 && TYPE_TRANSPARENT_AGGR (passed_type))
2348 passed_type = TREE_TYPE (first_field (passed_type));
2350 /* See if this arg was passed by invisible reference. */
2351 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2352 passed_type, data->named_arg))
2354 passed_type = nominal_type = build_pointer_type (passed_type);
2355 data->passed_pointer = true;
2356 passed_mode = nominal_mode = Pmode;
2359 /* Find mode as it is passed by the ABI. */
2360 unsignedp = TYPE_UNSIGNED (passed_type);
2361 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2362 TREE_TYPE (current_function_decl), 0);
2364 egress:
2365 data->nominal_type = nominal_type;
2366 data->passed_type = passed_type;
2367 data->nominal_mode = nominal_mode;
2368 data->passed_mode = passed_mode;
2369 data->promoted_mode = promoted_mode;
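/* [Editorial aside -- illustration only, not part of function.c.]
   The nominal/promoted distinction above, in source terms.  On an ABI
   where promote_function_mode widens sub-word integers, a 'short'
   argument travels as a full 'int' (its promoted mode) and the callee
   then views it in its nominal, narrow mode.  The truncation below is
   the usual two's-complement behavior; strictly it is
   implementation-defined in C.  */

#include <stdio.h>

static short
nominal_view (int promoted)
{
  return (short) promoted;      /* demote back to the nominal mode */
}

int
main (void)
{
  printf ("%d\n", nominal_view (70000));  /* 70000 mod 2^16 = 4464 */
  return 0;
}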
2372 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2374 static void
2375 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2376 struct assign_parm_data_one *data, bool no_rtl)
2378 int varargs_pretend_bytes = 0;
2380 targetm.calls.setup_incoming_varargs (all->args_so_far,
2381 data->promoted_mode,
2382 data->passed_type,
2383 &varargs_pretend_bytes, no_rtl);
2385 /* If the back-end has requested extra stack space, record how much is
2386 needed. Do not change pretend_args_size otherwise since it may be
2387 nonzero from an earlier partial argument. */
2388 if (varargs_pretend_bytes > 0)
2389 all->pretend_args_size = varargs_pretend_bytes;
2392 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2393 the incoming location of the current parameter. */
2395 static void
2396 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2397 struct assign_parm_data_one *data)
2399 HOST_WIDE_INT pretend_bytes = 0;
2400 rtx entry_parm;
2401 bool in_regs;
2403 if (data->promoted_mode == VOIDmode)
2405 data->entry_parm = data->stack_parm = const0_rtx;
2406 return;
2409 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2410 data->promoted_mode,
2411 data->passed_type,
2412 data->named_arg);
2414 if (entry_parm == 0)
2415 data->promoted_mode = data->passed_mode;
2417 /* Determine parm's home in the stack, in case it arrives in the stack
2418 or we should pretend it did. Compute the stack position and rtx where
2419 the argument arrives and its size.
2421 There is one complexity here: If this was a parameter that would
2422 have been passed in registers, but wasn't only because it is
2423 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2424 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2425 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0,
2426 as it was the previous time. */
2427 in_regs = entry_parm != 0;
2428 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2429 in_regs = true;
2430 #endif
2431 if (!in_regs && !data->named_arg)
2433 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2435 rtx tem;
2436 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2437 data->promoted_mode,
2438 data->passed_type, true);
2439 in_regs = tem != NULL;
2443 /* If this parameter was passed both in registers and in the stack, use
2444 the copy on the stack. */
2445 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2446 data->passed_type))
2447 entry_parm = 0;
2449 if (entry_parm)
2451 int partial;
2453 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2454 data->promoted_mode,
2455 data->passed_type,
2456 data->named_arg);
2457 data->partial = partial;
2459 /* The caller might already have allocated stack space for the
2460 register parameters. */
2461 if (partial != 0 && all->reg_parm_stack_space == 0)
2463 /* Part of this argument is passed in registers and part
2464 is passed on the stack. Ask the prologue code to extend
2465 the stack part so that we can recreate the full value.
2467 PRETEND_BYTES is the size of the registers we need to store.
2468 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2469 stack space that the prologue should allocate.
2471 Internally, gcc assumes that the argument pointer is aligned
2472 to STACK_BOUNDARY bits. This is used both for alignment
2473 optimizations (see init_emit) and to locate arguments that are
2474 aligned to more than PARM_BOUNDARY bits. We must preserve this
2475 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2476 a stack boundary. */
2478 /* We assume at most one partial arg, and it must be the first
2479 argument on the stack. */
2480 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2482 pretend_bytes = partial;
2483 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2485 /* We want to align relative to the actual stack pointer, so
2486 don't include this in the stack size until later. */
2487 all->extra_pretend_bytes = all->pretend_args_size;
2491 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2492 entry_parm ? data->partial : 0, current_function_decl,
2493 &all->stack_args_size, &data->locate);
2495 /* Update parm_stack_boundary if this parameter is passed in the
2496 stack. */
2497 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2498 crtl->parm_stack_boundary = data->locate.boundary;
2500 /* Adjust offsets to include the pretend args. */
2501 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2502 data->locate.slot_offset.constant += pretend_bytes;
2503 data->locate.offset.constant += pretend_bytes;
2505 data->entry_parm = entry_parm;
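/* [Editorial aside -- illustration only, not part of function.c.]
   A worked example of the pretend-args rounding above, reusing this
   file's own CEIL_ROUND macro.  Assuming STACK_BYTES is 16, a partial
   argument with 12 bytes left in registers yields a 16-byte pretend
   area, preserving the STACK_BOUNDARY invariant the comment cites.  */

#include <stdio.h>

#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  int stack_bytes = 16;      /* assumed STACK_BOUNDARY / BITS_PER_UNIT */
  int pretend_bytes = 12;    /* bytes of the arg still in registers */
  printf ("%d\n", CEIL_ROUND (pretend_bytes, stack_bytes));  /* 16 */
  return 0;
}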
2508 /* A subroutine of assign_parms. If there is actually space on the stack
2509 for this parm, count it in stack_args_size and return true. */
2511 static bool
2512 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2513 struct assign_parm_data_one *data)
2515 /* Trivially true if we've no incoming register. */
2516 if (data->entry_parm == NULL)
2518 /* Also true if we're partially in registers and partially not,
2519 since we've arranged to drop the entire argument on the stack. */
2520 else if (data->partial != 0)
2522 /* Also true if the target says that it's passed in both registers
2523 and on the stack. */
2524 else if (GET_CODE (data->entry_parm) == PARALLEL
2525 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2527 /* Also true if the target says that there's stack allocated for
2528 all register parameters. */
2529 else if (all->reg_parm_stack_space > 0)
2531 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2532 else
2533 return false;
2535 all->stack_args_size.constant += data->locate.size.constant;
2536 if (data->locate.size.var)
2537 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2539 return true;
2542 /* A subroutine of assign_parms. Given that this parameter is allocated
2543 stack space by the ABI, find it. */
2545 static void
2546 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2548 rtx offset_rtx, stack_parm;
2549 unsigned int align, boundary;
2551 /* If we're passing this arg using a reg, make its stack home the
2552 aligned stack slot. */
2553 if (data->entry_parm)
2554 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2555 else
2556 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2558 stack_parm = crtl->args.internal_arg_pointer;
2559 if (offset_rtx != const0_rtx)
2560 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2561 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2563 if (!data->passed_pointer)
2565 set_mem_attributes (stack_parm, parm, 1);
2566 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2567 while the promoted mode's size is needed. */
2568 if (data->promoted_mode != BLKmode
2569 && data->promoted_mode != DECL_MODE (parm))
2571 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2572 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2574 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2575 data->promoted_mode);
2576 if (offset)
2577 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2582 boundary = data->locate.boundary;
2583 align = BITS_PER_UNIT;
2585 /* If we're padding upward, we know that the alignment of the slot
2586 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2587 intentionally forcing upward padding. Otherwise we have to come
2588 up with a guess at the alignment based on OFFSET_RTX. */
2589 if (data->locate.where_pad != downward || data->entry_parm)
2590 align = boundary;
2591 else if (CONST_INT_P (offset_rtx))
2593 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2594 align = align & -align;
2596 set_mem_align (stack_parm, align);
2598 if (data->entry_parm)
2599 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2601 data->stack_parm = stack_parm;
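/* [Editorial aside -- illustration only, not part of function.c.]
   The two ALIGN lines above use a classic bit trick: OR the slot's bit
   offset with the boundary, then isolate the lowest set bit with
   x & -x.  That bit is the largest power of two dividing both values,
   i.e. the alignment we can actually prove for the slot.  */

#include <stdio.h>

static unsigned
provable_alignment (unsigned offset_bits, unsigned boundary_bits)
{
  unsigned align = offset_bits | boundary_bits;
  return align & -align;   /* lowest set bit = largest power-of-2 divisor */
}

int
main (void)
{
  /* A slot at bit offset 192 with a 64-bit boundary: the offset is a
     multiple of 64, so 64-bit alignment is provable.  */
  printf ("%u\n", provable_alignment (24 * 8, 64));  /* prints 64 */
  return 0;
}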
2604 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2605 always valid and contiguous. */
2607 static void
2608 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2610 rtx entry_parm = data->entry_parm;
2611 rtx stack_parm = data->stack_parm;
2613 /* If this parm was passed part in regs and part in memory, pretend it
2614 arrived entirely in memory by pushing the register-part onto the stack.
2615 In the special case of a DImode or DFmode that is split, we could put
2616 it together in a pseudoreg directly, but for now that's not worth
2617 bothering with. */
2618 if (data->partial != 0)
2620 /* Handle calls that pass values in multiple non-contiguous
2621 locations. The Irix 6 ABI has examples of this. */
2622 if (GET_CODE (entry_parm) == PARALLEL)
2623 emit_group_store (validize_mem (stack_parm), entry_parm,
2624 data->passed_type,
2625 int_size_in_bytes (data->passed_type));
2626 else
2628 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2629 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2630 data->partial / UNITS_PER_WORD);
2633 entry_parm = stack_parm;
2636 /* If we didn't decide this parm came in a register, by default it came
2637 on the stack. */
2638 else if (entry_parm == NULL)
2639 entry_parm = stack_parm;
2641 /* When an argument is passed in multiple locations, we can't make use
2642 of this information, but we can save some copying if the whole argument
2643 is passed in a single register. */
2644 else if (GET_CODE (entry_parm) == PARALLEL
2645 && data->nominal_mode != BLKmode
2646 && data->passed_mode != BLKmode)
2648 size_t i, len = XVECLEN (entry_parm, 0);
2650 for (i = 0; i < len; i++)
2651 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2652 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2653 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2654 == data->passed_mode)
2655 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2657 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2658 break;
2662 data->entry_parm = entry_parm;
2665 /* A subroutine of assign_parms. Reconstitute any values which were
2666 passed in multiple registers and would fit in a single register. */
2668 static void
2669 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2671 rtx entry_parm = data->entry_parm;
2673 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2674 This can be done with register operations rather than on the
2675 stack, even if we will store the reconstituted parameter on the
2676 stack later. */
2677 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2679 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2680 emit_group_store (parmreg, entry_parm, data->passed_type,
2681 GET_MODE_SIZE (GET_MODE (entry_parm)));
2682 entry_parm = parmreg;
2685 data->entry_parm = entry_parm;
2688 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2689 always valid and properly aligned. */
2691 static void
2692 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2694 rtx stack_parm = data->stack_parm;
2696 /* If we can't trust the parm stack slot to be aligned enough for its
2697 ultimate type, don't use that slot after entry. We'll make another
2698 stack slot, if we need one. */
2699 if (stack_parm
2700 && ((STRICT_ALIGNMENT
2701 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2702 || (data->nominal_type
2703 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2704 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2705 stack_parm = NULL;
2707 /* If parm was passed in memory, and we need to convert it on entry,
2708 don't store it back in that same slot. */
2709 else if (data->entry_parm == stack_parm
2710 && data->nominal_mode != BLKmode
2711 && data->nominal_mode != data->passed_mode)
2712 stack_parm = NULL;
2714 /* If stack protection is in effect for this function, don't leave any
2715 pointers in their passed stack slots. */
2716 else if (crtl->stack_protect_guard
2717 && (flag_stack_protect == 2
2718 || data->passed_pointer
2719 || POINTER_TYPE_P (data->nominal_type)))
2720 stack_parm = NULL;
2722 data->stack_parm = stack_parm;
2725 /* A subroutine of assign_parms. Return true if the current parameter
2726 should be stored as a BLKmode in the current frame. */
2728 static bool
2729 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2731 if (data->nominal_mode == BLKmode)
2732 return true;
2733 if (GET_MODE (data->entry_parm) == BLKmode)
2734 return true;
2736 #ifdef BLOCK_REG_PADDING
2737 /* Only assign_parm_setup_block knows how to deal with register arguments
2738 that are padded at the least significant end. */
2739 if (REG_P (data->entry_parm)
2740 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2741 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2742 == (BYTES_BIG_ENDIAN ? upward : downward)))
2743 return true;
2744 #endif
2746 return false;
2749 /* A subroutine of assign_parms. Arrange for the parameter to be
2750 present and valid in DATA->STACK_RTL. */
2752 static void
2753 assign_parm_setup_block (struct assign_parm_data_all *all,
2754 tree parm, struct assign_parm_data_one *data)
2756 rtx entry_parm = data->entry_parm;
2757 rtx stack_parm = data->stack_parm;
2758 HOST_WIDE_INT size;
2759 HOST_WIDE_INT size_stored;
2761 if (GET_CODE (entry_parm) == PARALLEL)
2762 entry_parm = emit_group_move_into_temps (entry_parm);
2764 size = int_size_in_bytes (data->passed_type);
2765 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2766 if (stack_parm == 0)
2768 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2769 stack_parm = assign_stack_local (BLKmode, size_stored,
2770 DECL_ALIGN (parm));
2771 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2772 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2773 set_mem_attributes (stack_parm, parm, 1);
2776 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2777 calls that pass values in multiple non-contiguous locations. */
2778 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2780 rtx mem;
2782 /* Note that we will be storing an integral number of words.
2783 So we have to be careful to ensure that we allocate an
2784 integral number of words. We do this above when we call
2785 assign_stack_local if space was not allocated in the argument
2786 list. If it was, this will not work if PARM_BOUNDARY is not
2787 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2788 if it becomes a problem. Exception is when BLKmode arrives
2789 with arguments not conforming to word_mode. */
2791 if (data->stack_parm == 0)
2792 ;
2793 else if (GET_CODE (entry_parm) == PARALLEL)
2794 ;
2795 else
2796 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2798 mem = validize_mem (stack_parm);
2800 /* Handle values in multiple non-contiguous locations. */
2801 if (GET_CODE (entry_parm) == PARALLEL)
2803 push_to_sequence2 (all->first_conversion_insn,
2804 all->last_conversion_insn);
2805 emit_group_store (mem, entry_parm, data->passed_type, size);
2806 all->first_conversion_insn = get_insns ();
2807 all->last_conversion_insn = get_last_insn ();
2808 end_sequence ();
2811 else if (size == 0)
2812 ;
2814 /* If SIZE is that of a mode no bigger than a word, just use
2815 that mode's store operation. */
2816 else if (size <= UNITS_PER_WORD)
2818 enum machine_mode mode
2819 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2821 if (mode != BLKmode
2822 #ifdef BLOCK_REG_PADDING
2823 && (size == UNITS_PER_WORD
2824 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2825 != (BYTES_BIG_ENDIAN ? upward : downward)))
2826 #endif
2829 rtx reg;
2831 /* We are really truncating a word_mode value containing
2832 SIZE bytes into a value of mode MODE. If such an
2833 operation requires no actual instructions, we can refer
2834 to the value directly in mode MODE, otherwise we must
2835 start with the register in word_mode and explicitly
2836 convert it. */
2837 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2838 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2839 else
2841 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2842 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2844 emit_move_insn (change_address (mem, mode, 0), reg);
2847 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2848 machine must be aligned to the left before storing
2849 to memory. Note that the previous test doesn't
2850 handle all cases (e.g. SIZE == 3). */
2851 else if (size != UNITS_PER_WORD
2852 #ifdef BLOCK_REG_PADDING
2853 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2854 == downward)
2855 #else
2856 && BYTES_BIG_ENDIAN
2857 #endif
2860 rtx tem, x;
2861 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2862 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2864 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2865 tem = change_address (mem, word_mode, 0);
2866 emit_move_insn (tem, x);
2868 else
2869 move_block_from_reg (REGNO (entry_parm), mem,
2870 size_stored / UNITS_PER_WORD);
2872 else
2873 move_block_from_reg (REGNO (entry_parm), mem,
2874 size_stored / UNITS_PER_WORD);
2876 else if (data->stack_parm == 0)
2878 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2879 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2880 BLOCK_OP_NORMAL);
2881 all->first_conversion_insn = get_insns ();
2882 all->last_conversion_insn = get_last_insn ();
2883 end_sequence ();
2886 data->stack_parm = stack_parm;
2887 SET_DECL_RTL (parm, stack_parm);
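/* [Editorial aside -- illustration only, not part of function.c.]
   The shift in the BYTES_BIG_ENDIAN branch above, concretely.  A
   3-byte value arriving in the low end of a register must be moved to
   the most-significant end before the full-word store, so its bytes
   land at the start of the stack slot on a big-endian target.  A
   4-byte UNITS_PER_WORD is assumed here.  */

#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  unsigned units_per_word = 4;                 /* assumed UNITS_PER_WORD */
  unsigned size = 3;                           /* bytes in the BLKmode arg */
  uint32_t reg = 0x00ABCDEF;                   /* value in the low 3 bytes */
  unsigned by = (units_per_word - size) * 8;   /* bits to shift left: 8 */
  printf ("0x%08X\n", (unsigned) (reg << by)); /* 0xABCDEF00 */
  return 0;
}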
2890 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2891 parameter. Get it there. Perform all ABI specified conversions. */
2893 static void
2894 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2895 struct assign_parm_data_one *data)
2897 rtx parmreg, validated_mem;
2898 rtx equiv_stack_parm;
2899 enum machine_mode promoted_nominal_mode;
2900 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2901 bool did_conversion = false;
2902 bool need_conversion, moved;
2904 /* Store the parm in a pseudoregister during the function, but we may
2905 need to do it in a wider mode. Using 2 here makes the result
2906 consistent with promote_decl_mode and thus expand_expr_real_1. */
2907 promoted_nominal_mode
2908 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2909 TREE_TYPE (current_function_decl), 2);
2911 parmreg = gen_reg_rtx (promoted_nominal_mode);
2913 if (!DECL_ARTIFICIAL (parm))
2914 mark_user_reg (parmreg);
2916 /* If this was an item that we received a pointer to,
2917 set DECL_RTL appropriately. */
2918 if (data->passed_pointer)
2920 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2921 set_mem_attributes (x, parm, 1);
2922 SET_DECL_RTL (parm, x);
2924 else
2925 SET_DECL_RTL (parm, parmreg);
2927 assign_parm_remove_parallels (data);
2929 /* Copy the value into the register, thus bridging between
2930 assign_parm_find_data_types and expand_expr_real_1. */
2932 equiv_stack_parm = data->stack_parm;
2933 validated_mem = validize_mem (data->entry_parm);
2935 need_conversion = (data->nominal_mode != data->passed_mode
2936 || promoted_nominal_mode != data->promoted_mode);
2937 moved = false;
2939 if (need_conversion
2940 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2941 && data->nominal_mode == data->passed_mode
2942 && data->nominal_mode == GET_MODE (data->entry_parm))
2944 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2945 mode, by the caller. We now have to convert it to
2946 NOMINAL_MODE, if different. However, PARMREG may be in
2947 a different mode than NOMINAL_MODE if it is being stored
2948 promoted.
2950 If ENTRY_PARM is a hard register, it might be in a register
2951 not valid for operating in its mode (e.g., an odd-numbered
2952 register for a DFmode). In that case, moves are the only
2953 thing valid, so we can't do a convert from there. This
2954 occurs when the calling sequence allows such misaligned
2955 usages.
2957 In addition, the conversion may involve a call, which could
2958 clobber parameters which haven't been copied to pseudo
2959 registers yet.
2961 First, we try to emit an insn which performs the necessary
2962 conversion. We verify that this insn does not clobber any
2963 hard registers. */
2965 enum insn_code icode;
2966 rtx op0, op1;
2968 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2969 unsignedp);
2971 op0 = parmreg;
2972 op1 = validated_mem;
2973 if (icode != CODE_FOR_nothing
2974 && insn_operand_matches (icode, 0, op0)
2975 && insn_operand_matches (icode, 1, op1))
2977 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2978 rtx insn, insns;
2979 HARD_REG_SET hardregs;
2981 start_sequence ();
2982 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
2983 data->passed_mode, unsignedp);
2984 emit_insn (insn);
2985 insns = get_insns ();
2987 moved = true;
2988 CLEAR_HARD_REG_SET (hardregs);
2989 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
2991 if (INSN_P (insn))
2992 note_stores (PATTERN (insn), record_hard_reg_sets,
2993 &hardregs);
2994 if (!hard_reg_set_empty_p (hardregs))
2995 moved = false;
2998 end_sequence ();
3000 if (moved)
3002 emit_insn (insns);
3003 if (equiv_stack_parm != NULL_RTX)
3004 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3005 equiv_stack_parm);
3010 if (moved)
3011 /* Nothing to do. */
3012 ;
3013 else if (need_conversion)
3015 /* We did not have an insn to convert directly, or the sequence
3016 generated appeared unsafe. We must first copy the parm to a
3017 pseudo reg, and save the conversion until after all
3018 parameters have been moved. */
3020 int save_tree_used;
3021 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3023 emit_move_insn (tempreg, validated_mem);
3025 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3026 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3028 if (GET_CODE (tempreg) == SUBREG
3029 && GET_MODE (tempreg) == data->nominal_mode
3030 && REG_P (SUBREG_REG (tempreg))
3031 && data->nominal_mode == data->passed_mode
3032 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3033 && GET_MODE_SIZE (GET_MODE (tempreg))
3034 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3036 /* The argument is already sign/zero extended, so note it
3037 into the subreg. */
3038 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3039 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3042 /* TREE_USED gets set erroneously during expand_assignment. */
3043 save_tree_used = TREE_USED (parm);
3044 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3045 TREE_USED (parm) = save_tree_used;
3046 all->first_conversion_insn = get_insns ();
3047 all->last_conversion_insn = get_last_insn ();
3048 end_sequence ();
3050 did_conversion = true;
3052 else
3053 emit_move_insn (parmreg, validated_mem);
3055 /* If we were passed a pointer but the actual value can safely live
3056 in a register, put it in one. */
3057 if (data->passed_pointer
3058 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3059 /* If by-reference argument was promoted, demote it. */
3060 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3061 || use_register_for_decl (parm)))
3063 /* We can't use nominal_mode, because it will have been set to
3064 Pmode above. We must use the actual mode of the parm. */
3065 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3066 mark_user_reg (parmreg);
3068 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3070 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3071 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3073 push_to_sequence2 (all->first_conversion_insn,
3074 all->last_conversion_insn);
3075 emit_move_insn (tempreg, DECL_RTL (parm));
3076 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3077 emit_move_insn (parmreg, tempreg);
3078 all->first_conversion_insn = get_insns ();
3079 all->last_conversion_insn = get_last_insn ();
3080 end_sequence ();
3082 did_conversion = true;
3084 else
3085 emit_move_insn (parmreg, DECL_RTL (parm));
3087 SET_DECL_RTL (parm, parmreg);
3089 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3090 now the parm. */
3091 data->stack_parm = NULL;
3094 /* Mark the register as eliminable if we did no conversion and it was
3095 copied from memory at a fixed offset, and the arg pointer was not
3096 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3097 offset formed an invalid address, such memory-equivalences as we
3098 make here would screw up life analysis for it. */
3099 if (data->nominal_mode == data->passed_mode
3100 && !did_conversion
3101 && data->stack_parm != 0
3102 && MEM_P (data->stack_parm)
3103 && data->locate.offset.var == 0
3104 && reg_mentioned_p (virtual_incoming_args_rtx,
3105 XEXP (data->stack_parm, 0)))
3107 rtx linsn = get_last_insn ();
3108 rtx sinsn, set;
3110 /* Mark complex types separately. */
3111 if (GET_CODE (parmreg) == CONCAT)
3113 enum machine_mode submode
3114 = GET_MODE_INNER (GET_MODE (parmreg));
3115 int regnor = REGNO (XEXP (parmreg, 0));
3116 int regnoi = REGNO (XEXP (parmreg, 1));
3117 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3118 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3119 GET_MODE_SIZE (submode));
3121 /* Scan backwards for the set of the real and
3122 imaginary parts. */
3123 for (sinsn = linsn; sinsn != 0;
3124 sinsn = prev_nonnote_insn (sinsn))
3126 set = single_set (sinsn);
3127 if (set == 0)
3128 continue;
3130 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3131 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3132 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3133 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3136 else
3137 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3140 /* For pointer data type, suggest pointer register. */
3141 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3142 mark_reg_pointer (parmreg,
3143 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3146 /* A subroutine of assign_parms. Allocate stack space to hold the current
3147 parameter. Get it there. Perform all ABI specified conversions. */
3149 static void
3150 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3151 struct assign_parm_data_one *data)
3153 /* Value must be stored in the stack slot STACK_PARM during function
3154 execution. */
3155 bool to_conversion = false;
3157 assign_parm_remove_parallels (data);
3159 if (data->promoted_mode != data->nominal_mode)
3161 /* Conversion is required. */
3162 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3164 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3166 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3167 to_conversion = true;
3169 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3170 TYPE_UNSIGNED (TREE_TYPE (parm)));
3172 if (data->stack_parm)
3174 int offset = subreg_lowpart_offset (data->nominal_mode,
3175 GET_MODE (data->stack_parm));
3176 /* ??? This may need a big-endian conversion on sparc64. */
3177 data->stack_parm
3178 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3179 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3180 set_mem_offset (data->stack_parm,
3181 MEM_OFFSET (data->stack_parm) + offset);
3185 if (data->entry_parm != data->stack_parm)
3187 rtx src, dest;
3189 if (data->stack_parm == 0)
3191 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3192 GET_MODE (data->entry_parm),
3193 TYPE_ALIGN (data->passed_type));
3194 data->stack_parm
3195 = assign_stack_local (GET_MODE (data->entry_parm),
3196 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3197 align);
3198 set_mem_attributes (data->stack_parm, parm, 1);
3201 dest = validize_mem (data->stack_parm);
3202 src = validize_mem (data->entry_parm);
3204 if (MEM_P (src))
3206 /* Use a block move to handle potentially misaligned entry_parm. */
3207 if (!to_conversion)
3208 push_to_sequence2 (all->first_conversion_insn,
3209 all->last_conversion_insn);
3210 to_conversion = true;
3212 emit_block_move (dest, src,
3213 GEN_INT (int_size_in_bytes (data->passed_type)),
3214 BLOCK_OP_NORMAL);
3216 else
3217 emit_move_insn (dest, src);
3220 if (to_conversion)
3222 all->first_conversion_insn = get_insns ();
3223 all->last_conversion_insn = get_last_insn ();
3224 end_sequence ();
3227 SET_DECL_RTL (parm, data->stack_parm);
3230 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3231 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3233 static void
3234 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3235 VEC(tree, heap) *fnargs)
3237 tree parm;
3238 tree orig_fnargs = all->orig_fnargs;
3239 unsigned i = 0;
3241 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3243 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3244 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3246 rtx tmp, real, imag;
3247 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3249 real = DECL_RTL (VEC_index (tree, fnargs, i));
3250 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3251 if (inner != GET_MODE (real))
3253 real = gen_lowpart_SUBREG (inner, real);
3254 imag = gen_lowpart_SUBREG (inner, imag);
3257 if (TREE_ADDRESSABLE (parm))
3259 rtx rmem, imem;
3260 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3261 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3262 DECL_MODE (parm),
3263 TYPE_ALIGN (TREE_TYPE (parm)));
3265 /* split_complex_arg put the real and imag parts in
3266 pseudos. Move them to memory. */
3267 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3268 set_mem_attributes (tmp, parm, 1);
3269 rmem = adjust_address_nv (tmp, inner, 0);
3270 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3271 push_to_sequence2 (all->first_conversion_insn,
3272 all->last_conversion_insn);
3273 emit_move_insn (rmem, real);
3274 emit_move_insn (imem, imag);
3275 all->first_conversion_insn = get_insns ();
3276 all->last_conversion_insn = get_last_insn ();
3277 end_sequence ();
3279 else
3280 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3281 SET_DECL_RTL (parm, tmp);
3283 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3284 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3285 if (inner != GET_MODE (real))
3287 real = gen_lowpart_SUBREG (inner, real);
3288 imag = gen_lowpart_SUBREG (inner, imag);
3290 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3291 set_decl_incoming_rtl (parm, tmp, false);
3292 i++;
3297 /* Assign RTL expressions to the function's parameters. This may involve
3298 copying them into registers and using those registers as the DECL_RTL. */
3300 static void
3301 assign_parms (tree fndecl)
3303 struct assign_parm_data_all all;
3304 tree parm;
3305 VEC(tree, heap) *fnargs;
3306 unsigned i;
3308 crtl->args.internal_arg_pointer
3309 = targetm.calls.internal_arg_pointer ();
3311 assign_parms_initialize_all (&all);
3312 fnargs = assign_parms_augmented_arg_list (&all);
3314 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3316 struct assign_parm_data_one data;
3318 /* Extract the type of PARM; adjust it according to ABI. */
3319 assign_parm_find_data_types (&all, parm, &data);
3321 /* Early out for errors and void parameters. */
3322 if (data.passed_mode == VOIDmode)
3324 SET_DECL_RTL (parm, const0_rtx);
3325 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3326 continue;
3329 /* Estimate stack alignment from parameter alignment. */
3330 if (SUPPORTS_STACK_ALIGNMENT)
3332 unsigned int align
3333 = targetm.calls.function_arg_boundary (data.promoted_mode,
3334 data.passed_type);
3335 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3336 align);
3337 if (TYPE_ALIGN (data.nominal_type) > align)
3338 align = MINIMUM_ALIGNMENT (data.nominal_type,
3339 TYPE_MODE (data.nominal_type),
3340 TYPE_ALIGN (data.nominal_type));
3341 if (crtl->stack_alignment_estimated < align)
3343 gcc_assert (!crtl->stack_realign_processed);
3344 crtl->stack_alignment_estimated = align;
3348 if (cfun->stdarg && !DECL_CHAIN (parm))
3349 assign_parms_setup_varargs (&all, &data, false);
3351 /* Find out where the parameter arrives in this function. */
3352 assign_parm_find_entry_rtl (&all, &data);
3354 /* Find out where stack space for this parameter might be. */
3355 if (assign_parm_is_stack_parm (&all, &data))
3357 assign_parm_find_stack_rtl (parm, &data);
3358 assign_parm_adjust_entry_rtl (&data);
3361 /* Record permanently how this parm was passed. */
3362 if (data.passed_pointer)
3364 rtx incoming_rtl
3365 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3366 data.entry_parm);
3367 set_decl_incoming_rtl (parm, incoming_rtl, true);
3369 else
3370 set_decl_incoming_rtl (parm, data.entry_parm, false);
3372 /* Update info on where next arg arrives in registers. */
3373 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3374 data.passed_type, data.named_arg);
3376 assign_parm_adjust_stack_rtl (&data);
3378 if (assign_parm_setup_block_p (&data))
3379 assign_parm_setup_block (&all, parm, &data);
3380 else if (data.passed_pointer || use_register_for_decl (parm))
3381 assign_parm_setup_reg (&all, parm, &data);
3382 else
3383 assign_parm_setup_stack (&all, parm, &data);
3386 if (targetm.calls.split_complex_arg)
3387 assign_parms_unsplit_complex (&all, fnargs);
3389 VEC_free (tree, heap, fnargs);
3391 /* Output all parameter conversion instructions (possibly including calls)
3392 now that all parameters have been copied out of hard registers. */
3393 emit_insn (all.first_conversion_insn);
3395 /* Estimate reload stack alignment from scalar return mode. */
3396 if (SUPPORTS_STACK_ALIGNMENT)
3398 if (DECL_RESULT (fndecl))
3400 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3401 enum machine_mode mode = TYPE_MODE (type);
3403 if (mode != BLKmode
3404 && mode != VOIDmode
3405 && !AGGREGATE_TYPE_P (type))
3407 unsigned int align = GET_MODE_ALIGNMENT (mode);
3408 if (crtl->stack_alignment_estimated < align)
3410 gcc_assert (!crtl->stack_realign_processed);
3411 crtl->stack_alignment_estimated = align;
3417 /* If we are receiving a struct value address as the first argument, set up
3418 the RTL for the function result. As this might require code to convert
3419 the transmitted address to Pmode, we do this here to ensure that possible
3420 preliminary conversions of the address have been emitted already. */
3421 if (all.function_result_decl)
3423 tree result = DECL_RESULT (current_function_decl);
3424 rtx addr = DECL_RTL (all.function_result_decl);
3425 rtx x;
3427 if (DECL_BY_REFERENCE (result))
3429 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3430 x = addr;
3432 else
3434 SET_DECL_VALUE_EXPR (result,
3435 build1 (INDIRECT_REF, TREE_TYPE (result),
3436 all.function_result_decl));
3437 addr = convert_memory_address (Pmode, addr);
3438 x = gen_rtx_MEM (DECL_MODE (result), addr);
3439 set_mem_attributes (x, result, 1);
3442 DECL_HAS_VALUE_EXPR_P (result) = 1;
3444 SET_DECL_RTL (result, x);
3447 /* We have aligned all the args, so add space for the pretend args. */
3448 crtl->args.pretend_args_size = all.pretend_args_size;
3449 all.stack_args_size.constant += all.extra_pretend_bytes;
3450 crtl->args.size = all.stack_args_size.constant;
3452 /* Adjust function incoming argument size for alignment and
3453 minimum length. */
3455 #ifdef REG_PARM_STACK_SPACE
3456 crtl->args.size = MAX (crtl->args.size,
3457 REG_PARM_STACK_SPACE (fndecl));
3458 #endif
3460 crtl->args.size = CEIL_ROUND (crtl->args.size,
3461 PARM_BOUNDARY / BITS_PER_UNIT);
3463 #ifdef ARGS_GROW_DOWNWARD
3464 crtl->args.arg_offset_rtx
3465 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3466 : expand_expr (size_diffop (all.stack_args_size.var,
3467 size_int (-all.stack_args_size.constant)),
3468 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3469 #else
3470 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3471 #endif
3473 /* See how many bytes, if any, of its args a function should try to pop
3474 on return. */
3476 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3477 TREE_TYPE (fndecl),
3478 crtl->args.size);
3480 /* For stdarg.h functions, save info about
3481 regs and stack space used by the named args. */
3483 crtl->args.info = all.args_so_far_v;
3485 /* Set the rtx used for the function return value. Put this in its
3486 own variable so any optimizers that need this information don't have
3487 to include tree.h. Do this here so it gets done when an inlined
3488 function gets output. */
3490 crtl->return_rtx
3491 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3492 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3494 /* If scalar return value was computed in a pseudo-reg, or was a named
3495 return value that got dumped to the stack, copy that to the hard
3496 return register. */
3497 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3499 tree decl_result = DECL_RESULT (fndecl);
3500 rtx decl_rtl = DECL_RTL (decl_result);
3502 if (REG_P (decl_rtl)
3503 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3504 : DECL_REGISTER (decl_result))
3506 rtx real_decl_rtl;
3508 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3509 fndecl, true);
3510 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3511 /* The delay slot scheduler assumes that crtl->return_rtx
3512 holds the hard register containing the return value, not a
3513 temporary pseudo. */
3514 crtl->return_rtx = real_decl_rtl;
3519 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3520 For all seen types, gimplify their sizes. */
3522 static tree
3523 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3525 tree t = *tp;
3527 *walk_subtrees = 0;
3528 if (TYPE_P (t))
3530 if (POINTER_TYPE_P (t))
3531 *walk_subtrees = 1;
3532 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3533 && !TYPE_SIZES_GIMPLIFIED (t))
3535 gimplify_type_sizes (t, (gimple_seq *) data);
3536 *walk_subtrees = 1;
3540 return NULL;
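/* [Editorial aside -- illustration only, not part of function.c.]
   The kind of parameter type this walker exists for: a VLA parameter
   type whose dimensions are runtime expressions.  Those size
   expressions must be gimplified once, up front, which is what
   gimplify_type_sizes arranges for types like the one below.  */

/* TYPE_SIZE of 'double[n][m]' depends on n and m at run time.  */
void scale_matrix (int n, int m, double a[n][m]);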
3543 /* Gimplify the parameter list for current_function_decl. This involves
3544 evaluating SAVE_EXPRs of variable sized parameters and generating code
3545 to implement callee-copies reference parameters. Returns a sequence of
3546 statements to add to the beginning of the function. */
3548 gimple_seq
3549 gimplify_parameters (void)
3551 struct assign_parm_data_all all;
3552 tree parm;
3553 gimple_seq stmts = NULL;
3554 VEC(tree, heap) *fnargs;
3555 unsigned i;
3557 assign_parms_initialize_all (&all);
3558 fnargs = assign_parms_augmented_arg_list (&all);
3560 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3562 struct assign_parm_data_one data;
3564 /* Extract the type of PARM; adjust it according to ABI. */
3565 assign_parm_find_data_types (&all, parm, &data);
3567 /* Early out for errors and void parameters. */
3568 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3569 continue;
3571 /* Update info on where next arg arrives in registers. */
3572 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3573 data.passed_type, data.named_arg);
3575 /* ??? Once upon a time variable_size stuffed parameter list
3576 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3577 turned out to be less than manageable in the gimple world.
3578 Now we have to hunt them down ourselves. */
3579 walk_tree_without_duplicates (&data.passed_type,
3580 gimplify_parm_type, &stmts);
3582 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3584 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3585 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3588 if (data.passed_pointer)
3590 tree type = TREE_TYPE (data.passed_type);
3591 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3592 type, data.named_arg))
3594 tree local, t;
3596 /* For constant-sized objects, this is trivial; for
3597 variable-sized objects, we have to play games. */
3598 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3599 && !(flag_stack_check == GENERIC_STACK_CHECK
3600 && compare_tree_int (DECL_SIZE_UNIT (parm),
3601 STACK_CHECK_MAX_VAR_SIZE) > 0))
3603 local = create_tmp_var (type, get_name (parm));
3604 DECL_IGNORED_P (local) = 0;
3605 /* If PARM was addressable, move that flag over
3606 to the local copy, as its address will be taken,
3607 not the PARM's. Keep the PARM's address-taken flag,
3608 as we'll query that flag during gimplification. */
3609 if (TREE_ADDRESSABLE (parm))
3610 TREE_ADDRESSABLE (local) = 1;
3611 else if (TREE_CODE (type) == COMPLEX_TYPE
3612 || TREE_CODE (type) == VECTOR_TYPE)
3613 DECL_GIMPLE_REG_P (local) = 1;
3615 else
3617 tree ptr_type, addr;
3619 ptr_type = build_pointer_type (type);
3620 addr = create_tmp_reg (ptr_type, get_name (parm));
3621 DECL_IGNORED_P (addr) = 0;
3622 local = build_fold_indirect_ref (addr);
3624 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3625 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3626 size_int (DECL_ALIGN (parm)));
3628 /* The call has been built for a variable-sized object. */
3629 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3630 t = fold_convert (ptr_type, t);
3631 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3632 gimplify_and_add (t, &stmts);
3635 gimplify_assign (local, parm, &stmts);
3637 SET_DECL_VALUE_EXPR (parm, local);
3638 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3643 VEC_free (tree, heap, fnargs);
3645 return stmts;
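/* [Editorial aside -- illustration only, not part of function.c.]
   What the callee-copy path above builds, written directly in C.  For
   a variable-sized by-reference parameter the generated sequence
   amounts to an aligned alloca (BUILT_IN_ALLOCA_WITH_ALIGN) plus an
   assignment from the incoming object; a VLA stands in for the alloca
   here.  */

#include <stdio.h>
#include <string.h>

static long
sum_with_copy (const long *parm, size_t n)
{
  long local[n];                 /* stands in for the alloca call */
  memcpy (local, parm, n * sizeof *local);
  local[0] += 1;                 /* mutates the copy, not the caller's data */
  long s = 0;
  for (size_t i = 0; i < n; i++)
    s += local[i];
  return s;
}

int
main (void)
{
  long a[3] = { 1, 2, 3 };
  printf ("%ld %ld\n", sum_with_copy (a, 3), a[0]);  /* 7 1 */
  return 0;
}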
3648 /* Compute the size and offset from the start of the stacked arguments for a
3649 parm passed in mode PASSED_MODE and with type TYPE.
3651 INITIAL_OFFSET_PTR points to the current offset into the stacked
3652 arguments.
3654 The starting offset and size for this parm are returned in
3655 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3656 nonzero, the offset is that of the stack slot, which is returned in
3657 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3658 padding required from the initial offset ptr to the stack slot.
3660 IN_REGS is nonzero if the argument will be passed in registers. It will
3661 never be set if REG_PARM_STACK_SPACE is not defined.
3663 FNDECL is the function in which the argument was defined.
3665 There are two types of rounding that are done. The first, controlled by
3666 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3667 argument list to be aligned to the specific boundary (in bits). This
3668 rounding affects the initial and starting offsets, but not the argument
3669 size.
3671 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3672 optionally rounds the size of the parm to PARM_BOUNDARY. The
3673 initial offset is not affected by this rounding, while the size always
3674 is and the starting offset may be. */
3676 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3677 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3678 callers pass in the total size of args so far as
3679 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
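/* A worked example may help (a sketch with made-up numbers, not taken
   from any target): with PARM_BOUNDARY == 32, a 6-byte parm and an
   initial offset of 8, the first rounding leaves the starting offset
   at 8 (it is already 32-bit aligned), while the second rounds
   LOCATE->SIZE up to round_up (6, 4) == 8 bytes, so the next parm
   begins at offset 16; pad_below still sees the pre-rounded 6-byte
   size.  */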
3681 void
3682 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3683 int partial, tree fndecl ATTRIBUTE_UNUSED,
3684 struct args_size *initial_offset_ptr,
3685 struct locate_and_pad_arg_data *locate)
3687 tree sizetree;
3688 enum direction where_pad;
3689 unsigned int boundary, round_boundary;
3690 int reg_parm_stack_space = 0;
3691 int part_size_in_regs;
3693 #ifdef REG_PARM_STACK_SPACE
3694 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3696 /* If we have found a stack parm before we reach the end of the
3697 area reserved for registers, skip that area. */
3698 if (! in_regs)
3700 if (reg_parm_stack_space > 0)
3702 if (initial_offset_ptr->var)
3704 initial_offset_ptr->var
3705 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3706 ssize_int (reg_parm_stack_space));
3707 initial_offset_ptr->constant = 0;
3709 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3710 initial_offset_ptr->constant = reg_parm_stack_space;
3713 #endif /* REG_PARM_STACK_SPACE */
3715 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3717 sizetree
3718 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3719 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3720 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3721 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3722 type);
3723 locate->where_pad = where_pad;
3725 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3726 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3727 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3729 locate->boundary = boundary;
3731 if (SUPPORTS_STACK_ALIGNMENT)
3733 /* stack_alignment_estimated can't change after stack has been
3734 realigned. */
3735 if (crtl->stack_alignment_estimated < boundary)
3737 if (!crtl->stack_realign_processed)
3738 crtl->stack_alignment_estimated = boundary;
3739 else
3741 /* If stack is realigned and stack alignment value
3742 hasn't been finalized, it is OK not to increase
3743 stack_alignment_estimated. The bigger alignment
3744 requirement is recorded in stack_alignment_needed
3745 below. */
3746 gcc_assert (!crtl->stack_realign_finalized
3747 && crtl->stack_realign_needed);
3752 /* Remember if the outgoing parameter requires extra alignment on the
3753 calling function side. */
3754 if (crtl->stack_alignment_needed < boundary)
3755 crtl->stack_alignment_needed = boundary;
3756 if (crtl->preferred_stack_boundary < boundary)
3757 crtl->preferred_stack_boundary = boundary;
3759 #ifdef ARGS_GROW_DOWNWARD
3760 locate->slot_offset.constant = -initial_offset_ptr->constant;
3761 if (initial_offset_ptr->var)
3762 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3763 initial_offset_ptr->var);
3766 tree s2 = sizetree;
3767 if (where_pad != none
3768 && (!host_integerp (sizetree, 1)
3769 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3770 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3771 SUB_PARM_SIZE (locate->slot_offset, s2);
3774 locate->slot_offset.constant += part_size_in_regs;
3776 if (!in_regs
3777 #ifdef REG_PARM_STACK_SPACE
3778 || REG_PARM_STACK_SPACE (fndecl) > 0
3779 #endif
3781 pad_to_arg_alignment (&locate->slot_offset, boundary,
3782 &locate->alignment_pad);
3784 locate->size.constant = (-initial_offset_ptr->constant
3785 - locate->slot_offset.constant);
3786 if (initial_offset_ptr->var)
3787 locate->size.var = size_binop (MINUS_EXPR,
3788 size_binop (MINUS_EXPR,
3789 ssize_int (0),
3790 initial_offset_ptr->var),
3791 locate->slot_offset.var);
3793 /* Pad_below needs the pre-rounded size to know how much to pad
3794 below. */
3795 locate->offset = locate->slot_offset;
3796 if (where_pad == downward)
3797 pad_below (&locate->offset, passed_mode, sizetree);
3799 #else /* !ARGS_GROW_DOWNWARD */
3800 if (!in_regs
3801 #ifdef REG_PARM_STACK_SPACE
3802 || REG_PARM_STACK_SPACE (fndecl) > 0
3803 #endif
3805 pad_to_arg_alignment (initial_offset_ptr, boundary,
3806 &locate->alignment_pad);
3807 locate->slot_offset = *initial_offset_ptr;
3809 #ifdef PUSH_ROUNDING
3810 if (passed_mode != BLKmode)
3811 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3812 #endif
3814 /* Pad_below needs the pre-rounded size to know how much to pad below
3815 so this must be done before rounding up. */
3816 locate->offset = locate->slot_offset;
3817 if (where_pad == downward)
3818 pad_below (&locate->offset, passed_mode, sizetree);
3820 if (where_pad != none
3821 && (!host_integerp (sizetree, 1)
3822 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3823 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3825 ADD_PARM_SIZE (locate->size, sizetree);
3827 locate->size.constant -= part_size_in_regs;
3828 #endif /* ARGS_GROW_DOWNWARD */
3830 #ifdef FUNCTION_ARG_OFFSET
3831 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3832 #endif
3835 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3836 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
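/* For instance (a sketch assuming STACK_POINTER_OFFSET == 0 and
   BITS_PER_UNIT == 8): padding a constant offset of 9 to a 64-bit
   BOUNDARY computes CEIL_ROUND (9, 8) == 16; when BOUNDARY exceeds
   PARM_BOUNDARY, the 7 bytes of padding are also recorded in
   ALIGNMENT_PAD for the caller.  */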
3838 static void
3839 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3840 struct args_size *alignment_pad)
3842 tree save_var = NULL_TREE;
3843 HOST_WIDE_INT save_constant = 0;
3844 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3845 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3847 #ifdef SPARC_STACK_BOUNDARY_HACK
3848 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3849 the real alignment of %sp. However, when it does this, the
3850 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3851 if (SPARC_STACK_BOUNDARY_HACK)
3852 sp_offset = 0;
3853 #endif
3855 if (boundary > PARM_BOUNDARY)
3857 save_var = offset_ptr->var;
3858 save_constant = offset_ptr->constant;
3861 alignment_pad->var = NULL_TREE;
3862 alignment_pad->constant = 0;
3864 if (boundary > BITS_PER_UNIT)
3866 if (offset_ptr->var)
3868 tree sp_offset_tree = ssize_int (sp_offset);
3869 tree offset = size_binop (PLUS_EXPR,
3870 ARGS_SIZE_TREE (*offset_ptr),
3871 sp_offset_tree);
3872 #ifdef ARGS_GROW_DOWNWARD
3873 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3874 #else
3875 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3876 #endif
3878 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3879 /* ARGS_SIZE_TREE includes constant term. */
3880 offset_ptr->constant = 0;
3881 if (boundary > PARM_BOUNDARY)
3882 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3883 save_var);
3885 else
3887 offset_ptr->constant = -sp_offset +
3888 #ifdef ARGS_GROW_DOWNWARD
3889 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3890 #else
3891 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3892 #endif
3893 if (boundary > PARM_BOUNDARY)
3894 alignment_pad->constant = offset_ptr->constant - save_constant;
3899 static void
3900 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3902 if (passed_mode != BLKmode)
3904 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3905 offset_ptr->constant
3906 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3907 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3908 - GET_MODE_SIZE (passed_mode));
3910 else
3912 if (TREE_CODE (sizetree) != INTEGER_CST
3913 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3915 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3916 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3917 /* Add it in. */
3918 ADD_PARM_SIZE (*offset_ptr, s2);
3919 SUB_PARM_SIZE (*offset_ptr, sizetree);
3925 /* True if register REGNO was alive at a place where `setjmp' was
3926 called and was set more than once or is an argument. Such regs may
3927 be clobbered by `longjmp'. */
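/* A user-level illustration (a sketch, not code from this file):

     jmp_buf buf;
     int f (void)
     {
       int i = 1;
       if (setjmp (buf))
         return i;
       i = 2;
       g ();   (suppose g calls longjmp (buf, 1))
       return 0;
     }

   If I is assigned a call-clobbered hard register, longjmp may hand
   back the register contents from setjmp time, which is what the
   predicate below detects.  */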
3929 static bool
3930 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3932 /* There appear to be cases where some local vars never reach the
3933 backend but have bogus regnos. */
3934 if (regno >= max_reg_num ())
3935 return false;
3937 return ((REG_N_SETS (regno) > 1
3938 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3939 && REGNO_REG_SET_P (setjmp_crosses, regno));
3942 /* Walk the tree of blocks describing the binding levels within a
3943 function and warn about variables that might be killed by setjmp or
3944 vfork. This is done after calling flow_analysis and before register
3945 allocation since that will clobber the pseudo-regs to hard
3946 regs. */
3948 static void
3949 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3951 tree decl, sub;
3953 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3955 if (TREE_CODE (decl) == VAR_DECL
3956 && DECL_RTL_SET_P (decl)
3957 && REG_P (DECL_RTL (decl))
3958 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3959 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3960 " %<longjmp%> or %<vfork%>", decl);
3963 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3964 setjmp_vars_warning (setjmp_crosses, sub);
3967 /* Do the appropriate part of setjmp_vars_warning
3968 but for arguments instead of local variables. */
3970 static void
3971 setjmp_args_warning (bitmap setjmp_crosses)
3973 tree decl;
3974 for (decl = DECL_ARGUMENTS (current_function_decl);
3975 decl; decl = DECL_CHAIN (decl))
3976 if (DECL_RTL (decl) != 0
3977 && REG_P (DECL_RTL (decl))
3978 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3979 warning (OPT_Wclobbered,
3980 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3981 decl);
3984 /* Generate warning messages for variables live across setjmp. */
3986 void
3987 generate_setjmp_warnings (void)
3989 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3991 if (n_basic_blocks == NUM_FIXED_BLOCKS
3992 || bitmap_empty_p (setjmp_crosses))
3993 return;
3995 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3996 setjmp_args_warning (setjmp_crosses);
4000 /* Reverse the order of elements in the fragment chain T of blocks,
4001 and return the new head of the chain (old last element). */
4003 static tree
4004 block_fragments_nreverse (tree t)
4006 tree prev = 0, block, next;
4007 for (block = t; block; block = next)
4009 next = BLOCK_FRAGMENT_CHAIN (block);
4010 BLOCK_FRAGMENT_CHAIN (block) = prev;
4011 prev = block;
4013 return prev;
4016 /* Reverse the order of elements in the chain T of blocks,
4017 and return the new head of the chain (old last element).
4018 Also do the same on subblocks and reverse the order of elements
4019 in BLOCK_FRAGMENT_CHAIN as well. */
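/* For example (a sketch): a chain B1 -> B2 -> B3 becomes
   B3 -> B2 -> B1, each block's BLOCK_SUBBLOCKS chain is reversed
   recursively, and fragment chains hanging off origin blocks are
   reversed via block_fragments_nreverse above.  */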
4021 static tree
4022 blocks_nreverse_all (tree t)
4024 tree prev = 0, block, next;
4025 for (block = t; block; block = next)
4027 next = BLOCK_CHAIN (block);
4028 BLOCK_CHAIN (block) = prev;
4029 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4030 if (BLOCK_FRAGMENT_CHAIN (block)
4031 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4032 BLOCK_FRAGMENT_CHAIN (block)
4033 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4034 prev = block;
4036 return prev;
4040 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4041 and create duplicate blocks. */
4042 /* ??? Need an option to either create block fragments or to create
4043 abstract origin duplicates of a source block. It really depends
4044 on what optimization has been performed. */
4046 void
4047 reorder_blocks (void)
4049 tree block = DECL_INITIAL (current_function_decl);
4050 VEC(tree,heap) *block_stack;
4052 if (block == NULL_TREE)
4053 return;
4055 block_stack = VEC_alloc (tree, heap, 10);
4057 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4058 clear_block_marks (block);
4060 /* Prune the old trees away, so that they don't get in the way. */
4061 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4062 BLOCK_CHAIN (block) = NULL_TREE;
4064 /* Recreate the block tree from the note nesting. */
4065 reorder_blocks_1 (get_insns (), block, &block_stack);
4066 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4068 VEC_free (tree, heap, block_stack);
4071 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4073 void
4074 clear_block_marks (tree block)
4076 while (block)
4078 TREE_ASM_WRITTEN (block) = 0;
4079 clear_block_marks (BLOCK_SUBBLOCKS (block));
4080 block = BLOCK_CHAIN (block);
4084 static void
4085 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4087 rtx insn;
4089 for (insn = insns; insn; insn = NEXT_INSN (insn))
4091 if (NOTE_P (insn))
4093 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4095 tree block = NOTE_BLOCK (insn);
4096 tree origin;
4098 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4099 origin = block;
4101 /* If we have seen this block before, that means it now
4102 spans multiple address regions. Create a new fragment. */
4103 if (TREE_ASM_WRITTEN (block))
4105 tree new_block = copy_node (block);
4107 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4108 BLOCK_FRAGMENT_CHAIN (new_block)
4109 = BLOCK_FRAGMENT_CHAIN (origin);
4110 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4112 NOTE_BLOCK (insn) = new_block;
4113 block = new_block;
4116 BLOCK_SUBBLOCKS (block) = 0;
4117 TREE_ASM_WRITTEN (block) = 1;
4118 /* When there's only one block for the entire function,
4119 current_block == block and we mustn't do this; it
4120 will cause infinite recursion. */
4121 if (block != current_block)
4123 if (block != origin)
4124 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4126 BLOCK_SUPERCONTEXT (block) = current_block;
4127 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4128 BLOCK_SUBBLOCKS (current_block) = block;
4129 current_block = origin;
4131 VEC_safe_push (tree, heap, *p_block_stack, block);
4133 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4135 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4136 current_block = BLOCK_SUPERCONTEXT (current_block);
4142 /* Reverse the order of elements in the chain T of blocks,
4143 and return the new head of the chain (old last element). */
4145 tree
4146 blocks_nreverse (tree t)
4148 tree prev = 0, block, next;
4149 for (block = t; block; block = next)
4151 next = BLOCK_CHAIN (block);
4152 BLOCK_CHAIN (block) = prev;
4153 prev = block;
4155 return prev;
4158 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4159 by modifying the last node in chain 1 to point to chain 2. */
4161 tree
4162 block_chainon (tree op1, tree op2)
4164 tree t1;
4166 if (!op1)
4167 return op2;
4168 if (!op2)
4169 return op1;
4171 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4172 continue;
4173 BLOCK_CHAIN (t1) = op2;
4175 #ifdef ENABLE_TREE_CHECKING
4177 tree t2;
4178 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4179 gcc_assert (t2 != t1);
4181 #endif
4183 return op1;
4186 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4187 non-NULL, list them all into VECTOR, in a depth-first preorder
4188 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4189 blocks. */
4191 static int
4192 all_blocks (tree block, tree *vector)
4194 int n_blocks = 0;
4196 while (block)
4198 TREE_ASM_WRITTEN (block) = 0;
4200 /* Record this block. */
4201 if (vector)
4202 vector[n_blocks] = block;
4204 ++n_blocks;
4206 /* Record the subblocks, and their subblocks... */
4207 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4208 vector ? vector + n_blocks : 0);
4209 block = BLOCK_CHAIN (block);
4212 return n_blocks;
4215 /* Return a vector containing all the blocks rooted at BLOCK. The
4216 number of elements in the vector is stored in N_BLOCKS_P. The
4217 vector is dynamically allocated; it is the caller's responsibility
4218 to call `free' on the pointer returned. */
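/* Intended usage (a sketch mirroring number_blocks below):

     int n_blocks;
     tree *vec = get_block_vector (block, &n_blocks);
     ...visit vec[0] through vec[n_blocks - 1]...
     free (vec);
*/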
4220 static tree *
4221 get_block_vector (tree block, int *n_blocks_p)
4223 tree *block_vector;
4225 *n_blocks_p = all_blocks (block, NULL);
4226 block_vector = XNEWVEC (tree, *n_blocks_p);
4227 all_blocks (block, block_vector);
4229 return block_vector;
4232 static GTY(()) int next_block_index = 2;
4234 /* Set BLOCK_NUMBER for all the blocks in FN. */
4236 void
4237 number_blocks (tree fn)
4239 int i;
4240 int n_blocks;
4241 tree *block_vector;
4243 /* For SDB and XCOFF debugging output, we start numbering the blocks
4244 from 1 within each function, rather than keeping a running
4245 count. */
4246 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4247 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4248 next_block_index = 1;
4249 #endif
4251 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4253 /* The top-level BLOCK isn't numbered at all. */
4254 for (i = 1; i < n_blocks; ++i)
4255 /* We number the blocks from two. */
4256 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4258 free (block_vector);
4260 return;
4263 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4265 DEBUG_FUNCTION tree
4266 debug_find_var_in_block_tree (tree var, tree block)
4268 tree t;
4270 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4271 if (t == var)
4272 return block;
4274 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4276 tree ret = debug_find_var_in_block_tree (var, t);
4277 if (ret)
4278 return ret;
4281 return NULL_TREE;
4284 /* Keep track of whether we're in a dummy function context. If we are,
4285 we don't want to invoke the set_current_function hook, because we'll
4286 get into trouble if the hook calls target_reinit () recursively or
4287 when the initial initialization is not yet complete. */
4289 static bool in_dummy_function;
4291 /* Invoke the target hook when setting cfun. Update the optimization options
4292 if the function uses different options than the default. */
4294 static void
4295 invoke_set_current_function_hook (tree fndecl)
4297 if (!in_dummy_function)
4299 tree opts = ((fndecl)
4300 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4301 : optimization_default_node);
4303 if (!opts)
4304 opts = optimization_default_node;
4306 /* Change optimization options if needed. */
4307 if (optimization_current_node != opts)
4309 optimization_current_node = opts;
4310 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4313 targetm.set_current_function (fndecl);
4317 /* cfun should never be set directly; use this function. */
4319 void
4320 set_cfun (struct function *new_cfun)
4322 if (cfun != new_cfun)
4324 cfun = new_cfun;
4325 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4329 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4331 static VEC(function_p,heap) *cfun_stack;
4333 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4335 void
4336 push_cfun (struct function *new_cfun)
4338 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4339 set_cfun (new_cfun);
4342 /* Pop cfun from the stack. */
4344 void
4345 pop_cfun (void)
4347 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4348 set_cfun (new_cfun);
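/* Typical usage elsewhere in the compiler (a sketch):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ...examine or emit RTL on behalf of fndecl...
     pop_cfun ();

   Pushes and pops must stay balanced so cfun is restored.  */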
4351 /* Return the current value of funcdef_no and increment it. */
4352 int
4353 get_next_funcdef_no (void)
4355 return funcdef_no++;
4358 /* Return the current value of funcdef_no. */
4359 int
4360 get_last_funcdef_no (void)
4362 return funcdef_no;
4365 /* Allocate a function structure for FNDECL and set its contents
4366 to the defaults. Set cfun to the newly-allocated object.
4367 Some of the helper functions invoked during initialization assume
4368 that cfun has already been set. Therefore, assign the new object
4369 directly into cfun and invoke the back end hook explicitly at the
4370 very end, rather than initializing a temporary and calling set_cfun
4371 on it.
4373 ABSTRACT_P is true if this is a function that will never be seen by
4374 the middle-end. Such functions are front-end concepts (like C++
4375 function templates) that do not correspond directly to functions
4376 placed in object files. */
4378 void
4379 allocate_struct_function (tree fndecl, bool abstract_p)
4381 tree result;
4382 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4384 cfun = ggc_alloc_cleared_function ();
4386 init_eh_for_function ();
4388 if (init_machine_status)
4389 cfun->machine = (*init_machine_status) ();
4391 #ifdef OVERRIDE_ABI_FORMAT
4392 OVERRIDE_ABI_FORMAT (fndecl);
4393 #endif
4395 invoke_set_current_function_hook (fndecl);
4397 if (fndecl != NULL_TREE)
4399 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4400 cfun->decl = fndecl;
4401 current_function_funcdef_no = get_next_funcdef_no ();
4403 result = DECL_RESULT (fndecl);
4404 if (!abstract_p && aggregate_value_p (result, fndecl))
4406 #ifdef PCC_STATIC_STRUCT_RETURN
4407 cfun->returns_pcc_struct = 1;
4408 #endif
4409 cfun->returns_struct = 1;
4412 cfun->stdarg = stdarg_p (fntype);
4414 /* Assume all registers in stdarg functions need to be saved. */
4415 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4416 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4418 /* ??? This could be set on a per-function basis by the front-end
4419 but is this worth the hassle? */
4420 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4424 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4425 instead of just setting it. */
4427 void
4428 push_struct_function (tree fndecl)
4430 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4431 allocate_struct_function (fndecl, false);
4434 /* Reset crtl and other non-struct-function variables to defaults as
4435 appropriate for emitting rtl at the start of a function. */
4437 static void
4438 prepare_function_start (void)
4440 gcc_assert (!crtl->emit.x_last_insn);
4441 init_temp_slots ();
4442 init_emit ();
4443 init_varasm_status ();
4444 init_expr ();
4445 default_rtl_profile ();
4447 if (flag_stack_usage_info)
4449 cfun->su = ggc_alloc_cleared_stack_usage ();
4450 cfun->su->static_stack_size = -1;
4453 cse_not_expected = ! optimize;
4455 /* Caller save not needed yet. */
4456 caller_save_needed = 0;
4458 /* We haven't done register allocation yet. */
4459 reg_renumber = 0;
4461 /* Indicate that we have not instantiated virtual registers yet. */
4462 virtuals_instantiated = 0;
4464 /* Indicate that we want CONCATs now. */
4465 generating_concat_p = 1;
4467 /* Indicate we have no need of a frame pointer yet. */
4468 frame_pointer_needed = 0;
4471 /* Initialize the rtl expansion mechanism so that we can do simple things
4472 like generate sequences. This is used to provide a context during global
4473 initialization of some passes. You must call expand_dummy_function_end
4474 to exit this context. */
4476 void
4477 init_dummy_function_start (void)
4479 gcc_assert (!in_dummy_function);
4480 in_dummy_function = true;
4481 push_struct_function (NULL_TREE);
4482 prepare_function_start ();
4485 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4486 and initialize static variables for generating RTL for the statements
4487 of the function. */
4489 void
4490 init_function_start (tree subr)
4492 if (subr && DECL_STRUCT_FUNCTION (subr))
4493 set_cfun (DECL_STRUCT_FUNCTION (subr));
4494 else
4495 allocate_struct_function (subr, false);
4496 prepare_function_start ();
4497 decide_function_section (subr);
4499 /* Warn if this value is an aggregate type,
4500 regardless of which calling convention we are using for it. */
4501 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4502 warning (OPT_Waggregate_return, "function returns an aggregate");
4505 /* Make sure all values used by the optimization passes have sane defaults. */
4506 unsigned int
4507 init_function_for_compilation (void)
4509 reg_renumber = 0;
4510 return 0;
4513 struct rtl_opt_pass pass_init_function =
4516 RTL_PASS,
4517 "*init_function", /* name */
4518 NULL, /* gate */
4519 init_function_for_compilation, /* execute */
4520 NULL, /* sub */
4521 NULL, /* next */
4522 0, /* static_pass_number */
4523 TV_NONE, /* tv_id */
4524 0, /* properties_required */
4525 0, /* properties_provided */
4526 0, /* properties_destroyed */
4527 0, /* todo_flags_start */
4528 0 /* todo_flags_finish */
4533 void
4534 expand_main_function (void)
4536 #if (defined(INVOKE__main) \
4537 || (!defined(HAS_INIT_SECTION) \
4538 && !defined(INIT_SECTION_ASM_OP) \
4539 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4540 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4541 #endif
4544 /* Expand code to initialize the stack_protect_guard. This is invoked at
4545 the beginning of a function to be protected. */
4547 #ifndef HAVE_stack_protect_set
4548 # define HAVE_stack_protect_set 0
4549 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4550 #endif
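/* In C-like terms the effect is (a conceptual sketch, not the actual
   RTL):

     frame_canary = guard;

   where GUARD is the decl returned by targetm.stack_protect_guard
   (typically the global __stack_chk_guard) and FRAME_CANARY stands
   for crtl->stack_protect_guard, the slot in this stack frame.  */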
4552 void
4553 stack_protect_prologue (void)
4555 tree guard_decl = targetm.stack_protect_guard ();
4556 rtx x, y;
4558 x = expand_normal (crtl->stack_protect_guard);
4559 y = expand_normal (guard_decl);
4561 /* Allow the target to copy from Y to X without leaking Y into a
4562 register. */
4563 if (HAVE_stack_protect_set)
4565 rtx insn = gen_stack_protect_set (x, y);
4566 if (insn)
4568 emit_insn (insn);
4569 return;
4573 /* Otherwise do a straight move. */
4574 emit_move_insn (x, y);
4577 /* Expand code to verify the stack_protect_guard. This is invoked at
4578 the end of a function to be protected. */
4580 #ifndef HAVE_stack_protect_test
4581 # define HAVE_stack_protect_test 0
4582 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4583 #endif
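/* Conceptually (a sketch in C terms):

     if (frame_canary != guard)
       fail ();

   where FAIL is whatever targetm.stack_protect_fail expands to,
   typically a call to __stack_chk_fail; the test is emitted either
   via the target's stack_protect_test pattern or a generic
   compare-and-jump.  */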
4585 void
4586 stack_protect_epilogue (void)
4588 tree guard_decl = targetm.stack_protect_guard ();
4589 rtx label = gen_label_rtx ();
4590 rtx x, y, tmp;
4592 x = expand_normal (crtl->stack_protect_guard);
4593 y = expand_normal (guard_decl);
4595 /* Allow the target to compare Y with X without leaking either into
4596 a register. */
4597 switch (HAVE_stack_protect_test != 0)
4599 case 1:
4600 tmp = gen_stack_protect_test (x, y, label);
4601 if (tmp)
4603 emit_insn (tmp);
4604 break;
4606 /* FALLTHRU */
4608 default:
4609 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4610 break;
4613 /* The noreturn predictor has been moved to the tree level. The rtl-level
4614 predictors estimate this branch about 20%, which isn't enough to get
4615 things moved out of line. Since this is the only extant case of adding
4616 a noreturn function at the rtl level, it doesn't seem worth doing anything
4617 except adding the prediction by hand. */
4618 tmp = get_last_insn ();
4619 if (JUMP_P (tmp))
4620 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4622 expand_expr_stmt (targetm.stack_protect_fail ());
4623 emit_label (label);
4626 /* Start the RTL for a new function, and set variables used for
4627 emitting RTL.
4628 SUBR is the FUNCTION_DECL node. */
4632 void
4633 expand_function_start (tree subr)
4635 /* Make sure volatile mem refs aren't considered
4636 valid operands of arithmetic insns. */
4637 init_recog_no_volatile ();
4639 crtl->profile
4640 = (profile_flag
4641 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4643 crtl->limit_stack
4644 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4646 /* Make the label for return statements to jump to. Do not special
4647 case machines with special return instructions -- they will be
4648 handled later during jump, ifcvt, or epilogue creation. */
4649 return_label = gen_label_rtx ();
4651 /* Initialize rtx used to return the value. */
4652 /* Do this before assign_parms so that we copy the struct value address
4653 before any library calls that assign parms might generate. */
4655 /* Decide whether to return the value in memory or in a register. */
4656 if (aggregate_value_p (DECL_RESULT (subr), subr))
4658 /* Returning something that won't go in a register. */
4659 rtx value_address = 0;
4661 #ifdef PCC_STATIC_STRUCT_RETURN
4662 if (cfun->returns_pcc_struct)
4664 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4665 value_address = assemble_static_space (size);
4667 else
4668 #endif
4670 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4671 /* Expect to be passed the address of a place to store the value.
4672 If it is passed as an argument, assign_parms will take care of
4673 it. */
4674 if (sv)
4676 value_address = gen_reg_rtx (Pmode);
4677 emit_move_insn (value_address, sv);
4680 if (value_address)
4682 rtx x = value_address;
4683 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4685 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4686 set_mem_attributes (x, DECL_RESULT (subr), 1);
4688 SET_DECL_RTL (DECL_RESULT (subr), x);
4691 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4692 /* If return mode is void, this decl rtl should not be used. */
4693 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4694 else
4696 /* Compute the return value into a pseudo reg, which we will copy
4697 into the true return register after the cleanups are done. */
4698 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4699 if (TYPE_MODE (return_type) != BLKmode
4700 && targetm.calls.return_in_msb (return_type))
4701 /* expand_function_end will insert the appropriate padding in
4702 this case. Use the return value's natural (unpadded) mode
4703 within the function proper. */
4704 SET_DECL_RTL (DECL_RESULT (subr),
4705 gen_reg_rtx (TYPE_MODE (return_type)));
4706 else
4708 /* In order to figure out what mode to use for the pseudo, we
4709 figure out what the mode of the eventual return register will
4710 actually be, and use that. */
4711 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4713 /* Structures that are returned in registers are not
4714 aggregate_value_p, so we may see a PARALLEL or a REG. */
4715 if (REG_P (hard_reg))
4716 SET_DECL_RTL (DECL_RESULT (subr),
4717 gen_reg_rtx (GET_MODE (hard_reg)));
4718 else
4720 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4721 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4725 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4726 result to the real return register(s). */
4727 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4730 /* Initialize rtx for parameters and local variables.
4731 In some cases this requires emitting insns. */
4732 assign_parms (subr);
4734 /* If function gets a static chain arg, store it. */
4735 if (cfun->static_chain_decl)
4737 tree parm = cfun->static_chain_decl;
4738 rtx local, chain, insn;
4740 local = gen_reg_rtx (Pmode);
4741 chain = targetm.calls.static_chain (current_function_decl, true);
4743 set_decl_incoming_rtl (parm, chain, false);
4744 SET_DECL_RTL (parm, local);
4745 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4747 insn = emit_move_insn (local, chain);
4749 /* Mark the register as eliminable, similar to parameters. */
4750 if (MEM_P (chain)
4751 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4752 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4755 /* If the function receives a non-local goto, then store the
4756 bits we need to restore the frame pointer. */
4757 if (cfun->nonlocal_goto_save_area)
4759 tree t_save;
4760 rtx r_save;
4762 /* ??? We need to do this save early. Unfortunately, this point
4763 is before the frame variable gets declared. Help out... */
4764 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4765 if (!DECL_RTL_SET_P (var))
4766 expand_decl (var);
4768 t_save = build4 (ARRAY_REF,
4769 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4770 cfun->nonlocal_goto_save_area,
4771 integer_zero_node, NULL_TREE, NULL_TREE);
4772 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4773 gcc_assert (GET_MODE (r_save) == Pmode);
4775 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4776 update_nonlocal_goto_save_area ();
4779 /* The following was moved from init_function_start.
4780 The move is supposed to make sdb output more accurate. */
4781 /* Indicate the beginning of the function body,
4782 as opposed to parm setup. */
4783 emit_note (NOTE_INSN_FUNCTION_BEG);
4785 gcc_assert (NOTE_P (get_last_insn ()));
4787 parm_birth_insn = get_last_insn ();
4789 if (crtl->profile)
4791 #ifdef PROFILE_HOOK
4792 PROFILE_HOOK (current_function_funcdef_no);
4793 #endif
4796 /* If we are doing generic stack checking, the probe should go here. */
4797 if (flag_stack_check == GENERIC_STACK_CHECK)
4798 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4800 /* Make sure there is a line number after the function entry setup code. */
4801 force_next_line_note ();
4804 /* Undo the effects of init_dummy_function_start. */
4805 void
4806 expand_dummy_function_end (void)
4808 gcc_assert (in_dummy_function);
4810 /* End any sequences that failed to be closed due to syntax errors. */
4811 while (in_sequence_p ())
4812 end_sequence ();
4814 /* Outside function body, can't compute type's actual size
4815 until next function's body starts. */
4817 free_after_parsing (cfun);
4818 free_after_compilation (cfun);
4819 pop_cfun ();
4820 in_dummy_function = false;
4823 /* Call DOIT for each hard register used as a return value from
4824 the current function. */
4826 void
4827 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4829 rtx outgoing = crtl->return_rtx;
4831 if (! outgoing)
4832 return;
4834 if (REG_P (outgoing))
4835 (*doit) (outgoing, arg);
4836 else if (GET_CODE (outgoing) == PARALLEL)
4838 int i;
4840 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4842 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4844 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4845 (*doit) (x, arg);
4850 static void
4851 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4853 emit_clobber (reg);
4856 void
4857 clobber_return_register (void)
4859 diddle_return_value (do_clobber_return_reg, NULL);
4861 /* In case we used a pseudo to return the value, clobber it too. */
4862 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4864 tree decl_result = DECL_RESULT (current_function_decl);
4865 rtx decl_rtl = DECL_RTL (decl_result);
4866 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4868 do_clobber_return_reg (decl_rtl, NULL);
4873 static void
4874 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4876 emit_use (reg);
4879 static void
4880 use_return_register (void)
4882 diddle_return_value (do_use_return_reg, NULL);
4885 /* Possibly warn about unused parameters. */
4886 void
4887 do_warn_unused_parameter (tree fn)
4889 tree decl;
4891 for (decl = DECL_ARGUMENTS (fn);
4892 decl; decl = DECL_CHAIN (decl))
4893 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4894 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4895 && !TREE_NO_WARNING (decl))
4896 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4899 static GTY(()) rtx initial_trampoline;
4901 /* Generate RTL for the end of the current function. */
4903 void
4904 expand_function_end (void)
4906 rtx clobber_after;
4908 /* If arg_pointer_save_area was referenced only from a nested
4909 function, we will not have initialized it yet. Do that now. */
4910 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4911 get_arg_pointer_save_area ();
4913 /* If we are doing generic stack checking and this function makes calls,
4914 do a stack probe at the start of the function to ensure we have enough
4915 space for another stack frame. */
4916 if (flag_stack_check == GENERIC_STACK_CHECK)
4918 rtx insn, seq;
4920 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4921 if (CALL_P (insn))
4923 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4924 start_sequence ();
4925 if (STACK_CHECK_MOVING_SP)
4926 anti_adjust_stack_and_probe (max_frame_size, true);
4927 else
4928 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4929 seq = get_insns ();
4930 end_sequence ();
4931 set_insn_locators (seq, prologue_locator);
4932 emit_insn_before (seq, stack_check_probe_note);
4933 break;
4937 /* End any sequences that failed to be closed due to syntax errors. */
4938 while (in_sequence_p ())
4939 end_sequence ();
4941 clear_pending_stack_adjust ();
4942 do_pending_stack_adjust ();
4944 /* Output a line number for the end of the function.
4945 SDB depends on this. */
4946 force_next_line_note ();
4947 set_curr_insn_source_location (input_location);
4949 /* Before the return label (if any), clobber the return
4950 registers so that they are not propagated live to the rest of
4951 the function. This can only happen with functions that drop
4952 through; if there had been a return statement, there would
4953 have either been a return rtx, or a jump to the return label.
4955 We delay actual code generation until after the current_function_value_rtx
4956 is computed. */
4957 clobber_after = get_last_insn ();
4959 /* Output the label for the actual return from the function. */
4960 emit_label (return_label);
4962 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
4964 /* Let except.c know where it should emit the call to unregister
4965 the function context for sjlj exceptions. */
4966 if (flag_exceptions)
4967 sjlj_emit_function_exit_after (get_last_insn ());
4969 else
4971 /* We want to ensure that instructions that may trap are not
4972 moved into the epilogue by scheduling, because we don't
4973 always emit unwind information for the epilogue. */
4974 if (cfun->can_throw_non_call_exceptions)
4975 emit_insn (gen_blockage ());
4978 /* If this is an implementation of throw, do what's necessary to
4979 communicate between __builtin_eh_return and the epilogue. */
4980 expand_eh_return ();
4982 /* If scalar return value was computed in a pseudo-reg, or was a named
4983 return value that got dumped to the stack, copy that to the hard
4984 return register. */
4985 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4987 tree decl_result = DECL_RESULT (current_function_decl);
4988 rtx decl_rtl = DECL_RTL (decl_result);
4990 if (REG_P (decl_rtl)
4991 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4992 : DECL_REGISTER (decl_result))
4994 rtx real_decl_rtl = crtl->return_rtx;
4996 /* This should be set in assign_parms. */
4997 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4999 /* If this is a BLKmode structure being returned in registers,
5000 then use the mode computed in expand_return. Note that if
5001 decl_rtl is memory, then its mode may have been changed,
5002 but that crtl->return_rtx has not. */
5003 if (GET_MODE (real_decl_rtl) == BLKmode)
5004 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5006 /* If a non-BLKmode return value should be padded at the least
5007 significant end of the register, shift it left by the appropriate
5008 amount. BLKmode results are handled using the group load/store
5009 machinery. */
5010 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5011 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5013 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5014 REGNO (real_decl_rtl)),
5015 decl_rtl);
5016 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5018 /* If a named return value dumped decl_rtl to memory, then
5019 we may need to re-do the PROMOTE_MODE signed/unsigned
5020 extension. */
5021 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5023 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5024 promote_function_mode (TREE_TYPE (decl_result),
5025 GET_MODE (decl_rtl), &unsignedp,
5026 TREE_TYPE (current_function_decl), 1);
5028 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5030 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5032 /* If expand_function_start has created a PARALLEL for decl_rtl,
5033 move the result to the real return registers. Otherwise, do
5034 a group load from decl_rtl for a named return. */
5035 if (GET_CODE (decl_rtl) == PARALLEL)
5036 emit_group_move (real_decl_rtl, decl_rtl);
5037 else
5038 emit_group_load (real_decl_rtl, decl_rtl,
5039 TREE_TYPE (decl_result),
5040 int_size_in_bytes (TREE_TYPE (decl_result)));
5042 /* In the case of complex integer modes smaller than a word, we'll
5043 need to generate some non-trivial bitfield insertions. Do that
5044 on a pseudo and not the hard register. */
5045 else if (GET_CODE (decl_rtl) == CONCAT
5046 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5047 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5049 int old_generating_concat_p;
5050 rtx tmp;
5052 old_generating_concat_p = generating_concat_p;
5053 generating_concat_p = 0;
5054 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5055 generating_concat_p = old_generating_concat_p;
5057 emit_move_insn (tmp, decl_rtl);
5058 emit_move_insn (real_decl_rtl, tmp);
5060 else
5061 emit_move_insn (real_decl_rtl, decl_rtl);
5065 /* If returning a structure, arrange to return the address of the value
5066 in a place where debuggers expect to find it.
5068 If returning a structure PCC style,
5069 the caller also depends on this value.
5070 And cfun->returns_pcc_struct is not necessarily set. */
5071 if (cfun->returns_struct
5072 || cfun->returns_pcc_struct)
5074 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5075 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5076 rtx outgoing;
5078 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5079 type = TREE_TYPE (type);
5080 else
5081 value_address = XEXP (value_address, 0);
5083 outgoing = targetm.calls.function_value (build_pointer_type (type),
5084 current_function_decl, true);
5086 /* Mark this as a function return value so integrate will delete the
5087 assignment and USE below when inlining this function. */
5088 REG_FUNCTION_VALUE_P (outgoing) = 1;
5090 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5091 value_address = convert_memory_address (GET_MODE (outgoing),
5092 value_address);
5094 emit_move_insn (outgoing, value_address);
5096 /* Show return register used to hold result (in this case the address
5097 of the result). */
5098 crtl->return_rtx = outgoing;
5101 /* Emit the actual code to clobber return register. */
5103 rtx seq;
5105 start_sequence ();
5106 clobber_return_register ();
5107 seq = get_insns ();
5108 end_sequence ();
5110 emit_insn_after (seq, clobber_after);
5113 /* Output the label for the naked return from the function. */
5114 if (naked_return_label)
5115 emit_label (naked_return_label);
5117 /* @@@ This is a kludge. We want to ensure that instructions that
5118 may trap are not moved into the epilogue by scheduling, because
5119 we don't always emit unwind information for the epilogue. */
5120 if (cfun->can_throw_non_call_exceptions
5121 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5122 emit_insn (gen_blockage ());
5124 /* If stack protection is enabled for this function, check the guard. */
5125 if (crtl->stack_protect_guard)
5126 stack_protect_epilogue ();
5128 /* If we had calls to alloca, and this machine needs
5129 an accurate stack pointer to exit the function,
5130 insert some code to save and restore the stack pointer. */
5131 if (! EXIT_IGNORE_STACK
5132 && cfun->calls_alloca)
5134 rtx tem = 0, seq;
5136 start_sequence ();
5137 emit_stack_save (SAVE_FUNCTION, &tem);
5138 seq = get_insns ();
5139 end_sequence ();
5140 emit_insn_before (seq, parm_birth_insn);
5142 emit_stack_restore (SAVE_FUNCTION, tem);
5145 /* ??? This should no longer be necessary since stupid register
5146 allocation is no longer with us, but some parts of the compiler
5147 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute
5148 their own lifetime info instead of using the general framework. */
5149 use_return_register ();
5152 rtx
5153 get_arg_pointer_save_area (void)
5155 rtx ret = arg_pointer_save_area;
5157 if (! ret)
5159 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5160 arg_pointer_save_area = ret;
5163 if (! crtl->arg_pointer_save_area_init)
5165 rtx seq;
5167 /* Save the arg pointer at the beginning of the function. The
5168 generated stack slot may not be a valid memory address, so we
5169 have to check it and fix it if necessary. */
5170 start_sequence ();
5171 emit_move_insn (validize_mem (ret),
5172 crtl->args.internal_arg_pointer);
5173 seq = get_insns ();
5174 end_sequence ();
5176 push_topmost_sequence ();
5177 emit_insn_after (seq, entry_of_function ());
5178 pop_topmost_sequence ();
5180 crtl->arg_pointer_save_area_init = true;
5183 return ret;
5186 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5187 for the first time. */
5189 static void
5190 record_insns (rtx insns, rtx end, htab_t *hashp)
5192 rtx tmp;
5193 htab_t hash = *hashp;
5195 if (hash == NULL)
5196 *hashp = hash
5197 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5199 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5201 void **slot = htab_find_slot (hash, tmp, INSERT);
5202 gcc_assert (*slot == NULL);
5203 *slot = tmp;
5207 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5208 basic block, splitting, or peepholes. If INSN is a prologue or epilogue
5209 insn, then record COPY as well. */
5211 void
5212 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5214 htab_t hash;
5215 void **slot;
5217 hash = epilogue_insn_hash;
5218 if (!hash || !htab_find (hash, insn))
5220 hash = prologue_insn_hash;
5221 if (!hash || !htab_find (hash, insn))
5222 return;
5225 slot = htab_find_slot (hash, copy, INSERT);
5226 gcc_assert (*slot == NULL);
5227 *slot = copy;
5230 /* Set the locator of the insn chain starting at INSN to LOC. */
5231 static void
5232 set_insn_locators (rtx insn, int loc)
5234 while (insn != NULL_RTX)
5236 if (INSN_P (insn))
5237 INSN_LOCATOR (insn) = loc;
5238 insn = NEXT_INSN (insn);
5242 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5243 we can be running after reorg, SEQUENCE rtl is possible. */
5245 static bool
5246 contains (const_rtx insn, htab_t hash)
5248 if (hash == NULL)
5249 return false;
5251 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5253 int i;
5254 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5255 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5256 return true;
5257 return false;
5260 return htab_find (hash, insn) != NULL;
5263 int
5264 prologue_epilogue_contains (const_rtx insn)
5266 if (contains (insn, prologue_insn_hash))
5267 return 1;
5268 if (contains (insn, epilogue_insn_hash))
5269 return 1;
5270 return 0;
5273 #ifdef HAVE_simple_return
5275 /* Return true if INSN requires the stack frame to be set up.
5276 PROLOGUE_USED contains the hard registers used in the function
5277 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5278 prologue to set up for the function. */
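/* For instance (a sketch): a non-sibling call always requires the
   frame, as does an insn that sets a call-saved hard register the
   prologue would have to preserve, or one that uses a register in
   SET_UP_BY_PROLOGUE such as the frame pointer.  */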
5279 bool
5280 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5281 HARD_REG_SET set_up_by_prologue)
5283 df_ref *df_rec;
5284 HARD_REG_SET hardregs;
5285 unsigned regno;
5287 if (CALL_P (insn))
5288 return !SIBLING_CALL_P (insn);
5290 CLEAR_HARD_REG_SET (hardregs);
5291 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5293 rtx dreg = DF_REF_REG (*df_rec);
5295 if (!REG_P (dreg))
5296 continue;
5298 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5299 REGNO (dreg));
5301 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5302 return true;
5303 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5304 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5305 if (TEST_HARD_REG_BIT (hardregs, regno)
5306 && df_regs_ever_live_p (regno))
5307 return true;
5309 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5311 rtx reg = DF_REF_REG (*df_rec);
5313 if (!REG_P (reg))
5314 continue;
5316 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5317 REGNO (reg));
5319 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5320 return true;
5322 return false;
5325 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5326 and if BB is its only predecessor. Return that block if so,
5327 otherwise return null. */
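/* E.g. (a sketch): when sinking a copy into hard register r4, we look
   for the unique successor edge whose destination has r4 live on
   entry; if two successors want r4, or the chosen block has another
   predecessor, the insn cannot move further.  */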
5329 static basic_block
5330 next_block_for_reg (basic_block bb, int regno, int end_regno)
5332 edge e, live_edge;
5333 edge_iterator ei;
5334 bitmap live;
5335 int i;
5337 live_edge = NULL;
5338 FOR_EACH_EDGE (e, ei, bb->succs)
5340 live = df_get_live_in (e->dest);
5341 for (i = regno; i < end_regno; i++)
5342 if (REGNO_REG_SET_P (live, i))
5344 if (live_edge && live_edge != e)
5345 return NULL;
5346 live_edge = e;
5350 /* We can sometimes encounter dead code. Don't try to move it
5351 into the exit block. */
5352 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5353 return NULL;
5355 /* Reject targets of abnormal edges. This is needed for correctness
5356 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5357 exception edges even though it is generally treated as call-saved
5358 for the majority of the compilation. Moving across abnormal edges
5359 isn't going to be interesting for shrink-wrap usage anyway. */
5360 if (live_edge->flags & EDGE_ABNORMAL)
5361 return NULL;
5363 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5364 return NULL;
5366 return live_edge->dest;
5369 /* Try to move INSN from BB to a successor. Return true on success.
5370 USES and DEFS are the set of registers that are used and defined
5371 after INSN in BB. */
5373 static bool
5374 move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5375 const HARD_REG_SET uses,
5376 const HARD_REG_SET defs)
5378 rtx set, src, dest;
5379 bitmap live_out, live_in, bb_uses, bb_defs;
5380 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5381 basic_block next_block;
5383 /* Look for a simple register copy. */
5384 set = single_set (insn);
5385 if (!set)
5386 return false;
5387 src = SET_SRC (set);
5388 dest = SET_DEST (set);
5389 if (!REG_P (dest) || !REG_P (src))
5390 return false;
5392 /* Make sure that the source register isn't defined later in BB. */
5393 sregno = REGNO (src);
5394 end_sregno = END_REGNO (src);
5395 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5396 return false;
5398 /* Make sure that the destination register isn't referenced later in BB. */
5399 dregno = REGNO (dest);
5400 end_dregno = END_REGNO (dest);
5401 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5402 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5403 return false;
5405 /* See whether there is a successor block to which we could move INSN. */
5406 next_block = next_block_for_reg (bb, dregno, end_dregno);
5407 if (!next_block)
5408 return false;
5410 /* At this point we are committed to moving INSN, but let's try to
5411 move it as far as we can. */
5414 live_out = df_get_live_out (bb);
5415 live_in = df_get_live_in (next_block);
5416 bb = next_block;
5418 /* Check whether BB uses DEST or clobbers DEST. We need to add
5419 INSN to BB if so. Either way, DEST is no longer live on entry,
5420 except for any part that overlaps SRC (next loop). */
5421 bb_uses = &DF_LR_BB_INFO (bb)->use;
5422 bb_defs = &DF_LR_BB_INFO (bb)->def;
5423 for (i = dregno; i < end_dregno; i++)
5425 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
5426 next_block = NULL;
5427 CLEAR_REGNO_REG_SET (live_out, i);
5428 CLEAR_REGNO_REG_SET (live_in, i);
5431 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5432 Either way, SRC is now live on entry. */
5433 for (i = sregno; i < end_sregno; i++)
5435 if (REGNO_REG_SET_P (bb_defs, i))
5436 next_block = NULL;
5437 SET_REGNO_REG_SET (live_out, i);
5438 SET_REGNO_REG_SET (live_in, i);
5441 /* If we don't need to add the move to BB, look for a single
5442 successor block. */
5443 if (next_block)
5444 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5446 while (next_block);
5448 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5449 (next loop). */
5450 for (i = dregno; i < end_dregno; i++)
5452 CLEAR_REGNO_REG_SET (bb_uses, i);
5453 SET_REGNO_REG_SET (bb_defs, i);
5456 /* BB now uses SRC. */
5457 for (i = sregno; i < end_sregno; i++)
5458 SET_REGNO_REG_SET (bb_uses, i);
5460 emit_insn_after (PATTERN (insn), bb_note (bb));
5461 delete_insn (insn);
5462 return true;
5465 /* Look for register copies in the first block of the function, and move
5466 them down into successor blocks if the register is used only on one
5467 path. This exposes more opportunities for shrink-wrapping. These
5468 kinds of sets often occur when incoming argument registers are moved
5469 to call-saved registers because their values are live across one or
5470 more calls during the function. */
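/* For example (a sketch with made-up hard registers): the entry block
   often begins with

     (set (reg:SI r4) (reg:SI r0))

   where r0 holds an incoming argument and r4 is call-saved.  If r4 is
   only live down the path that makes calls, sinking the copy into
   that path lets the call-free path run without a prologue.  */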
5472 static void
5473 prepare_shrink_wrap (basic_block entry_block)
5475 rtx insn, curr, x;
5476 HARD_REG_SET uses, defs;
5477 df_ref *ref;
5479 CLEAR_HARD_REG_SET (uses);
5480 CLEAR_HARD_REG_SET (defs);
5481 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5482 if (NONDEBUG_INSN_P (insn)
5483 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5485 /* Add all defined registers to DEFs. */
5486 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5488 x = DF_REF_REG (*ref);
5489 if (REG_P (x) && HARD_REGISTER_P (x))
5490 SET_HARD_REG_BIT (defs, REGNO (x));
5493 /* Add all used registers to USES. */
5494 for (ref = DF_INSN_USES (insn); *ref; ref++)
5496 x = DF_REF_REG (*ref);
5497 if (REG_P (x) && HARD_REGISTER_P (x))
5498 SET_HARD_REG_BIT (uses, REGNO (x));
5503 #endif
5505 #ifdef HAVE_return
5506 /* Insert use of return register before the end of BB. */
5508 static void
5509 emit_use_return_register_into_block (basic_block bb)
5511 rtx seq;
5512 start_sequence ();
5513 use_return_register ();
5514 seq = get_insns ();
5515 end_sequence ();
5516 emit_insn_before (seq, BB_END (bb));
5520 /* Create a return pattern, either simple_return or return, depending on
5521 simple_p. */
5523 static rtx
5524 gen_return_pattern (bool simple_p)
5526 #ifdef HAVE_simple_return
5527 return simple_p ? gen_simple_return () : gen_return ();
5528 #else
5529 gcc_assert (!simple_p);
5530 return gen_return ();
5531 #endif
5534 /* Insert an appropriate return pattern at the end of block BB. This
5535 also means updating block_for_insn appropriately. SIMPLE_P is
5536 the same as in gen_return_pattern and passed to it. */
5538 static void
5539 emit_return_into_block (bool simple_p, basic_block bb)
5541 rtx jump, pat;
5542 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5543 pat = PATTERN (jump);
5544 if (GET_CODE (pat) == PARALLEL)
5545 pat = XVECEXP (pat, 0, 0);
5546 gcc_assert (ANY_RETURN_P (pat));
5547 JUMP_LABEL (jump) = pat;
5549 #endif
5551 /* Set JUMP_LABEL for a return insn. */
5553 void
5554 set_return_jump_label (rtx returnjump)
5556 rtx pat = PATTERN (returnjump);
5557 if (GET_CODE (pat) == PARALLEL)
5558 pat = XVECEXP (pat, 0, 0);
5559 if (ANY_RETURN_P (pat))
5560 JUMP_LABEL (returnjump) = pat;
5561 else
5562 JUMP_LABEL (returnjump) = ret_rtx;
5565 #ifdef HAVE_simple_return
5566 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5567 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5568 static void
5569 dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5570 bitmap_head *need_prologue)
5572 edge_iterator ei;
5573 edge e;
5574 rtx insn = BB_END (bb);
5576 /* We know BB has a single successor, so there is no need to copy a
5577 simple jump at the end of BB. */
5578 if (simplejump_p (insn))
5579 insn = PREV_INSN (insn);
5581 start_sequence ();
5582 duplicate_insn_chain (BB_HEAD (bb), insn);
5583 if (dump_file)
5585 unsigned count = 0;
5586 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5587 if (active_insn_p (insn))
5588 ++count;
5589 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5590 bb->index, copy_bb->index, count);
5592 insn = get_insns ();
5593 end_sequence ();
5594 emit_insn_before (insn, before);
5596 /* Redirect all the paths that need no prologue into copy_bb. */
5597 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5598 if (!bitmap_bit_p (need_prologue, e->src->index))
5600 redirect_edge_and_branch_force (e, copy_bb);
5601 continue;
5603 else
5604 ei_next (&ei);
5606 #endif
5608 #if defined (HAVE_return) || defined (HAVE_simple_return)
5609 /* Return true if there are any active insns between HEAD and TAIL. */
5610 static bool
5611 active_insn_between (rtx head, rtx tail)
5613 while (tail)
5615 if (active_insn_p (tail))
5616 return true;
5617 if (tail == head)
5618 return false;
5619 tail = PREV_INSN (tail);
5621 return false;
5624 /* LAST_BB is a block that exits and is empty of active instructions.
5625 Examine its predecessors for jumps that can be converted to
5626 (conditional) returns. */
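/* For example: an unconditional (set (pc) (label_ref ...)) jumping to
   LAST_BB is replaced by a bare (return) or (simple_return) insn, while
   a conditional jump is redirected so that its taken arm becomes
   (return) or (simple_return).  */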
5627 static VEC (edge, heap) *
5628 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5629 VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
5631 int i;
5632 basic_block bb;
5633 rtx label;
5634 edge_iterator ei;
5635 edge e;
5636 VEC(basic_block,heap) *src_bbs;
5638 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5639 FOR_EACH_EDGE (e, ei, last_bb->preds)
5640 if (e->src != ENTRY_BLOCK_PTR)
5641 VEC_quick_push (basic_block, src_bbs, e->src);
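/* Any jump whose JUMP_LABEL is the label heading LAST_BB is a
   candidate for conversion below.  */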
5643 label = BB_HEAD (last_bb);
5645 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
5647 rtx jump = BB_END (bb);
5649 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5650 continue;
5652 e = find_edge (bb, last_bb);
5654 /* If we have an unconditional jump, we can replace that
5655 with a simple return instruction. */
5656 if (simplejump_p (jump))
5658 /* The use of the return register might be present in the exit
5659 fallthru block. Either:
5660 - removing the use is safe, and we should remove the use in
5661 the exit fallthru block, or
5662 - removing the use is not safe, and we should add it here.
5663 For now, we conservatively choose the latter. Either of the
5664 two helps with crossjumping. */
5665 emit_use_return_register_into_block (bb);
5667 emit_return_into_block (simple_p, bb);
5668 delete_insn (jump);
5671 /* If we have a conditional jump branching to the last
5672 block, we can try to replace that with a conditional
5673 return instruction. */
5674 else if (condjump_p (jump))
5676 rtx dest;
5678 if (simple_p)
5679 dest = simple_return_rtx;
5680 else
5681 dest = ret_rtx;
5682 if (!redirect_jump (jump, dest, 0))
5684 #ifdef HAVE_simple_return
5685 if (simple_p)
5687 if (dump_file)
5688 fprintf (dump_file,
5689 "Failed to redirect bb %d branch.\n", bb->index);
5690 VEC_safe_push (edge, heap, unconverted, e);
5692 #endif
5693 continue;
5696 /* See comment in simplejump_p case above. */
5697 emit_use_return_register_into_block (bb);
5699 /* If this block has only one successor, it both jumps
5700 and falls through to the fallthru block, so we can't
5701 delete the edge. */
5702 if (single_succ_p (bb))
5703 continue;
5705 else
5707 #ifdef HAVE_simple_return
5708 if (simple_p)
5710 if (dump_file)
5711 fprintf (dump_file,
5712 "Failed to redirect bb %d branch.\n", bb->index);
5713 VEC_safe_push (edge, heap, unconverted, e);
5715 #endif
5716 continue;
5719 /* Fix up the CFG for the successful change we just made. */
5720 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5721 e->flags &= ~EDGE_CROSSING;
5723 VEC_free (basic_block, heap, src_bbs);
5724 return unconverted;
5727 /* Emit a return insn for the exit fallthru block. */
5728 static basic_block
5729 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5731 basic_block last_bb = exit_fallthru_edge->src;
5733 if (JUMP_P (BB_END (last_bb)))
5735 last_bb = split_edge (exit_fallthru_edge);
5736 exit_fallthru_edge = single_succ_edge (last_bb);
5738 emit_barrier_after (BB_END (last_bb));
5739 emit_return_into_block (simple_p, last_bb);
5740 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5741 return last_bb;
5743 #endif
5746 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5747 this into place with notes indicating where the prologue ends and where
5748 the epilogue begins. Update the basic block information when possible.
5750 Notes on epilogue placement:
5751 There are several kinds of edges to the exit block:
5752 * a single fallthru edge from LAST_BB
5753 * possibly, edges from blocks containing sibcalls
5754 * possibly, fake edges from infinite loops
5756 The epilogue is always emitted on the fallthru edge from the last basic
5757 block in the function, LAST_BB, into the exit block.
5759 If LAST_BB is empty except for a label, it is the target of every
5760 other basic block in the function that ends in a return. If a
5761 target has a return or simple_return pattern (possibly with
5762 conditional variants), these basic blocks can be changed so that a
5763 return insn is emitted into them, and their target is adjusted to
5764 the real exit block.
5766 Notes on shrink wrapping: We implement a fairly conservative
5767 version of shrink-wrapping rather than the textbook one. We only
5768 generate a single prologue and a single epilogue. This is
5769 sufficient to catch a number of interesting cases involving early
5770 exits.
5772 First, we identify the blocks that require the prologue to occur before
5773 them. These are the ones that modify a call-saved register, or reference
5774 any of the stack or frame pointer registers. To simplify things, we then
5775 mark everything reachable from these blocks as also requiring a prologue.
5776 This takes care of loops automatically, and avoids the need to examine
5777 whether MEMs reference the frame, since it is sufficient to check for
5778 occurrences of the stack or frame pointer.
5780 We then compute the set of blocks for which the need for a prologue
5781 is anticipatable (borrowing terminology from the shrink-wrapping
5782 description in Muchnick's book). These are the blocks which either
5783 require a prologue themselves, or those that have only successors
5784 where the prologue is anticipatable. The prologue needs to be
5785 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5786 is not. For the moment, we ensure that only one such edge exists.
5788 The epilogue is placed as described above, but we make a
5789 distinction between inserting return and simple_return patterns
5790 when modifying other blocks that end in a return. Blocks that end
5791 in a sibcall omit the sibcall_epilogue if the block is not in
5792 ANTIC. */
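/* A minimal, self-contained sketch (hypothetical; guarded out of the
   build) of the ANTIC fixed point described above, over a toy CFG in
   adjacency arrays.  All names here are illustrative only; the real
   pass below works on basic_block data with an explicit worklist.  */
#if 0
#include <stdbool.h>

#define SKETCH_MAX_BBS 16

/* ANTIC[B] becomes true iff B needs the prologue itself, or B has
   successors and every one of them already anticipates it.  */
static void
sketch_compute_antic (int n_bbs, const bool needs_prologue[],
                      const int n_succs[],
                      const int succs[][SKETCH_MAX_BBS], bool antic[])
{
  int b, s;
  bool changed;

  for (b = 0; b < n_bbs; b++)
    antic[b] = needs_prologue[b];

  /* Iterate to a fixed point.  */
  do
    {
      changed = false;
      for (b = 0; b < n_bbs; b++)
        {
          bool all_set = n_succs[b] > 0;

          if (antic[b])
            continue;
          for (s = 0; s < n_succs[b]; s++)
            if (!antic[succs[b][s]])
              {
                all_set = false;
                break;
              }
          if (all_set)
            antic[b] = changed = true;
        }
    }
  while (changed);
}
#endif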
5794 static void
5795 thread_prologue_and_epilogue_insns (void)
5797 bool inserted;
5798 #ifdef HAVE_simple_return
5799 VEC (edge, heap) *unconverted_simple_returns = NULL;
5800 bool nonempty_prologue;
5801 bitmap_head bb_flags;
5802 unsigned max_grow_size;
5803 #endif
5804 rtx returnjump;
5805 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5806 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5807 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5808 edge_iterator ei;
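/* Make sure the dataflow information consulted below (e.g. liveness
   on the candidate edge) is up to date.  */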
5810 df_analyze ();
5812 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5814 inserted = false;
5815 seq = NULL_RTX;
5816 epilogue_end = NULL_RTX;
5817 returnjump = NULL_RTX;
5819 /* Can't deal with multiple successors of the entry block at the
5820 moment. The function should always have at least one entry
5821 point. */
5822 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5823 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5824 orig_entry_edge = entry_edge;
5826 split_prologue_seq = NULL_RTX;
5827 if (flag_split_stack
5828 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5829 == NULL))
5831 #ifndef HAVE_split_stack_prologue
5832 gcc_unreachable ();
5833 #else
5834 gcc_assert (HAVE_split_stack_prologue);
5836 start_sequence ();
5837 emit_insn (gen_split_stack_prologue ());
5838 split_prologue_seq = get_insns ();
5839 end_sequence ();
5841 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5842 set_insn_locators (split_prologue_seq, prologue_locator);
5843 #endif
5846 prologue_seq = NULL_RTX;
5847 #ifdef HAVE_prologue
5848 if (HAVE_prologue)
5850 start_sequence ();
5851 seq = gen_prologue ();
5852 emit_insn (seq);
5854 /* Insert an explicit USE for the frame pointer
5855 if profiling is on and the frame pointer is required. */
5856 if (crtl->profile && frame_pointer_needed)
5857 emit_use (hard_frame_pointer_rtx);
5859 /* Retain a map of the prologue insns. */
5860 record_insns (seq, NULL, &prologue_insn_hash);
5861 emit_note (NOTE_INSN_PROLOGUE_END);
5863 /* Ensure that instructions are not moved into the prologue when
5864 profiling is on. The call to the profiling routine can be
5865 emitted within the live range of a call-clobbered register. */
5866 if (!targetm.profile_before_prologue () && crtl->profile)
5867 emit_insn (gen_blockage ());
5869 prologue_seq = get_insns ();
5870 end_sequence ();
5871 set_insn_locators (prologue_seq, prologue_locator);
5873 #endif
5875 #ifdef HAVE_simple_return
5876 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5878 /* Try to perform a kind of shrink-wrapping, making sure the
5879 prologue/epilogue is emitted only around those parts of the
5880 function that require it. */
5882 nonempty_prologue = false;
5883 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
5884 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
5886 nonempty_prologue = true;
5887 break;
5890 if (flag_shrink_wrap && HAVE_simple_return
5891 && (targetm.profile_before_prologue () || !crtl->profile)
5892 && nonempty_prologue && !crtl->calls_eh_return)
5894 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
5895 struct hard_reg_set_container set_up_by_prologue;
5896 rtx p_insn;
5897 VEC(basic_block, heap) *vec;
5898 basic_block bb;
5899 bitmap_head bb_antic_flags;
5900 bitmap_head bb_on_list;
5901 bitmap_head bb_tail;
5903 if (dump_file)
5904 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
5906 /* Compute the registers set and used in the prologue. */
5907 CLEAR_HARD_REG_SET (prologue_clobbered);
5908 CLEAR_HARD_REG_SET (prologue_used);
5909 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
5911 HARD_REG_SET this_used;
5912 if (!NONDEBUG_INSN_P (p_insn))
5913 continue;
5915 CLEAR_HARD_REG_SET (this_used);
5916 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
5917 &this_used);
5918 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
5919 IOR_HARD_REG_SET (prologue_used, this_used);
5920 note_stores (PATTERN (p_insn), record_hard_reg_sets,
5921 &prologue_clobbered);
5924 prepare_shrink_wrap (entry_edge->dest);
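/* BB_ANTIC_FLAGS marks blocks where the need for a prologue is
   anticipatable, BB_ON_LIST tracks worklist membership, and BB_TAIL
   marks duplicatable tail blocks; see the walks below.  */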
5926 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
5927 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
5928 bitmap_initialize (&bb_tail, &bitmap_default_obstack);
5930 /* Find the set of basic blocks that require a stack frame,
5931 and blocks that are too big to be duplicated. */
5933 vec = VEC_alloc (basic_block, heap, n_basic_blocks);
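/* Collect the registers that the prologue sets up; any insn that
   references one of them requires the prologue to occur first.  */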
5935 CLEAR_HARD_REG_SET (set_up_by_prologue.set);
5936 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5937 STACK_POINTER_REGNUM);
5938 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
5939 if (frame_pointer_needed)
5940 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5941 HARD_FRAME_POINTER_REGNUM);
5942 if (pic_offset_table_rtx)
5943 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5944 PIC_OFFSET_TABLE_REGNUM);
5945 if (stack_realign_drap && crtl->drap_reg)
5946 add_to_hard_reg_set (&set_up_by_prologue.set,
5947 GET_MODE (crtl->drap_reg),
5948 REGNO (crtl->drap_reg));
5949 if (targetm.set_up_by_prologue)
5950 targetm.set_up_by_prologue (&set_up_by_prologue);
5952 /* We don't use a different max size depending on
5953 optimize_bb_for_speed_p because increasing shrink-wrapping
5954 opportunities by duplicating tail blocks can actually result
5955 in an overall decrease in code size. */
5956 max_grow_size = get_uncond_jump_length ();
5957 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
5959 FOR_EACH_BB (bb)
5961 rtx insn;
5962 unsigned size = 0;
5964 FOR_BB_INSNS (bb, insn)
5965 if (NONDEBUG_INSN_P (insn))
5967 if (requires_stack_frame_p (insn, prologue_used,
5968 set_up_by_prologue.set))
5970 if (bb == entry_edge->dest)
5971 goto fail_shrinkwrap;
5972 bitmap_set_bit (&bb_flags, bb->index);
5973 VEC_quick_push (basic_block, vec, bb);
5974 break;
5976 else if (size <= max_grow_size)
5978 size += get_attr_min_length (insn);
5979 if (size > max_grow_size)
5980 bitmap_set_bit (&bb_on_list, bb->index);
5985 /* Blocks that really need a prologue, or are too big for tails. */
5986 bitmap_ior_into (&bb_on_list, &bb_flags);
5988 /* For every basic block that needs a prologue, mark all blocks
5989 reachable from it, so as to ensure they are also seen as
5990 requiring a prologue. */
5991 while (!VEC_empty (basic_block, vec))
5993 basic_block tmp_bb = VEC_pop (basic_block, vec);
5995 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
5996 if (e->dest != EXIT_BLOCK_PTR
5997 && bitmap_set_bit (&bb_flags, e->dest->index))
5998 VEC_quick_push (basic_block, vec, e->dest);
6001 /* Find the set of basic blocks that need no prologue, have a
6002 single successor, can be duplicated, meet a max size
6003 requirement, and go to the exit via like blocks. */
6004 VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
6005 while (!VEC_empty (basic_block, vec))
6007 basic_block tmp_bb = VEC_pop (basic_block, vec);
6009 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6010 if (single_succ_p (e->src)
6011 && !bitmap_bit_p (&bb_on_list, e->src->index)
6012 && can_duplicate_block_p (e->src))
6014 edge pe;
6015 edge_iterator pei;
6017 /* If there is a predecessor of e->src that doesn't
6018 need a prologue and the edge is complex,
6019 we might not be able to redirect the branch
6020 to a copy of e->src. */
6021 FOR_EACH_EDGE (pe, pei, e->src->preds)
6022 if ((pe->flags & EDGE_COMPLEX) != 0
6023 && !bitmap_bit_p (&bb_flags, pe->src->index))
6024 break;
6025 if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
6026 VEC_quick_push (basic_block, vec, e->src);
6030 /* Now walk backwards from every block that is marked as needing
6031 a prologue to compute the bb_antic_flags bitmap. Exclude
6032 tail blocks; they can be duplicated to be used on paths not
6033 needing a prologue. */
6034 bitmap_clear (&bb_on_list);
6035 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
6036 FOR_EACH_BB (bb)
6038 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6039 continue;
6040 FOR_EACH_EDGE (e, ei, bb->preds)
6041 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6042 && bitmap_set_bit (&bb_on_list, e->src->index))
6043 VEC_quick_push (basic_block, vec, e->src);
6045 while (!VEC_empty (basic_block, vec))
6047 basic_block tmp_bb = VEC_pop (basic_block, vec);
6048 bool all_set = true;
6050 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
6051 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6052 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
6054 all_set = false;
6055 break;
6058 if (all_set)
6060 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
6061 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6062 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6063 && bitmap_set_bit (&bb_on_list, e->src->index))
6064 VEC_quick_push (basic_block, vec, e->src);
6067 /* Find exactly one edge that leads to a block in ANTIC from
6068 a block that isn't. */
6069 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6070 FOR_EACH_BB (bb)
6072 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6073 continue;
6074 FOR_EACH_EDGE (e, ei, bb->preds)
6075 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6077 if (entry_edge != orig_entry_edge)
6079 entry_edge = orig_entry_edge;
6080 if (dump_file)
6081 fprintf (dump_file, "More than one candidate edge.\n");
6082 goto fail_shrinkwrap;
6084 if (dump_file)
6085 fprintf (dump_file, "Found candidate edge for "
6086 "shrink-wrapping, %d->%d.\n", e->src->index,
6087 e->dest->index);
6088 entry_edge = e;
6092 if (entry_edge != orig_entry_edge)
6094 /* Test whether the prologue is known to clobber any register
6095 (other than FP or SP) that is live on the edge. */
6096 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6097 if (frame_pointer_needed)
6098 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6099 CLEAR_HARD_REG_SET (live_on_edge);
6100 reg_set_to_hard_reg_set (&live_on_edge,
6101 df_get_live_in (entry_edge->dest));
6102 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6104 entry_edge = orig_entry_edge;
6105 if (dump_file)
6106 fprintf (dump_file,
6107 "Shrink-wrapping aborted due to clobber.\n");
6110 if (entry_edge != orig_entry_edge)
6112 crtl->shrink_wrapped = true;
6113 if (dump_file)
6114 fprintf (dump_file, "Performing shrink-wrapping.\n");
6116 /* Find tail blocks reachable from both blocks needing a
6117 prologue and blocks not needing a prologue. */
6118 if (!bitmap_empty_p (&bb_tail))
6119 FOR_EACH_BB (bb)
6121 bool some_pro, some_no_pro;
6122 if (!bitmap_bit_p (&bb_tail, bb->index))
6123 continue;
6124 some_pro = some_no_pro = false;
6125 FOR_EACH_EDGE (e, ei, bb->preds)
6127 if (bitmap_bit_p (&bb_flags, e->src->index))
6128 some_pro = true;
6129 else
6130 some_no_pro = true;
6132 if (some_pro && some_no_pro)
6133 VEC_quick_push (basic_block, vec, bb);
6134 else
6135 bitmap_clear_bit (&bb_tail, bb->index);
6137 /* Find the head of each tail. */
6138 while (!VEC_empty (basic_block, vec))
6140 basic_block tbb = VEC_pop (basic_block, vec);
6142 if (!bitmap_bit_p (&bb_tail, tbb->index))
6143 continue;
6145 while (single_succ_p (tbb))
6147 tbb = single_succ (tbb);
6148 bitmap_clear_bit (&bb_tail, tbb->index);
6151 /* Now duplicate the tails. */
6152 if (!bitmap_empty_p (&bb_tail))
6153 FOR_EACH_BB_REVERSE (bb)
6155 basic_block copy_bb, tbb;
6156 rtx insert_point;
6157 int eflags;
6159 if (!bitmap_clear_bit (&bb_tail, bb->index))
6160 continue;
6162 /* Create a copy of BB, instructions and all, for
6163 use on paths that don't need a prologue.
6164 Ideal placement of the copy is on a fall-thru edge
6165 or after a block that would jump to the copy. */
6166 FOR_EACH_EDGE (e, ei, bb->preds)
6167 if (!bitmap_bit_p (&bb_flags, e->src->index)
6168 && single_succ_p (e->src))
6169 break;
6170 if (e)
6172 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
6173 NULL_RTX, e->src);
6174 BB_COPY_PARTITION (copy_bb, e->src);
6176 else
6178 /* Otherwise put the copy at the end of the function. */
6179 copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
6180 EXIT_BLOCK_PTR->prev_bb);
6181 BB_COPY_PARTITION (copy_bb, bb);
6184 insert_point = emit_note_after (NOTE_INSN_DELETED,
6185 BB_END (copy_bb));
6186 emit_barrier_after (BB_END (copy_bb));
6188 tbb = bb;
6189 while (1)
6191 dup_block_and_redirect (tbb, copy_bb, insert_point,
6192 &bb_flags);
6193 tbb = single_succ (tbb);
6194 if (tbb == EXIT_BLOCK_PTR)
6195 break;
6196 e = split_block (copy_bb, PREV_INSN (insert_point));
6197 copy_bb = e->dest;
6200 /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
6201 We have yet to add a simple_return to the tails,
6202 as we'd like to first convert_jumps_to_returns in
6203 case the block is no longer used after that. */
6204 eflags = EDGE_FAKE;
6205 if (CALL_P (PREV_INSN (insert_point))
6206 && SIBLING_CALL_P (PREV_INSN (insert_point)))
6207 eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
6208 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
6210 /* verify_flow_info doesn't like a note after a
6211 sibling call. */
6212 delete_insn (insert_point);
6213 if (bitmap_empty_p (&bb_tail))
6214 break;
6218 fail_shrinkwrap:
6219 bitmap_clear (&bb_tail);
6220 bitmap_clear (&bb_antic_flags);
6221 bitmap_clear (&bb_on_list);
6222 VEC_free (basic_block, heap, vec);
6224 #endif
6226 if (split_prologue_seq != NULL_RTX)
6228 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6229 inserted = true;
6231 if (prologue_seq != NULL_RTX)
6233 insert_insn_on_edge (prologue_seq, entry_edge);
6234 inserted = true;
6237 /* If the exit block has no non-fake predecessors, we don't need
6238 an epilogue. */
6239 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6240 if ((e->flags & EDGE_FAKE) == 0)
6241 break;
6242 if (e == NULL)
6243 goto epilogue_done;
6245 rtl_profile_for_bb (EXIT_BLOCK_PTR);
6247 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
6249 /* If we're allowed to generate a simple return instruction, then by
6250 definition we don't need a full epilogue. If the last basic
6251 block before the exit block does not contain active instructions,
6252 examine its predecessors and try to emit (conditional) return
6253 instructions. */
6254 #ifdef HAVE_simple_return
6255 if (entry_edge != orig_entry_edge)
6257 if (optimize)
6259 unsigned i, last;
6261 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
6262 (but won't remove). Stop at end of current preds. */
6263 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
6264 for (i = 0; i < last; i++)
6266 e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
6267 if (LABEL_P (BB_HEAD (e->src))
6268 && !bitmap_bit_p (&bb_flags, e->src->index)
6269 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
6270 unconverted_simple_returns
6271 = convert_jumps_to_returns (e->src, true,
6272 unconverted_simple_returns);
6276 if (exit_fallthru_edge != NULL
6277 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
6278 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
6280 basic_block last_bb;
6282 last_bb = emit_return_for_exit (exit_fallthru_edge, true);
6283 returnjump = BB_END (last_bb);
6284 exit_fallthru_edge = NULL;
6287 #endif
6288 #ifdef HAVE_return
6289 if (HAVE_return)
6291 if (exit_fallthru_edge == NULL)
6292 goto epilogue_done;
6294 if (optimize)
6296 basic_block last_bb = exit_fallthru_edge->src;
6298 if (LABEL_P (BB_HEAD (last_bb))
6299 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6300 convert_jumps_to_returns (last_bb, false, NULL);
6302 if (EDGE_COUNT (last_bb->preds) != 0
6303 && single_succ_p (last_bb))
6305 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6306 epilogue_end = returnjump = BB_END (last_bb);
6307 #ifdef HAVE_simple_return
6308 /* Emitting the return may add a basic block.
6309 Fix bb_flags for the added block. */
6310 if (last_bb != exit_fallthru_edge->src)
6311 bitmap_set_bit (&bb_flags, last_bb->index);
6312 #endif
6313 goto epilogue_done;
6317 #endif
6319 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6320 this marker for the splits of EH_RETURN patterns, and nothing else
6321 uses the flag in the meantime. */
6322 epilogue_completed = 1;
6324 #ifdef HAVE_eh_return
6325 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6326 some targets, these get split to a special version of the epilogue
6327 code. In order to be able to properly annotate these with unwind
6328 info, try to split them now. If we get a valid split, drop an
6329 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6330 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6332 rtx prev, last, trial;
6334 if (e->flags & EDGE_FALLTHRU)
6335 continue;
6336 last = BB_END (e->src);
6337 if (!eh_returnjump_p (last))
6338 continue;
6340 prev = PREV_INSN (last);
6341 trial = try_split (PATTERN (last), last, 1);
6342 if (trial == last)
6343 continue;
6345 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6346 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6348 #endif
6350 /* If nothing falls through into the exit block, we don't need an
6351 epilogue. */
6353 if (exit_fallthru_edge == NULL)
6354 goto epilogue_done;
6356 #ifdef HAVE_epilogue
6357 if (HAVE_epilogue)
6359 start_sequence ();
6360 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6361 seq = gen_epilogue ();
6362 if (seq)
6363 emit_jump_insn (seq);
6365 /* Retain a map of the epilogue insns. */
6366 record_insns (seq, NULL, &epilogue_insn_hash);
6367 set_insn_locators (seq, epilogue_locator);
6369 seq = get_insns ();
6370 returnjump = get_last_insn ();
6371 end_sequence ();
6373 insert_insn_on_edge (seq, exit_fallthru_edge);
6374 inserted = true;
6376 if (JUMP_P (returnjump))
6377 set_return_jump_label (returnjump);
6379 else
6380 #endif
6382 basic_block cur_bb;
6384 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6385 goto epilogue_done;
6386 /* We have a fall-through edge to the exit block, the source is not
6387 at the end of the function, and there will be an assembler epilogue
6388 at the end of the function.
6389 We can't use force_nonfallthru here, because that would try to
6390 use return. Inserting a jump 'by hand' is extremely messy, so
6391 we take advantage of cfg_layout_finalize using
6392 fixup_fallthru_exit_predecessor. */
6393 cfg_layout_initialize (0);
6394 FOR_EACH_BB (cur_bb)
6395 if (cur_bb->index >= NUM_FIXED_BLOCKS
6396 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6397 cur_bb->aux = cur_bb->next_bb;
6398 cfg_layout_finalize ();
6401 epilogue_done:
6403 default_rtl_profile ();
6405 if (inserted)
6407 sbitmap blocks;
6409 commit_edge_insertions ();
6411 /* Look for basic blocks within the prologue insns. */
6412 blocks = sbitmap_alloc (last_basic_block);
6413 sbitmap_zero (blocks);
6414 SET_BIT (blocks, entry_edge->dest->index);
6415 SET_BIT (blocks, orig_entry_edge->dest->index);
6416 find_many_sub_basic_blocks (blocks);
6417 sbitmap_free (blocks);
6419 /* The epilogue insns we inserted may cause the exit edge to no longer
6420 be fallthru. */
6421 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6423 if (((e->flags & EDGE_FALLTHRU) != 0)
6424 && returnjump_p (BB_END (e->src)))
6425 e->flags &= ~EDGE_FALLTHRU;
6429 #ifdef HAVE_simple_return
6430 /* If there were branches to an empty LAST_BB which we tried to
6431 convert to conditional simple_returns, but couldn't for some
6432 reason, create a block to hold a simple_return insn and redirect
6433 those remaining edges. */
6434 if (!VEC_empty (edge, unconverted_simple_returns))
6436 basic_block simple_return_block_hot = NULL;
6437 basic_block simple_return_block_cold = NULL;
6438 edge pending_edge_hot = NULL;
6439 edge pending_edge_cold = NULL;
6440 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
6441 int i;
6443 gcc_assert (entry_edge != orig_entry_edge);
6445 /* See if we can reuse the last insn that was emitted for the
6446 epilogue. */
6447 if (returnjump != NULL_RTX
6448 && JUMP_LABEL (returnjump) == simple_return_rtx)
6450 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
6451 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6452 simple_return_block_hot = e->dest;
6453 else
6454 simple_return_block_cold = e->dest;
6457 /* Also check returns we might need to add to tail blocks. */
6458 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6459 if (EDGE_COUNT (e->src->preds) != 0
6460 && (e->flags & EDGE_FAKE) != 0
6461 && !bitmap_bit_p (&bb_flags, e->src->index))
6463 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6464 pending_edge_hot = e;
6465 else
6466 pending_edge_cold = e;
6469 FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
6471 basic_block *pdest_bb;
6472 edge pending;
6474 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6476 pdest_bb = &simple_return_block_hot;
6477 pending = pending_edge_hot;
6479 else
6481 pdest_bb = &simple_return_block_cold;
6482 pending = pending_edge_cold;
6485 if (*pdest_bb == NULL && pending != NULL)
6487 emit_return_into_block (true, pending->src);
6488 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6489 *pdest_bb = pending->src;
6491 else if (*pdest_bb == NULL)
6493 basic_block bb;
6494 rtx start;
6496 bb = create_basic_block (NULL, NULL, exit_pred);
6497 BB_COPY_PARTITION (bb, e->src);
6498 start = emit_jump_insn_after (gen_simple_return (),
6499 BB_END (bb));
6500 JUMP_LABEL (start) = simple_return_rtx;
6501 emit_barrier_after (start);
6503 *pdest_bb = bb;
6504 make_edge (bb, EXIT_BLOCK_PTR, 0);
6506 redirect_edge_and_branch_force (e, *pdest_bb);
6508 VEC_free (edge, heap, unconverted_simple_returns);
6511 if (entry_edge != orig_entry_edge)
6513 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6514 if (EDGE_COUNT (e->src->preds) != 0
6515 && (e->flags & EDGE_FAKE) != 0
6516 && !bitmap_bit_p (&bb_flags, e->src->index))
6518 emit_return_into_block (true, e->src);
6519 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6522 #endif
6524 #ifdef HAVE_sibcall_epilogue
6525 /* Emit sibling epilogues before any sibling call sites. */
6526 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
6528 basic_block bb = e->src;
6529 rtx insn = BB_END (bb);
6530 rtx ep_seq;
6532 if (!CALL_P (insn)
6533 || ! SIBLING_CALL_P (insn)
6534 #ifdef HAVE_simple_return
6535 || (entry_edge != orig_entry_edge
6536 && !bitmap_bit_p (&bb_flags, bb->index))
6537 #endif
6540 ei_next (&ei);
6541 continue;
6544 ep_seq = gen_sibcall_epilogue ();
6545 if (ep_seq)
6547 start_sequence ();
6548 emit_note (NOTE_INSN_EPILOGUE_BEG);
6549 emit_insn (ep_seq);
6550 seq = get_insns ();
6551 end_sequence ();
6553 /* Retain a map of the epilogue insns. Used in life analysis to
6554 avoid getting rid of sibcall epilogue insns. Do this before we
6555 actually emit the sequence. */
6556 record_insns (seq, NULL, &epilogue_insn_hash);
6557 set_insn_locators (seq, epilogue_locator);
6559 emit_insn_before (seq, insn);
6561 ei_next (&ei);
6563 #endif
6565 #ifdef HAVE_epilogue
6566 if (epilogue_end)
6568 rtx insn, next;
6570 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
6571 epilogue back before it, as those can be relevant for debug
6572 info generation. There is no need, however, to be strict
6573 about the existence of such a note. */
6575 for (insn = epilogue_end; insn; insn = next)
6577 next = NEXT_INSN (insn);
6578 if (NOTE_P (insn)
6579 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6580 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6583 #endif
6585 #ifdef HAVE_simple_return
6586 bitmap_clear (&bb_flags);
6587 #endif
6589 /* Threading the prologue and epilogue changes the artificial refs
6590 in the entry and exit blocks. */
6591 epilogue_completed = 1;
6592 df_update_entry_exit_and_calls ();
6595 /* Reposition the prologue-end and epilogue-begin notes after
6596 instruction scheduling. */
6598 void
6599 reposition_prologue_and_epilogue_notes (void)
6601 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6602 || defined (HAVE_sibcall_epilogue)
6603 /* Since the hash table is created on demand, the fact that it is
6604 non-null is a signal that it is non-empty. */
6605 if (prologue_insn_hash != NULL)
6607 size_t len = htab_elements (prologue_insn_hash);
6608 rtx insn, last = NULL, note = NULL;
6610 /* Scan from the beginning until we reach the last prologue insn. */
6611 /* ??? While we do have the CFG intact, there are two problems:
6612 (1) The prologue can contain loops (typically probing the stack),
6613 which means that the end of the prologue isn't in the first bb.
6614 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6615 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6617 if (NOTE_P (insn))
6619 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6620 note = insn;
6622 else if (contains (insn, prologue_insn_hash))
6624 last = insn;
6625 if (--len == 0)
6626 break;
6630 if (last)
6632 if (note == NULL)
6634 /* Scan forward looking for the PROLOGUE_END note. It should
6635 be right at the beginning of the block, possibly with other
6636 insn notes that got moved there. */
6637 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6639 if (NOTE_P (note)
6640 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6641 break;
6645 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6646 if (LABEL_P (last))
6647 last = NEXT_INSN (last);
6648 reorder_insns (note, note, last);
6652 if (epilogue_insn_hash != NULL)
6654 edge_iterator ei;
6655 edge e;
6657 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6659 rtx insn, first = NULL, note = NULL;
6660 basic_block bb = e->src;
6662 /* Scan from the beginning until we reach the first epilogue insn. */
6663 FOR_BB_INSNS (bb, insn)
6665 if (NOTE_P (insn))
6667 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6669 note = insn;
6670 if (first != NULL)
6671 break;
6674 else if (first == NULL && contains (insn, epilogue_insn_hash))
6676 first = insn;
6677 if (note != NULL)
6678 break;
6682 if (note)
6684 /* If the function has a single basic block, and no real
6685 epilogue insns (e.g. sibcall with no cleanup), the
6686 epilogue note can get scheduled before the prologue
6687 note. If we have frame-related prologue insns, having
6688 them scanned during the epilogue will result in a crash.
6689 In this case re-order the epilogue note to just before
6690 the last insn in the block. */
6691 if (first == NULL)
6692 first = BB_END (bb);
6694 if (PREV_INSN (first) != note)
6695 reorder_insns (note, note, PREV_INSN (first));
6699 #endif /* HAVE_prologue or HAVE_epilogue */
6702 /* Returns the name of the current function. */
6703 const char *
6704 current_function_name (void)
6706 if (cfun == NULL)
6707 return "<none>";
6708 return lang_hooks.decl_printable_name (cfun->decl, 2);
6712 static unsigned int
6713 rest_of_handle_check_leaf_regs (void)
6715 #ifdef LEAF_REGISTERS
6716 current_function_uses_only_leaf_regs
6717 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6718 #endif
6719 return 0;
6722 /* Insert a TYPE into the used types hash table of CFUN. */
6724 static void
6725 used_types_insert_helper (tree type, struct function *func)
6727 if (type != NULL && func != NULL)
6729 void **slot;
6731 if (func->used_types_hash == NULL)
6732 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6733 htab_eq_pointer, NULL);
6734 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6735 if (*slot == NULL)
6736 *slot = type;
6740 /* Given a type, insert it into the used types hash table of cfun. */
6741 void
6742 used_types_insert (tree t)
6744 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6745 if (TYPE_NAME (t))
6746 break;
6747 else
6748 t = TREE_TYPE (t);
6749 if (TREE_CODE (t) == ERROR_MARK)
6750 return;
6751 if (TYPE_NAME (t) == NULL_TREE
6752 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6753 t = TYPE_MAIN_VARIANT (t);
6754 if (debug_info_level > DINFO_LEVEL_NONE)
6756 if (cfun)
6757 used_types_insert_helper (t, cfun);
6758 else
6759 /* So this might be a type referenced by a global variable.
6760 Record that type so that we can later decide to emit its debug
6761 information. */
6762 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
6766 /* Helper to hash a struct types_used_by_vars_entry. */
6768 static hashval_t
6769 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6771 gcc_assert (entry && entry->var_decl && entry->type);
6773 return iterative_hash_object (entry->type,
6774 iterative_hash_object (entry->var_decl, 0));
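/* Note: this must stay consistent with types_used_by_vars_eq below;
   entries hash and compare on the same (var_decl, type) pair.  */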
6777 /* Hash function of the types_used_by_vars_entry hash table. */
6779 hashval_t
6780 types_used_by_vars_do_hash (const void *x)
6782 const struct types_used_by_vars_entry *entry =
6783 (const struct types_used_by_vars_entry *) x;
6785 return hash_types_used_by_vars_entry (entry);
6788 /* Equality function of the types_used_by_vars_entry hash table. */
6790 int
6791 types_used_by_vars_eq (const void *x1, const void *x2)
6793 const struct types_used_by_vars_entry *e1 =
6794 (const struct types_used_by_vars_entry *) x1;
6795 const struct types_used_by_vars_entry *e2 =
6796 (const struct types_used_by_vars_entry *)x2;
6798 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6801 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6803 void
6804 types_used_by_var_decl_insert (tree type, tree var_decl)
6806 if (type != NULL && var_decl != NULL)
6808 void **slot;
6809 struct types_used_by_vars_entry e;
6810 e.var_decl = var_decl;
6811 e.type = type;
6812 if (types_used_by_vars_hash == NULL)
6813 types_used_by_vars_hash =
6814 htab_create_ggc (37, types_used_by_vars_do_hash,
6815 types_used_by_vars_eq, NULL);
6816 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6817 hash_types_used_by_vars_entry (&e), INSERT);
6818 if (*slot == NULL)
6820 struct types_used_by_vars_entry *entry;
6821 entry = ggc_alloc_types_used_by_vars_entry ();
6822 entry->type = type;
6823 entry->var_decl = var_decl;
6824 *slot = entry;
6829 struct rtl_opt_pass pass_leaf_regs =
6832 RTL_PASS,
6833 "*leaf_regs", /* name */
6834 NULL, /* gate */
6835 rest_of_handle_check_leaf_regs, /* execute */
6836 NULL, /* sub */
6837 NULL, /* next */
6838 0, /* static_pass_number */
6839 TV_NONE, /* tv_id */
6840 0, /* properties_required */
6841 0, /* properties_provided */
6842 0, /* properties_destroyed */
6843 0, /* todo_flags_start */
6844 0 /* todo_flags_finish */
6848 static unsigned int
6849 rest_of_handle_thread_prologue_and_epilogue (void)
6851 if (optimize)
6852 cleanup_cfg (CLEANUP_EXPENSIVE);
6854 /* On some machines, the prologue and epilogue code, or parts thereof,
6855 can be represented as RTL. Doing so lets us schedule insns between
6856 it and the rest of the code and also allows delayed branch
6857 scheduling to operate in the epilogue. */
6858 thread_prologue_and_epilogue_insns ();
6860 /* The stack usage info is finalized during prologue expansion. */
6861 if (flag_stack_usage_info)
6862 output_stack_usage ();
6864 return 0;
6867 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6870 RTL_PASS,
6871 "pro_and_epilogue", /* name */
6872 NULL, /* gate */
6873 rest_of_handle_thread_prologue_and_epilogue, /* execute */
6874 NULL, /* sub */
6875 NULL, /* next */
6876 0, /* static_pass_number */
6877 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6878 0, /* properties_required */
6879 0, /* properties_provided */
6880 0, /* properties_destroyed */
6881 TODO_verify_flow, /* todo_flags_start */
6882 TODO_df_verify |
6883 TODO_df_finish | TODO_verify_rtl_sharing |
6884 TODO_ggc_collect /* todo_flags_finish */
6889 /* This mini-pass fixes fall-out from SSA in asm statements that have
6890 in-out constraints. Say you start with
6892 orig = inout;
6893 asm ("": "+mr" (inout));
6894 use (orig);
6896 which is transformed very early to use explicit output and match operands:
6898 orig = inout;
6899 asm ("": "=mr" (inout) : "0" (inout));
6900 use (orig);
6902 Or, after SSA and copyprop,
6904 asm ("": "=mr" (inout_2) : "0" (inout_1));
6905 use (inout_1);
6907 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6908 they represent two separate values, so they will get different pseudo
6909 registers during expansion. Then, since the two operands need to match
6910 per the constraints, but use different pseudo registers, reload can
6911 only register a reload for these operands. But reloads can only be
6912 satisfied by hardregs, not by memory, so we need a register for this
6913 reload, just because we are presented with non-matching operands.
6914 So, even though we allow memory for this operand, no memory can be
6915 used for it, just because the two operands don't match. This can
6916 cause reload failures on register-starved targets.
6918 So this is a symptom of reload not being able to use memory for
6919 reloads, or, alternatively, of both operands not coming into
6920 reload as matching (in which case the pseudo could go to memory just
6921 fine, as the alternative allows it, and no reload would be necessary).
6922 We fix the latter problem here, by transforming
6924 asm ("": "=mr" (inout_2) : "0" (inout_1));
6926 back to
6928 inout_2 = inout_1;
6929 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
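/* A concrete source-level instance (hypothetical; guarded out of the
   build) of the in-out asm discussed above.  The "+mr" constraint is
   what gets rewritten into the "=mr" / "0" output/input pair shown in
   the comment.  */
#if 0
int
inout_example (int x)
{
  int orig = x;

  /* An empty asm with an in-out operand; X is both read and written.  */
  asm ("" : "+mr" (x));
  return orig + x;
}
#endif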
6931 static void
6932 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6934 int i;
6935 bool changed = false;
6936 rtx op = SET_SRC (p_sets[0]);
6937 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6938 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6939 bool *output_matched = XALLOCAVEC (bool, noutputs);
6941 memset (output_matched, 0, noutputs * sizeof (bool));
6942 for (i = 0; i < ninputs; i++)
6944 rtx input, output, insns;
6945 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6946 char *end;
6947 int match, j;
6949 if (*constraint == '%')
6950 constraint++;
6952 match = strtoul (constraint, &end, 10);
6953 if (end == constraint)
6954 continue;
6956 gcc_assert (match < noutputs);
6957 output = SET_DEST (p_sets[match]);
6958 input = RTVEC_ELT (inputs, i);
6959 /* Only do the transformation for pseudos. */
6960 if (! REG_P (output)
6961 || rtx_equal_p (output, input)
6962 || (GET_MODE (input) != VOIDmode
6963 && GET_MODE (input) != GET_MODE (output)))
6964 continue;
6966 /* We can't do anything if the output is also used as input,
6967 as we're going to overwrite it. */
6968 for (j = 0; j < ninputs; j++)
6969 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6970 break;
6971 if (j != ninputs)
6972 continue;
6974 /* Avoid changing the same input several times. For
6975 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6976 only change IN once (to OUT1), rather than changing it
6977 first to OUT1 and afterwards to OUT2. */
6978 if (i > 0)
6980 for (j = 0; j < noutputs; j++)
6981 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6982 break;
6983 if (j != noutputs)
6984 continue;
6986 output_matched[match] = true;
6988 start_sequence ();
6989 emit_move_insn (output, input);
6990 insns = get_insns ();
6991 end_sequence ();
6992 emit_insn_before (insns, insn);
6994 /* Now replace all mentions of the input with output. We can't
6995 just replace the occurrence in inputs[i], as the register might
6996 also be used in some other input (or even in an address of an
6997 output), which would mean possibly increasing the number of
6998 inputs by one (namely 'output' in addition), which might pose
6999 too complicated a problem for reload to solve. E.g. this situation:
7001 asm ("" : "=r" (output), "=m" (input) : "0" (input))
7003 Here 'input' is used in two occurrences as input (once for the
7004 input operand, once for the address in the second output operand).
7005 If we replaced only the occurrence of the input operand (to
7006 make the matching) we would be left with this:
7008 output = input
7009 asm ("" : "=r" (output), "=m" (input) : "0" (output))
7011 Now we suddenly have two different input values (containing the same
7012 value, but different pseudos) where we formerly had only one.
7013 With more complicated asms this might lead to reload failures
7014 which wouldn't have happened without this pass. So, iterate over
7015 all operands and replace all occurrences of the register used. */
7016 for (j = 0; j < noutputs; j++)
7017 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
7018 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7019 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7020 input, output);
7021 for (j = 0; j < ninputs; j++)
7022 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7023 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7024 input, output);
7026 changed = true;
7029 if (changed)
7030 df_insn_rescan (insn);
7033 static unsigned
7034 rest_of_match_asm_constraints (void)
7036 basic_block bb;
7037 rtx insn, pat, *p_sets;
7038 int noutputs;
7040 if (!crtl->has_asm_statement)
7041 return 0;
7043 df_set_flags (DF_DEFER_INSN_RESCAN);
7044 FOR_EACH_BB (bb)
7046 FOR_BB_INSNS (bb, insn)
7048 if (!INSN_P (insn))
7049 continue;
7051 pat = PATTERN (insn);
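/* An asm with multiple outputs is represented as a PARALLEL whose
   leading elements are the output SETs; a single-output asm without
   clobbers is a lone SET.  */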
7052 if (GET_CODE (pat) == PARALLEL)
7053 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7054 else if (GET_CODE (pat) == SET)
7055 p_sets = &PATTERN (insn), noutputs = 1;
7056 else
7057 continue;
7059 if (GET_CODE (*p_sets) == SET
7060 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7061 match_asm_constraints_1 (insn, p_sets, noutputs);
7065 return TODO_df_finish;
7068 struct rtl_opt_pass pass_match_asm_constraints =
7071 RTL_PASS,
7072 "asmcons", /* name */
7073 NULL, /* gate */
7074 rest_of_match_asm_constraints, /* execute */
7075 NULL, /* sub */
7076 NULL, /* next */
7077 0, /* static_pass_number */
7078 TV_NONE, /* tv_id */
7079 0, /* properties_required */
7080 0, /* properties_provided */
7081 0, /* properties_destroyed */
7082 0, /* todo_flags_start */
7083 0 /* todo_flags_finish */
7088 #include "gt-function.h"