gcc/sibcall.c
/* Generic sibling call optimization support
   Copyright (C) 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "regs.h"
#include "function.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "except.h"

static int identify_call_return_value PARAMS ((rtx, rtx *, rtx *));
static rtx skip_copy_to_return_value PARAMS ((rtx, rtx, rtx));
static rtx skip_use_of_return_value PARAMS ((rtx, enum rtx_code));
static rtx skip_stack_adjustment PARAMS ((rtx));
static rtx skip_jump_insn PARAMS ((rtx));
static int uses_addressof PARAMS ((rtx));
static int sequence_uses_addressof PARAMS ((rtx));
static void purge_reg_equiv_notes PARAMS ((void));

/* Examine a CALL_PLACEHOLDER pattern and determine where the call's
   return value is located.  P_HARD_RETURN receives the hard register
   that the function used; P_SOFT_RETURN receives the pseudo register
   that the sequence used.  Return non-zero if the values were located.  */

static int
identify_call_return_value (cp, p_hard_return, p_soft_return)
     rtx cp;
     rtx *p_hard_return, *p_soft_return;
{
  rtx insn, set, hard, soft;

  insn = XEXP (cp, 0);

  /* Search backward through the "normal" call sequence to the CALL insn.  */
  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);
  while (GET_CODE (insn) != CALL_INSN)
    insn = PREV_INSN (insn);

  /* Assume the pattern is (set (dest) (call ...)), or that the first
     member of a parallel is.  This is the hard return register used
     by the function.  */
  if (GET_CODE (PATTERN (insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (insn))) == CALL)
    hard = SET_DEST (PATTERN (insn));
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (PATTERN (insn), 0, 0))) == CALL)
    hard = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
  else
    return 0;

  /* If we didn't get a single hard register (e.g. a parallel), give up.  */
  if (GET_CODE (hard) != REG)
    return 0;

  /* Stack adjustment done after call may appear here.  */
  insn = skip_stack_adjustment (insn);
  if (! insn)
    return 0;

  /* If there's nothing after, there's no soft return value.  */
  insn = NEXT_INSN (insn);
  if (! insn)
    return 0;

  /* We're looking for a source of the hard return register.  */
  set = single_set (insn);
  if (! set || SET_SRC (set) != hard)
    return 0;

  soft = SET_DEST (set);
  insn = NEXT_INSN (insn);

  /* Allow this first destination to be copied to a second register,
     as might happen if the first register wasn't the particular pseudo
     we'd been expecting.  */
  if (insn
      && (set = single_set (insn)) != NULL_RTX
      && SET_SRC (set) == soft)
    {
      soft = SET_DEST (set);
      insn = NEXT_INSN (insn);
    }

  /* Don't fool with anything but pseudo registers.  */
  if (GET_CODE (soft) != REG || REGNO (soft) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* This value must not be modified before the end of the sequence.  */
  if (reg_set_between_p (soft, insn, NULL_RTX))
    return 0;

  *p_hard_return = hard;
  *p_soft_return = soft;

  return 1;
}

/* If the first real insn after ORIG_INSN copies this function's return
   value from the call's return value pseudo (SOFTRET), then return the
   insn which performs the copy.  Otherwise return ORIG_INSN.  */

static rtx
skip_copy_to_return_value (orig_insn, hardret, softret)
     rtx orig_insn;
     rtx hardret, softret;
{
  rtx insn, set = NULL_RTX;

  insn = next_nonnote_insn (orig_insn);
  if (! insn)
    return orig_insn;

  set = single_set (insn);
  if (! set)
    return orig_insn;

  /* The destination must be the same as the called function's return
     value to ensure that any return value is put in the same place by the
     current function and the function we're calling.

     Further, the source must be the same as the pseudo into which the
     called function's return value was copied.  Otherwise we're returning
     some other value.  */

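/* Editorial note: OUTGOING_REGNO maps a hard register number as seen inside
   a function to the number its caller sees; the two differ only on targets
   with register windows, so the fallback below assumes the identity mapping
   for targets that do not define the macro.  */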
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) (N)
#endif

  if (SET_DEST (set) == current_function_return_rtx
      && REG_P (SET_DEST (set))
      && OUTGOING_REGNO (REGNO (SET_DEST (set))) == REGNO (hardret)
      && SET_SRC (set) == softret)
    return insn;

  /* It did not look like a copy of the return value, so return the
     same insn we were passed.  */
  return orig_insn;
}

/* If the first real insn after ORIG_INSN is a CODE (a USE or CLOBBER) of
   this function's return value, return that insn.  Otherwise return
   ORIG_INSN.  */

static rtx
skip_use_of_return_value (orig_insn, code)
     rtx orig_insn;
     enum rtx_code code;
{
  rtx insn;

  insn = next_nonnote_insn (orig_insn);

  if (insn
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == code
      && (XEXP (PATTERN (insn), 0) == current_function_return_rtx
          || XEXP (PATTERN (insn), 0) == const0_rtx))
    return insn;

  return orig_insn;
}

/* If the first real insn after ORIG_INSN adjusts the stack pointer
   by a constant, return the insn with the stack pointer adjustment.
   Otherwise return ORIG_INSN.  */

static rtx
skip_stack_adjustment (orig_insn)
     rtx orig_insn;
{
  rtx insn, set = NULL_RTX;

  insn = next_nonnote_insn (orig_insn);

  if (insn)
    set = single_set (insn);

  /* The insn must set the stack pointer to the stack pointer plus a
     constant to qualify as the post-call stack adjustment we are
     looking for.  */
  if (insn
      && set
      && GET_CODE (SET_SRC (set)) == PLUS
      && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
      && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
      && SET_DEST (set) == stack_pointer_rtx)
    return insn;

  /* It did not look like a stack adjustment, so return the same insn
     we were passed.  */
  return orig_insn;
}

/* If the first real insn after ORIG_INSN is a jump, return the JUMP_INSN.
   Otherwise return ORIG_INSN.  */

static rtx
skip_jump_insn (orig_insn)
     rtx orig_insn;
{
  rtx insn;

  insn = next_nonnote_insn (orig_insn);

  if (insn
      && GET_CODE (insn) == JUMP_INSN
      && simplejump_p (insn))
    return insn;

  return orig_insn;
}

/* Scan the rtx X for ADDRESSOF expressions.  Return nonzero if an ADDRESSOF
   expression is found, else return zero.  */

static int
uses_addressof (x)
     rtx x;
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == ADDRESSOF)
    return 1;

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (uses_addressof (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (uses_addressof (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Scan the sequence of insns in SEQ to see if any have an ADDRESSOF
   rtl expression.  If an ADDRESSOF expression is found, return nonzero,
   else return zero.

   This function handles CALL_PLACEHOLDERs which contain multiple sequences
   of insns.  */

static int
sequence_uses_addressof (seq)
     rtx seq;
{
  rtx insn;

  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
        /* If this is a CALL_PLACEHOLDER, then recursively call ourselves
           with each nonempty sequence attached to the CALL_PLACEHOLDER.  */
        if (GET_CODE (insn) == CALL_INSN
            && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
          {
            if (XEXP (PATTERN (insn), 0) != NULL_RTX
                && sequence_uses_addressof (XEXP (PATTERN (insn), 0)))
              return 1;
            if (XEXP (PATTERN (insn), 1) != NULL_RTX
                && sequence_uses_addressof (XEXP (PATTERN (insn), 1)))
              return 1;
            if (XEXP (PATTERN (insn), 2) != NULL_RTX
                && sequence_uses_addressof (XEXP (PATTERN (insn), 2)))
              return 1;
          }
        else if (uses_addressof (PATTERN (insn))
                 || (REG_NOTES (insn) && uses_addressof (REG_NOTES (insn))))
          return 1;
      }

  return 0;
}

/* Remove all REG_EQUIV notes found in the insn chain.  */

static void
purge_reg_equiv_notes ()
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      while (1)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, 0);
          if (note)
            {
              /* Remove the note and keep looking at the notes for
                 this insn.  */
              remove_note (insn, note);
              continue;
            }
          break;
        }
    }
}

/* Replace the CALL_PLACEHOLDER with one of its children.  INSN should be
   the CALL_PLACEHOLDER insn; USE tells which child to use.  */
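/* Editorial note, matching the uses below: operand 0 of the CALL_PLACEHOLDER
   holds the normal call sequence, operand 1 the sibling call sequence,
   operand 2 the tail recursion sequence, and operand 3 the tail recursion
   label, if any.  */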
void
replace_call_placeholder (insn, use)
     rtx insn;
     sibcall_use_t use;
{
  if (use == sibcall_use_tail_recursion)
    emit_insns_before (XEXP (PATTERN (insn), 2), insn);
  else if (use == sibcall_use_sibcall)
    emit_insns_before (XEXP (PATTERN (insn), 1), insn);
  else if (use == sibcall_use_normal)
    emit_insns_before (XEXP (PATTERN (insn), 0), insn);
  else
    abort ();

  /* Turn off LABEL_PRESERVE_P for the tail recursion label if it
     exists.  We only had to set it long enough to keep the jump
     pass above from deleting it as unused.  */
  if (XEXP (PATTERN (insn), 3))
    LABEL_PRESERVE_P (XEXP (PATTERN (insn), 3)) = 0;

  /* "Delete" the placeholder insn.  */
  PUT_CODE (insn, NOTE);
  NOTE_SOURCE_FILE (insn) = 0;
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
}

/* Given a (possibly empty) set of potential sibling or tail recursion call
   sites, determine if optimization is possible.

   Potential sibling or tail recursion calls are marked with CALL_PLACEHOLDER
   insns.  The CALL_PLACEHOLDER insn holds chains of insns to implement a
   normal call, sibling call or tail recursive call.

   Replace the CALL_PLACEHOLDER with an appropriate insn chain.  */

void
optimize_sibling_and_tail_recursive_calls ()
{
  rtx insn, insns;
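  /* Editorial note: ALTERNATE_EXIT records a block, other than the exit
     block itself, that does nothing but fall through to the exit block;
     a call whose block reaches only that block is treated the same as one
     reaching EXIT_BLOCK_PTR directly (see the search below).  */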
  basic_block alternate_exit = EXIT_BLOCK_PTR;
  int current_function_uses_addressof;
  int successful_sibling_call = 0;
  int replaced_call_placeholder = 0;
  edge e;

  insns = get_insns ();

  /* We do not perform these optimizations when flag_exceptions is true,
     so this is probably a NOP at the current time.  However, we may want
     to support sibling and tail recursion optimizations in the future, so
     let's plan ahead and find all the EH labels.  */
  find_exception_handler_labels ();

  /* Run a jump optimization pass to clean up the CFG.  We primarily want
     this to thread jumps so that it is obvious which blocks jump to the
     epilogue.  */
  jump_optimize_minimal (insns);

  /* We need cfg information to determine which blocks are succeeded
     only by the epilogue.  */
  find_basic_blocks (insns, max_reg_num (), 0);
  cleanup_cfg (insns);

  /* If there are no basic blocks, then there is nothing to do.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the exit block.

     It is possible that we have blocks which can reach the exit block
     directly.  However, most of the time a block will jump (or fall into)
     N_BASIC_BLOCKS - 1, which in turn falls into the exit block.  */
  for (e = EXIT_BLOCK_PTR->pred;
       e && alternate_exit == EXIT_BLOCK_PTR;
       e = e->pred_next)
    {
      rtx insn;

      if (e->dest != EXIT_BLOCK_PTR || e->succ_next != NULL)
        continue;

      /* Walk forwards through the last normal block and see if it
         does nothing except fall into the exit block.  */
      for (insn = BLOCK_HEAD (n_basic_blocks - 1);
           insn;
           insn = NEXT_INSN (insn))
        {
          /* This should only happen once, at the start of this block.  */
          if (GET_CODE (insn) == CODE_LABEL)
            continue;

          if (GET_CODE (insn) == NOTE)
            continue;

          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == USE)
            continue;

          break;
        }

      /* If INSN is zero, then the search walked all the way through the
         block without hitting anything interesting.  This block is a
         valid alternate exit block.  */
      if (insn == NULL)
        alternate_exit = e->src;
    }

  /* If the function uses ADDRESSOF, we can't (easily) determine
     at this point if the value will end up on the stack.  */
  current_function_uses_addressof = sequence_uses_addressof (insns);

  /* Walk the insn chain and find any CALL_PLACEHOLDER insns.  We need to
     select one of the insn sequences attached to each CALL_PLACEHOLDER.

     The different sequences represent different ways to implement the call,
     i.e., tail recursion, sibling call or normal call.

     Since we do not create nested CALL_PLACEHOLDERs, the scan
     continues with the insn that was after a replaced CALL_PLACEHOLDER;
     we don't rescan the replacement insns.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
          && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
        {
          int sibcall = (XEXP (PATTERN (insn), 1) != NULL_RTX);
          int tailrecursion = (XEXP (PATTERN (insn), 2) != NULL_RTX);
          basic_block succ_block, call_block;
          rtx temp, hardret, softret;
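          /* Editorial note: TEMP walks forward from the call toward the end
             of its block; HARDRET and SOFTRET, when identified, are the hard
             register and the pseudo holding the call's return value.  */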
          /* We must be careful with stack slots which are live at
             potential optimization sites.

             ?!? This test is overly conservative and will be replaced.  */
          if (frame_offset)
            goto failure;

          /* alloca (until we have stack slot life analysis) inhibits
             sibling call optimizations, but not tail recursion.

             Similarly if we have ADDRESSOF expressions.

             Similarly if we use varargs or stdarg since they implicitly
             may take the address of an argument.  */
          if (current_function_calls_alloca || current_function_uses_addressof
              || current_function_varargs || current_function_stdarg)
            sibcall = 0;

          call_block = BLOCK_FOR_INSN (insn);

          /* If the block has more than one successor, then we can not
             perform sibcall or tail recursion optimizations.  */
          if (call_block->succ == NULL
              || call_block->succ->succ_next != NULL)
            goto failure;

          /* If the single successor is not the exit block, then we can not
             perform sibcall or tail recursion optimizations.

             Note that this test combined with the previous is sufficient
             to prevent tail call optimization in the presence of active
             exception handlers.  */
          succ_block = call_block->succ->dest;
          if (succ_block != EXIT_BLOCK_PTR && succ_block != alternate_exit)
            goto failure;

          /* If the call was the end of the block, then we're OK.  */
          temp = insn;
          if (temp == call_block->end)
            goto success;

          /* Skip over copying from the call's return value pseudo into
             this function's hard return register.  */
          if (identify_call_return_value (PATTERN (insn), &hardret, &softret))
            {
              temp = skip_copy_to_return_value (temp, hardret, softret);
              if (temp == call_block->end)
                goto success;
            }

          /* Skip any stack adjustment.  */
          temp = skip_stack_adjustment (temp);
          if (temp == call_block->end)
            goto success;

          /* Skip over a CLOBBER of the return value (as a hard reg).  */
          temp = skip_use_of_return_value (temp, CLOBBER);
          if (temp == call_block->end)
            goto success;

          /* Skip over a USE of the return value (as a hard reg).  */
          temp = skip_use_of_return_value (temp, USE);
          if (temp == call_block->end)
            goto success;

          /* Skip over the JUMP_INSN at the end of the block.  */
          temp = skip_jump_insn (temp);
          if (GET_CODE (temp) == NOTE)
            temp = next_nonnote_insn (temp);
          if (temp == call_block->end)
            goto success;

          /* There are operations at the end of the block which we must
             execute after returning from the function call.  So this call
             can not be optimized.  */
        failure:
          sibcall = 0, tailrecursion = 0;
        success:

          /* Select a set of insns to implement the call and emit them.
             Tail recursion is the most efficient, so select it over
             a sibling call.  */
          if (sibcall)
            successful_sibling_call = 1;
          replaced_call_placeholder = 1;
          replace_call_placeholder (insn,
                                    tailrecursion != 0
                                    ? sibcall_use_tail_recursion
                                    : sibcall != 0
                                    ? sibcall_use_sibcall
                                    : sibcall_use_normal);
        }
    }

  /* A sibling call sequence invalidates any REG_EQUIV notes made for
     this function's incoming arguments.

     At the start of RTL generation we know the only REG_EQUIV notes
     in the rtl chain are those for incoming arguments, so we can safely
     flush any REG_EQUIV note.

     This is (slight) overkill.  We could keep track of the highest argument
     we clobber and be more selective in removing notes, but it does not
     seem to be worth the effort.  */
  if (successful_sibling_call)
    purge_reg_equiv_notes ();

  /* There may have been NOTE_INSN_BLOCK_{BEGIN,END} notes in the
     CALL_PLACEHOLDER alternatives that we didn't emit.  Rebuild the
     lexical block tree to correspond to the notes that still exist.  */
  if (replaced_call_placeholder)
    reorder_blocks ();

  /* This information will be invalid after inline expansion.  Kill it now.  */
  free_basic_block_vars (0);
}