/* Combine stack adjustments.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Track stack adjustments and stack memory references.  Attempt to
   reduce the number of stack adjustments by back-propagating across
   the memory references.

   This is intended primarily for use with targets that do not define
   ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
   targets that define PREFERRED_STACK_BOUNDARY more aligned than
   STACK_BOUNDARY (e.g. x86), or to targets where not all registers
   can be pushed (e.g. x86 fp regs), since such stores would otherwise
   have to be implemented as a sub/mov pair due to restrictions in
   calls.c.

   Propagation stops when any of the insns that need adjusting are
   (a) no longer valid because we've exceeded their range, (b) a
   non-trivial push instruction, or (c) a call instruction.

   Restriction (b) is based on the assumption that push instructions
   are smaller or faster.  If a port really wants to remove all
   pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
   one exception that is made is for an add immediately followed
   by a push.  */
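
/* As a schematic illustration (not drawn from any particular target,
   and assuming a downward-growing stack), the pass tries to rewrite

	sp = sp + (-16)
	(set (mem (plus sp 8)) r1)
	sp = sp + (-16)

   into

	sp = sp + (-32)
	(set (mem (plus sp 24)) r1)

   by folding the second adjustment into the first and re-expressing
   the intervening stack memory reference against the new value of the
   stack pointer.  */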

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "basic-block.h"
#include "df.h"
#include "except.h"
#include "reload.h"
#include "tree-pass.h"

/* Turn STACK_GROWS_DOWNWARD into a boolean.  */
#ifdef STACK_GROWS_DOWNWARD
#undef STACK_GROWS_DOWNWARD
#define STACK_GROWS_DOWNWARD 1
#else
#define STACK_GROWS_DOWNWARD 0
#endif

/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */

struct csa_reflist
{
  HOST_WIDE_INT sp_offset;
  rtx insn, *ref;
  struct csa_reflist *next;
};

static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx, rtx *,
                                                 struct csa_reflist *);
static int try_apply_stack_adjustment (rtx, struct csa_reflist *,
                                       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);
static int record_stack_refs (rtx *, void *);

/* Main entry point for stack adjustment combination.  */

static void
combine_stack_adjustments (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    combine_stack_adjustments_for_block (bb);
}

/* Recognize a MEM of the form (sp) or (plus sp const).  */

static int
stack_memref_p (rtx x)
{
  if (!MEM_P (x))
    return 0;
  x = XEXP (x, 0);

  if (x == stack_pointer_rtx)
    return 1;
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == stack_pointer_rtx
      && CONST_INT_P (XEXP (x, 1)))
    return 1;

  return 0;
}

/* Recognize either normal single_set or the hack in i386.md for
   tying fp and sp adjustments.  */
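
/* Schematically (not lifted from any real machine description), a
   PARALLEL of the general shape

	(parallel [(set (reg sp) (plus (reg sp) (const_int -16)))
		   (set (reg fp) (reg fp))
		   (clobber (reg flags))])

   is accepted here and treated as if it were just its first SET:
   besides that SET, only no-op sets, CLOBBERs and USEs may appear.  */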

static rtx
single_set_for_csa (rtx insn)
{
  int i;
  rtx tmp = single_set (insn);
  if (tmp)
    return tmp;

  if (!NONJUMP_INSN_P (insn)
      || GET_CODE (PATTERN (insn)) != PARALLEL)
    return NULL_RTX;

  tmp = PATTERN (insn);
  if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    return NULL_RTX;

  for (i = 1; i < XVECLEN (tmp, 0); ++i)
    {
      rtx this_rtx = XVECEXP (tmp, 0, i);

      /* The special case is allowing a no-op set.  */
      if (GET_CODE (this_rtx) == SET
          && SET_SRC (this_rtx) == SET_DEST (this_rtx))
        ;
      else if (GET_CODE (this_rtx) != CLOBBER
               && GET_CODE (this_rtx) != USE)
        return NULL_RTX;
    }

  return XVECEXP (tmp, 0, 0);
}

/* Free the list of csa_reflist nodes.  */

static void
free_csa_reflist (struct csa_reflist *reflist)
{
  struct csa_reflist *next;
  for (; reflist ; reflist = next)
    {
      next = reflist->next;
      free (reflist);
    }
}

/* Create a new csa_reflist node from the given stack reference.
   It is already known that the reference is either a MEM satisfying the
   predicate stack_memref_p or a REG representing the stack pointer.  */

static struct csa_reflist *
record_one_stack_ref (rtx insn, rtx *ref, struct csa_reflist *next_reflist)
{
  struct csa_reflist *ml;

  ml = XNEW (struct csa_reflist);

  if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
    ml->sp_offset = 0;
  else
    ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));

  ml->insn = insn;
  ml->ref = ref;
  ml->next = next_reflist;

  return ml;
}

/* Attempt to apply NEW_ADJUST to the stack adjusting insn INSN, as well
   as each of the memories and stack references in REFLIST.  Return true
   on success.  */

static int
try_apply_stack_adjustment (rtx insn, struct csa_reflist *reflist,
                            HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_reflist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    validate_change (insn, &SET_DEST (set),
                     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
                     1);
  else
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
                                    ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
        new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
        new_val = new_addr;
      else
        new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
                                  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
        ml->sp_offset -= delta;

      return 1;
    }
  else
    return 0;
}

/* Called via for_each_rtx and used to record all stack memory and other
   references in the insn and discard all other stack pointer references.  */
struct record_stack_refs_data
{
  rtx insn;
  struct csa_reflist *reflist;
};
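
/* record_stack_refs is the for_each_rtx callback: returning 0 continues
   the walk, returning -1 skips the sub-rtxes of the current expression,
   and returning a positive value aborts the walk, which the caller in
   combine_stack_adjustments_for_block treats as "this insn cannot be
   handled".  */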

static int
record_stack_refs (rtx *xp, void *data)
{
  rtx x = *xp;
  struct record_stack_refs_data *d =
    (struct record_stack_refs_data *) data;
  if (!x)
    return 0;
  switch (GET_CODE (x))
    {
    case MEM:
      if (!reg_mentioned_p (stack_pointer_rtx, x))
        return -1;
      /* We are not able to handle correctly all possible memrefs
         containing the stack pointer, so this check is necessary.  */
      if (stack_memref_p (x))
        {
          d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
          return -1;
        }
      /* Try harder for DEBUG_INSNs, handle e.g. (mem (mem (sp + 16) + 4)).  */
      return !DEBUG_INSN_P (d->insn);
    case REG:
      /* ??? We want to be able to handle non-memory stack pointer
         references later.  For now just discard all insns referring to
         the stack pointer outside mem expressions.  We would probably
         want to teach validate_replace to simplify expressions first.

         We can't just compare with STACK_POINTER_RTX because the
         reference to the stack pointer might be in some other mode.
         In particular, an explicit clobber in an asm statement will
         result in a QImode clobber.

         In DEBUG_INSNs, we want to replace all occurrences, otherwise
         they will cause -fcompare-debug failures.  */
      if (REGNO (x) == STACK_POINTER_REGNUM)
        {
          if (!DEBUG_INSN_P (d->insn))
            return 1;
          d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
          return -1;
        }
      break;
    default:
      break;
    }
  return 0;
}

/* If INSN has a REG_ARGS_SIZE note, move it to LAST.
   AFTER is true iff LAST follows INSN in the instruction stream.  */

static void
maybe_move_args_size_note (rtx last, rtx insn, bool after)
{
  rtx note, last_note;

  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
  if (last_note)
    {
      /* The ARGS_SIZE notes are *not* cumulative.  They represent an
         absolute value, and the "most recent" note wins.  */
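      /* For instance, if INSN carries (REG_ARGS_SIZE 16) and LAST, which
         precedes it, carries (REG_ARGS_SIZE 8), the surviving LAST must
         end up claiming an args size of 16, not 24.  */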
      if (!after)
        XEXP (last_note, 0) = XEXP (note, 0);
    }
  else
    add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Return the next (or previous) active insn within BB.  */

static rtx
prev_active_insn_bb (basic_block bb, rtx insn)
{
  for (insn = PREV_INSN (insn);
       insn != PREV_INSN (BB_HEAD (bb));
       insn = PREV_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL_RTX;
}

static rtx
next_active_insn_bb (basic_block bb, rtx insn)
{
  for (insn = NEXT_INSN (insn);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL_RTX;
}

/* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
   search for a nearby candidate within BB where we can stick the note.  */

static void
force_move_args_size_note (basic_block bb, rtx prev, rtx insn)
{
  rtx note, test, next_candidate, prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
        return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
        break;
      if (next_candidate == NULL)
        next_candidate = test;
    }

  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
        {
          XEXP (tnote, 0) = XEXP (note, 0);
          return;
        }
      if (prev_candidate == NULL)
        prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
        break;
    }

  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
         Options are: dummy clobber insn, nop, or prevent the removal of
         the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
         have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Subroutine of combine_stack_adjustments, called for each basic block.  */

static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx last_sp_set = NULL_RTX;
  rtx last2_sp_set = NULL_RTX;
  struct csa_reflist *reflist = NULL;
  rtx insn, next, set;
  struct record_stack_refs_data data;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
        continue;

      set = single_set_for_csa (insn);
      if (set)
        {
          rtx dest = SET_DEST (set);
          rtx src = SET_SRC (set);

          /* Find constant additions to the stack pointer.  */
          if (dest == stack_pointer_rtx
              && GET_CODE (src) == PLUS
              && XEXP (src, 0) == stack_pointer_rtx
              && CONST_INT_P (XEXP (src, 1)))
            {
              HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

              /* If we've not seen an adjustment previously, record
                 it now and continue.  */
              if (! last_sp_set)
                {
                  last_sp_set = insn;
                  last_sp_adjust = this_adjust;
                  continue;
                }

              /* If not all recorded refs can be adjusted, or the
                 adjustment is now too large for a constant addition,
                 we cannot merge the two stack adjustments.

                 Also we need to be careful not to move the stack pointer
                 such that we create stack accesses outside the allocated
                 area.  We can combine an allocation into the first insn,
                 or a deallocation into the second insn.  We cannot
                 combine an allocation followed by a deallocation.

                 The only somewhat frequent occurrence of the latter is when
                 a function allocates a stack frame but does not use it.
                 For this case, we would need to analyze the rtl stream to
                 be sure that the allocated area is really unused.  This
                 means not only checking the memory references, but also
                 all registers or global memory references possibly
                 containing a stack frame address.

                 Perhaps the best way to address this problem is to teach
                 gcc not to allocate stack for objects never used.  */
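
              /* Schematically (assuming a downward-growing stack), in

                     sp = sp + (-32)             allocation
                     (set (mem (plus sp 8)) r1)
                     sp = sp + 32                deallocation

                 the two adjustments must not simply be cancelled: the store
                 would have to be re-expressed as (mem (plus sp -24)) and
                 would then touch memory below the stack pointer, outside
                 any allocated area.  */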

              /* Combine an allocation into the first instruction.  */
              if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
                {
                  if (try_apply_stack_adjustment (last_sp_set, reflist,
                                                  last_sp_adjust + this_adjust,
                                                  this_adjust))
                    {
                      /* It worked!  */
                      maybe_move_args_size_note (last_sp_set, insn, false);
                      delete_insn (insn);
                      last_sp_adjust += this_adjust;
                      continue;
                    }
                }

              /* Otherwise we have a deallocation.  Do not combine with
                 a previous allocation.  Combine into the second insn.  */
              else if (STACK_GROWS_DOWNWARD
                       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                {
                  if (try_apply_stack_adjustment (insn, reflist,
                                                  last_sp_adjust + this_adjust,
                                                  -last_sp_adjust))
                    {
                      /* It worked!  */
                      maybe_move_args_size_note (insn, last_sp_set, true);
                      delete_insn (last_sp_set);
                      last_sp_set = insn;
                      last_sp_adjust += this_adjust;
                      free_csa_reflist (reflist);
                      reflist = NULL;
                      continue;
                    }
                }

              /* Combination failed.  Restart processing from here.  If
                 deallocation+allocation conspired to cancel, we can
                 delete the old deallocation insn.  */
              if (last_sp_set)
                {
                  if (last_sp_adjust == 0)
                    {
                      maybe_move_args_size_note (insn, last_sp_set, true);
                      delete_insn (last_sp_set);
                    }
                  else
                    last2_sp_set = last_sp_set;
                }
              free_csa_reflist (reflist);
              reflist = NULL;
              last_sp_set = insn;
              last_sp_adjust = this_adjust;
              continue;
            }

          /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
             the previous adjustment and turn it into a simple store.  This
             is equivalent to anticipating the stack adjustment so this must
             be an allocation.  */
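          /* Schematically (assuming a 4-byte store and a downward-growing
             stack), the pair

                 sp = sp + 4                    deallocation of 4 bytes
                 (set (mem (pre_dec sp)) r1)    push re-allocating them

             nets out to a plain store at the same address, so it can become

                 (set (mem sp) r1)

             with the earlier sp adjustment deleted.  */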
          if (MEM_P (dest)
              && ((STACK_GROWS_DOWNWARD
                   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
                      && last_sp_adjust
                         == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
                   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
                      && last_sp_adjust
                         == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
                  || ((STACK_GROWS_DOWNWARD
                       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
                      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
                      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
                         == stack_pointer_rtx
                      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                         == CONST_INT
                      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                         == -last_sp_adjust))
              && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
              && !reg_mentioned_p (stack_pointer_rtx, src)
              && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
              && try_apply_stack_adjustment (insn, reflist, 0,
                                             -last_sp_adjust))
            {
              if (last2_sp_set)
                maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
              else
                maybe_move_args_size_note (insn, last_sp_set, true);
              delete_insn (last_sp_set);
              free_csa_reflist (reflist);
              reflist = NULL;
              last_sp_set = NULL_RTX;
              last_sp_adjust = 0;
              continue;
            }
        }

      data.insn = insn;
      data.reflist = reflist;
      if (!CALL_P (insn) && last_sp_set
          && !for_each_rtx (&PATTERN (insn), record_stack_refs, &data))
        {
          reflist = data.reflist;
          continue;
        }
      reflist = data.reflist;

      /* Otherwise, we were not able to process the instruction.
         Do not continue collecting data across such an insn.  */
      if (last_sp_set
          && (CALL_P (insn)
              || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
        {
          if (last_sp_set && last_sp_adjust == 0)
            {
              force_move_args_size_note (bb, last2_sp_set, last_sp_set);
              delete_insn (last_sp_set);
            }
          free_csa_reflist (reflist);
          reflist = NULL;
          last2_sp_set = NULL_RTX;
          last_sp_set = NULL_RTX;
          last_sp_adjust = 0;
        }
    }

  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
}

static bool
gate_handle_stack_adjustments (void)
{
  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (ACCUMULATE_OUTGOING_ARGS)
    return false;
#endif
  return flag_combine_stack_adjustments;
}

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  df_note_add_problem ();
  df_analyze ();
  combine_stack_adjustments ();
  return 0;
}

struct rtl_opt_pass pass_stack_adjustments =
{
 {
  RTL_PASS,
  "csa",                                /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_handle_stack_adjustments,        /* gate */
  rest_of_handle_stack_adjustments,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE_STACK_ADJUST,              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};