/* Source: gcc/combine-stack-adj.c from official-gcc.git
   (blob a74b65ab095388e987ec63310343878ba62020e3); page was captured from a
   gitweb view whose header also carried the unrelated subject line
   "[Patch AArch64 1/3] Enable CRC by default for armv8.1-a".  */
1 /* Combine stack adjustments.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Track stack adjustments and stack memory references. Attempt to
21 reduce the number of stack adjustments by back-propagating across
22 the memory references.
24 This is intended primarily for use with targets that do not define
25 ACCUMULATE_OUTGOING_ARGS. It is of significantly more value to
26 targets that define PREFERRED_STACK_BOUNDARY more aligned than
27 STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
28 (e.g. x86 fp regs) which would ordinarily have to be implemented
29 as a sub/mov pair due to restrictions in calls.c.
31 Propagation stops when any of the insns that need adjusting are
32 (a) no longer valid because we've exceeded their range, (b) a
33 non-trivial push instruction, or (c) a call instruction.
35 Restriction B is based on the assumption that push instructions
36 are smaller or faster. If a port really wants to remove all
37 pushes, it should have defined ACCUMULATE_OUTGOING_ARGS. The
38 one exception that is made is for an add immediately followed
39 by a push. */
41 #include "config.h"
42 #include "system.h"
43 #include "coretypes.h"
44 #include "backend.h"
45 #include "rtl.h"
46 #include "df.h"
47 #include "insn-config.h"
48 #include "emit-rtl.h"
49 #include "recog.h"
50 #include "cfgrtl.h"
51 #include "tree-pass.h"
52 #include "rtl-iter.h"
55 /* This structure records two kinds of stack references between stack
56 adjusting instructions: stack references in memory addresses for
57 regular insns and all stack references for debug insns. */
59 struct csa_reflist
61 HOST_WIDE_INT sp_offset;
62 rtx_insn *insn;
63 rtx *ref;
64 struct csa_reflist *next;
67 static int stack_memref_p (rtx);
68 static rtx single_set_for_csa (rtx_insn *);
69 static void free_csa_reflist (struct csa_reflist *);
70 static struct csa_reflist *record_one_stack_ref (rtx_insn *, rtx *,
71 struct csa_reflist *);
72 static int try_apply_stack_adjustment (rtx_insn *, struct csa_reflist *,
73 HOST_WIDE_INT, HOST_WIDE_INT);
74 static void combine_stack_adjustments_for_block (basic_block);
77 /* Main entry point for stack adjustment combination. */
79 static void
80 combine_stack_adjustments (void)
82 basic_block bb;
84 FOR_EACH_BB_FN (bb, cfun)
85 combine_stack_adjustments_for_block (bb);
88 /* Recognize a MEM of the form (sp) or (plus sp const). */
90 static int
91 stack_memref_p (rtx x)
93 if (!MEM_P (x))
94 return 0;
95 x = XEXP (x, 0);
97 if (x == stack_pointer_rtx)
98 return 1;
99 if (GET_CODE (x) == PLUS
100 && XEXP (x, 0) == stack_pointer_rtx
101 && CONST_INT_P (XEXP (x, 1)))
102 return 1;
104 return 0;
107 /* Recognize either normal single_set or the hack in i386.md for
108 tying fp and sp adjustments. */
110 static rtx
111 single_set_for_csa (rtx_insn *insn)
113 int i;
114 rtx tmp = single_set (insn);
115 if (tmp)
116 return tmp;
118 if (!NONJUMP_INSN_P (insn)
119 || GET_CODE (PATTERN (insn)) != PARALLEL)
120 return NULL_RTX;
122 tmp = PATTERN (insn);
123 if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
124 return NULL_RTX;
126 for (i = 1; i < XVECLEN (tmp, 0); ++i)
128 rtx this_rtx = XVECEXP (tmp, 0, i);
130 /* The special case is allowing a no-op set. */
131 if (GET_CODE (this_rtx) == SET
132 && SET_SRC (this_rtx) == SET_DEST (this_rtx))
134 else if (GET_CODE (this_rtx) != CLOBBER
135 && GET_CODE (this_rtx) != USE)
136 return NULL_RTX;
139 return XVECEXP (tmp, 0, 0);
142 /* Free the list of csa_reflist nodes. */
144 static void
145 free_csa_reflist (struct csa_reflist *reflist)
147 struct csa_reflist *next;
148 for (; reflist ; reflist = next)
150 next = reflist->next;
151 free (reflist);
155 /* Create a new csa_reflist node from the given stack reference.
156 It is already known that the reference is either a MEM satisfying the
157 predicate stack_memref_p or a REG representing the stack pointer. */
159 static struct csa_reflist *
160 record_one_stack_ref (rtx_insn *insn, rtx *ref, struct csa_reflist *next_reflist)
162 struct csa_reflist *ml;
164 ml = XNEW (struct csa_reflist);
166 if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
167 ml->sp_offset = 0;
168 else
169 ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));
171 ml->insn = insn;
172 ml->ref = ref;
173 ml->next = next_reflist;
175 return ml;
178 /* We only know how to adjust the CFA; no other frame-related changes
179 may appear in any insn to be deleted. */
181 static bool
182 no_unhandled_cfa (rtx_insn *insn)
184 if (!RTX_FRAME_RELATED_P (insn))
185 return true;
187 /* No CFA notes at all is a legacy interpretation like
188 FRAME_RELATED_EXPR, and is context sensitive within
189 the prologue state machine. We can't handle that here. */
190 bool has_cfa_adjust = false;
192 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
193 switch (REG_NOTE_KIND (link))
195 default:
196 break;
197 case REG_CFA_ADJUST_CFA:
198 has_cfa_adjust = true;
199 break;
201 case REG_FRAME_RELATED_EXPR:
202 case REG_CFA_DEF_CFA:
203 case REG_CFA_OFFSET:
204 case REG_CFA_REGISTER:
205 case REG_CFA_EXPRESSION:
206 case REG_CFA_RESTORE:
207 case REG_CFA_SET_VDRAP:
208 case REG_CFA_WINDOW_SAVE:
209 case REG_CFA_FLUSH_QUEUE:
210 return false;
213 return has_cfa_adjust;
216 /* Attempt to apply ADJUST to the stack adjusting insn INSN, as well
217 as each of the memories and stack references in REFLIST. Return true
218 on success. */
220 static int
221 try_apply_stack_adjustment (rtx_insn *insn, struct csa_reflist *reflist,
222 HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
224 struct csa_reflist *ml;
225 rtx set;
227 set = single_set_for_csa (insn);
228 if (MEM_P (SET_DEST (set)))
229 validate_change (insn, &SET_DEST (set),
230 replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
232 else
233 validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);
235 for (ml = reflist; ml ; ml = ml->next)
237 rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
238 ml->sp_offset - delta);
239 rtx new_val;
241 if (MEM_P (*ml->ref))
242 new_val = replace_equiv_address_nv (*ml->ref, new_addr);
243 else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
244 new_val = new_addr;
245 else
246 new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
247 GET_MODE (new_addr));
248 validate_change (ml->insn, ml->ref, new_val, 1);
251 if (apply_change_group ())
253 /* Succeeded. Update our knowledge of the stack references. */
254 for (ml = reflist; ml ; ml = ml->next)
255 ml->sp_offset -= delta;
257 return 1;
259 else
260 return 0;
263 /* For non-debug insns, record all stack memory references in INSN
264 and return true if there were no other (unrecorded) references to the
265 stack pointer. For debug insns, record all stack references regardless
266 of context and unconditionally return true. */
268 static bool
269 record_stack_refs (rtx_insn *insn, struct csa_reflist **reflist)
271 subrtx_ptr_iterator::array_type array;
272 FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), NONCONST)
274 rtx *loc = *iter;
275 rtx x = *loc;
276 switch (GET_CODE (x))
278 case MEM:
279 if (!reg_mentioned_p (stack_pointer_rtx, x))
280 iter.skip_subrtxes ();
281 /* We are not able to handle correctly all possible memrefs
282 containing stack pointer, so this check is necessary. */
283 else if (stack_memref_p (x))
285 *reflist = record_one_stack_ref (insn, loc, *reflist);
286 iter.skip_subrtxes ();
288 /* Try harder for DEBUG_INSNs, handle e.g.
289 (mem (mem (sp + 16) + 4). */
290 else if (!DEBUG_INSN_P (insn))
291 return false;
292 break;
294 case REG:
295 /* ??? We want be able to handle non-memory stack pointer
296 references later. For now just discard all insns referring to
297 stack pointer outside mem expressions. We would probably
298 want to teach validate_replace to simplify expressions first.
300 We can't just compare with STACK_POINTER_RTX because the
301 reference to the stack pointer might be in some other mode.
302 In particular, an explicit clobber in an asm statement will
303 result in a QImode clobber.
305 In DEBUG_INSNs, we want to replace all occurrences, otherwise
306 they will cause -fcompare-debug failures. */
307 if (REGNO (x) == STACK_POINTER_REGNUM)
309 if (!DEBUG_INSN_P (insn))
310 return false;
311 *reflist = record_one_stack_ref (insn, loc, *reflist);
313 break;
315 default:
316 break;
319 return true;
322 /* If INSN has a REG_ARGS_SIZE note, move it to LAST.
323 AFTER is true iff LAST follows INSN in the instruction stream. */
325 static void
326 maybe_move_args_size_note (rtx_insn *last, rtx_insn *insn, bool after)
328 rtx note, last_note;
330 note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
331 if (note == NULL)
332 return;
334 last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
335 if (last_note)
337 /* The ARGS_SIZE notes are *not* cumulative. They represent an
338 absolute value, and the "most recent" note wins. */
339 if (!after)
340 XEXP (last_note, 0) = XEXP (note, 0);
342 else
343 add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
346 /* Merge any REG_CFA_ADJUST_CFA note from SRC into DST.
347 AFTER is true iff DST follows SRC in the instruction stream. */
349 static void
350 maybe_merge_cfa_adjust (rtx_insn *dst, rtx_insn *src, bool after)
352 rtx snote = NULL, dnote = NULL;
353 rtx sexp, dexp;
354 rtx exp1, exp2;
356 if (RTX_FRAME_RELATED_P (src))
357 snote = find_reg_note (src, REG_CFA_ADJUST_CFA, NULL_RTX);
358 if (snote == NULL)
359 return;
360 sexp = XEXP (snote, 0);
362 if (RTX_FRAME_RELATED_P (dst))
363 dnote = find_reg_note (dst, REG_CFA_ADJUST_CFA, NULL_RTX);
364 if (dnote == NULL)
366 add_reg_note (dst, REG_CFA_ADJUST_CFA, sexp);
367 return;
369 dexp = XEXP (dnote, 0);
371 gcc_assert (GET_CODE (sexp) == SET);
372 gcc_assert (GET_CODE (dexp) == SET);
374 if (after)
375 exp1 = dexp, exp2 = sexp;
376 else
377 exp1 = sexp, exp2 = dexp;
379 SET_SRC (exp1) = simplify_replace_rtx (SET_SRC (exp1), SET_DEST (exp2),
380 SET_SRC (exp2));
381 XEXP (dnote, 0) = exp1;
384 /* Return the next (or previous) active insn within BB. */
386 static rtx_insn *
387 prev_active_insn_bb (basic_block bb, rtx_insn *insn)
389 for (insn = PREV_INSN (insn);
390 insn != PREV_INSN (BB_HEAD (bb));
391 insn = PREV_INSN (insn))
392 if (active_insn_p (insn))
393 return insn;
394 return NULL;
397 static rtx_insn *
398 next_active_insn_bb (basic_block bb, rtx_insn *insn)
400 for (insn = NEXT_INSN (insn);
401 insn != NEXT_INSN (BB_END (bb));
402 insn = NEXT_INSN (insn))
403 if (active_insn_p (insn))
404 return insn;
405 return NULL;
408 /* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV. Otherwise
409 search for a nearby candidate within BB where we can stick the note. */
411 static void
412 force_move_args_size_note (basic_block bb, rtx_insn *prev, rtx_insn *insn)
414 rtx note;
415 rtx_insn *test, *next_candidate, *prev_candidate;
417 /* If PREV exists, tail-call to the logic in the other function. */
418 if (prev)
420 maybe_move_args_size_note (prev, insn, false);
421 return;
424 /* First, make sure there's anything that needs doing. */
425 note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
426 if (note == NULL)
427 return;
429 /* We need to find a spot between the previous and next exception points
430 where we can place the note and "properly" deallocate the arguments. */
431 next_candidate = prev_candidate = NULL;
433 /* It is often the case that we have insns in the order:
434 call
435 add sp (previous deallocation)
436 sub sp (align for next arglist)
437 push arg
438 and the add/sub cancel. Therefore we begin by searching forward. */
440 test = insn;
441 while ((test = next_active_insn_bb (bb, test)) != NULL)
443 /* Found an existing note: nothing to do. */
444 if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
445 return;
446 /* Found something that affects unwinding. Stop searching. */
447 if (CALL_P (test) || !insn_nothrow_p (test))
448 break;
449 if (next_candidate == NULL)
450 next_candidate = test;
453 test = insn;
454 while ((test = prev_active_insn_bb (bb, test)) != NULL)
456 rtx tnote;
457 /* Found a place that seems logical to adjust the stack. */
458 tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
459 if (tnote)
461 XEXP (tnote, 0) = XEXP (note, 0);
462 return;
464 if (prev_candidate == NULL)
465 prev_candidate = test;
466 /* Found something that affects unwinding. Stop searching. */
467 if (CALL_P (test) || !insn_nothrow_p (test))
468 break;
471 if (prev_candidate)
472 test = prev_candidate;
473 else if (next_candidate)
474 test = next_candidate;
475 else
477 /* ??? We *must* have a place, lest we ICE on the lost adjustment.
478 Options are: dummy clobber insn, nop, or prevent the removal of
479 the sp += 0 insn. */
480 /* TODO: Find another way to indicate to the dwarf2 code that we
481 have not in fact lost an adjustment. */
482 test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
484 add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
487 /* Subroutine of combine_stack_adjustments, called for each basic block. */
489 static void
490 combine_stack_adjustments_for_block (basic_block bb)
492 HOST_WIDE_INT last_sp_adjust = 0;
493 rtx_insn *last_sp_set = NULL;
494 rtx_insn *last2_sp_set = NULL;
495 struct csa_reflist *reflist = NULL;
496 rtx_insn *insn, *next;
497 rtx set;
498 bool end_of_block = false;
500 for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
502 end_of_block = insn == BB_END (bb);
503 next = NEXT_INSN (insn);
505 if (! INSN_P (insn))
506 continue;
508 set = single_set_for_csa (insn);
509 if (set)
511 rtx dest = SET_DEST (set);
512 rtx src = SET_SRC (set);
514 /* Find constant additions to the stack pointer. */
515 if (dest == stack_pointer_rtx
516 && GET_CODE (src) == PLUS
517 && XEXP (src, 0) == stack_pointer_rtx
518 && CONST_INT_P (XEXP (src, 1)))
520 HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));
522 /* If we've not seen an adjustment previously, record
523 it now and continue. */
524 if (! last_sp_set)
526 last_sp_set = insn;
527 last_sp_adjust = this_adjust;
528 continue;
531 /* If not all recorded refs can be adjusted, or the
532 adjustment is now too large for a constant addition,
533 we cannot merge the two stack adjustments.
535 Also we need to be careful to not move stack pointer
536 such that we create stack accesses outside the allocated
537 area. We can combine an allocation into the first insn,
538 or a deallocation into the second insn. We can not
539 combine an allocation followed by a deallocation.
541 The only somewhat frequent occurrence of the later is when
542 a function allocates a stack frame but does not use it.
543 For this case, we would need to analyze rtl stream to be
544 sure that allocated area is really unused. This means not
545 only checking the memory references, but also all registers
546 or global memory references possibly containing a stack
547 frame address.
549 Perhaps the best way to address this problem is to teach
550 gcc not to allocate stack for objects never used. */
552 /* Combine an allocation into the first instruction. */
553 if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
555 if (no_unhandled_cfa (insn)
556 && try_apply_stack_adjustment (last_sp_set, reflist,
557 last_sp_adjust
558 + this_adjust,
559 this_adjust))
561 /* It worked! */
562 maybe_move_args_size_note (last_sp_set, insn, false);
563 maybe_merge_cfa_adjust (last_sp_set, insn, false);
564 delete_insn (insn);
565 last_sp_adjust += this_adjust;
566 continue;
570 /* Otherwise we have a deallocation. Do not combine with
571 a previous allocation. Combine into the second insn. */
572 else if (STACK_GROWS_DOWNWARD
573 ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
575 if (no_unhandled_cfa (last_sp_set)
576 && try_apply_stack_adjustment (insn, reflist,
577 last_sp_adjust
578 + this_adjust,
579 -last_sp_adjust))
581 /* It worked! */
582 maybe_move_args_size_note (insn, last_sp_set, true);
583 maybe_merge_cfa_adjust (insn, last_sp_set, true);
584 delete_insn (last_sp_set);
585 last_sp_set = insn;
586 last_sp_adjust += this_adjust;
587 free_csa_reflist (reflist);
588 reflist = NULL;
589 continue;
593 /* Combination failed. Restart processing from here. If
594 deallocation+allocation conspired to cancel, we can
595 delete the old deallocation insn. */
596 if (last_sp_set)
598 if (last_sp_adjust == 0 && no_unhandled_cfa (last_sp_set))
600 maybe_move_args_size_note (insn, last_sp_set, true);
601 maybe_merge_cfa_adjust (insn, last_sp_set, true);
602 delete_insn (last_sp_set);
604 else
605 last2_sp_set = last_sp_set;
607 free_csa_reflist (reflist);
608 reflist = NULL;
609 last_sp_set = insn;
610 last_sp_adjust = this_adjust;
611 continue;
614 /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
615 the previous adjustment and turn it into a simple store. This
616 is equivalent to anticipating the stack adjustment so this must
617 be an allocation. */
618 if (MEM_P (dest)
619 && ((STACK_GROWS_DOWNWARD
620 ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
621 && last_sp_adjust
622 == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
623 : (GET_CODE (XEXP (dest, 0)) == PRE_INC
624 && last_sp_adjust
625 == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
626 || ((STACK_GROWS_DOWNWARD
627 ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
628 && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
629 && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
630 && XEXP (XEXP (XEXP (dest, 0), 1), 0)
631 == stack_pointer_rtx
632 && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
633 == CONST_INT
634 && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
635 == -last_sp_adjust))
636 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
637 && !reg_mentioned_p (stack_pointer_rtx, src)
638 && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
639 && try_apply_stack_adjustment (insn, reflist, 0,
640 -last_sp_adjust))
642 if (last2_sp_set)
643 maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
644 else
645 maybe_move_args_size_note (insn, last_sp_set, true);
646 delete_insn (last_sp_set);
647 free_csa_reflist (reflist);
648 reflist = NULL;
649 last_sp_set = NULL;
650 last_sp_adjust = 0;
651 continue;
655 if (!CALL_P (insn) && last_sp_set
656 && record_stack_refs (insn, &reflist))
657 continue;
659 /* Otherwise, we were not able to process the instruction.
660 Do not continue collecting data across such a one. */
661 if (last_sp_set
662 && (CALL_P (insn)
663 || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
665 if (last_sp_set && last_sp_adjust == 0)
667 force_move_args_size_note (bb, last2_sp_set, last_sp_set);
668 delete_insn (last_sp_set);
670 free_csa_reflist (reflist);
671 reflist = NULL;
672 last2_sp_set = NULL;
673 last_sp_set = NULL;
674 last_sp_adjust = 0;
678 if (last_sp_set && last_sp_adjust == 0)
680 force_move_args_size_note (bb, last2_sp_set, last_sp_set);
681 delete_insn (last_sp_set);
684 if (reflist)
685 free_csa_reflist (reflist);
688 static unsigned int
689 rest_of_handle_stack_adjustments (void)
691 df_note_add_problem ();
692 df_analyze ();
693 combine_stack_adjustments ();
694 return 0;
697 namespace {
699 const pass_data pass_data_stack_adjustments =
701 RTL_PASS, /* type */
702 "csa", /* name */
703 OPTGROUP_NONE, /* optinfo_flags */
704 TV_COMBINE_STACK_ADJUST, /* tv_id */
705 0, /* properties_required */
706 0, /* properties_provided */
707 0, /* properties_destroyed */
708 0, /* todo_flags_start */
709 TODO_df_finish, /* todo_flags_finish */
712 class pass_stack_adjustments : public rtl_opt_pass
714 public:
715 pass_stack_adjustments (gcc::context *ctxt)
716 : rtl_opt_pass (pass_data_stack_adjustments, ctxt)
719 /* opt_pass methods: */
720 virtual bool gate (function *);
721 virtual unsigned int execute (function *)
723 return rest_of_handle_stack_adjustments ();
726 }; // class pass_stack_adjustments
728 bool
729 pass_stack_adjustments::gate (function *)
731 /* This is kind of a heuristic. We need to run combine_stack_adjustments
732 even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
733 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
734 push instructions will have popping returns. */
735 #ifndef PUSH_ROUNDING
736 if (ACCUMULATE_OUTGOING_ARGS)
737 return false;
738 #endif
739 return flag_combine_stack_adjustments;
742 } // anon namespace
744 rtl_opt_pass *
745 make_pass_stack_adjustments (gcc::context *ctxt)
747 return new pass_stack_adjustments (ctxt);