/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
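/* Illustrative call sequence (a sketch, not part of this file and not a
   copy of any actual caller): a driver that has already emitted the
   function label via assemble_start_function would typically use the
   entry points described above roughly as

       final_start_function (insns, asm_out_file, optimize);
       final (insns, asm_out_file, optimize);
       final_end_function ();

   where INSNS is the function's rtl chain.  */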
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "cfglayout.h"
#include "tree-pass.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data
				   declarations for e.g. AIX 4.x.  */
#endif

#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
#endif

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif

#ifdef SDB_DEBUGGING_INFO
#include "sdbout.h"
#endif

/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
   null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Bitflags used by final_scan_insn.  */
#define SEEN_BB		1
#define SEEN_NOTE	2
#define SEEN_EMITTED	4
/* Last insn processed by final_scan_insn.  */
static rtx debug_insn;
rtx current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Override filename and line number.  */
static const char *override_filename;
static int override_linenum;

/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
rtx this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */

static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */

static int insn_counter = 0;
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;

/* Nonzero means current function must be given a frame pointer.
   Initialized in function.c to 0.  Set only in reload1.c as per
   the needs of the function.  */

int frame_pointer_needed;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;

/* Nonzero if have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

#ifdef HAVE_conditional_execution
/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;
#endif

#ifdef HAVE_ATTR_length
static int asm_insn_count (rtx);
#endif
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx);
static rtx walk_alter_subreg (rtx *, bool *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
static void output_operand (rtx, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx);
#endif
#ifdef HAVE_cc0
static int alter_cond (rtx);
#endif
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
#ifdef HAVE_ATTR_length
static int align_fuzz (rtx, rtx, int, unsigned);
#endif
/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}

/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).  Zero if not in a delayed branch sequence.  */

int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
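/* For example (illustrative only): while outputting a SEQUENCE holding a
   branch plus two delay-slot insns, XVECLEN (final_sequence, 0) is 3, so
   dbr_sequence_length returns 2 -- the two filled slots, not counting the
   branch that needed them.  */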
/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

VEC(int,heap) *insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* Known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */

struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;

/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  if (uid_shuid)
    {
      free (uid_shuid);
      uid_shuid = 0;
    }
  if (insn_lengths)
    {
      free (insn_lengths);
      insn_lengths = 0;
      insn_lengths_max_uid = 0;
    }
#ifdef HAVE_ATTR_length
  INSN_ADDRESSES_FREE ();
#endif
  if (uid_align)
    {
      free (uid_align);
      uid_align = 0;
    }
}
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static inline int
get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
  rtx body;
  int i;
  int length = 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];

  switch (GET_CODE (insn))
    {
    case CALL_INSN:
      length = fallback_fn (insn);
      break;

    case JUMP_INSN:
      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* Alignment is machine-dependent and should be handled by
	     ADDR_VEC_ALIGN.  */
	}
      else
	length = fallback_fn (insn);
      break;

    case INSN:
      body = PATTERN (insn);
      if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	return 0;
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	length = asm_insn_count (body) * fallback_fn (insn);
      else if (GET_CODE (body) == SEQUENCE)
	for (i = 0; i < XVECLEN (body, 0); i++)
	  length += get_attr_length (XVECEXP (body, 0, i));
      else
	length = fallback_fn (insn);
      break;

    default:
      break;
    }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
#else /* not HAVE_ATTR_length */
  return 0;
#define insn_default_length 0
#define insn_min_length 0
#endif /* not HAVE_ATTR_length */
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */
int
get_attr_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */
int
get_attr_min_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

     if (X >= Y)
       OX = round_up (IX, Y)
     else
       OX = round_up (IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up (foo, Z) >= round_up (bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
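/* A worked instance of the estimate above (illustrative numbers only):
   with IX = 10, X = 4 and Y = 8 we are in the X < Y case, so
     OX = round_up (10, 4) + 8 - 4 = 12 + 4 = 16
   and the estimated padding OX - IX is 6 bytes, a safe upper bound on the
   padding the assembler can actually insert between the two blocks.  */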
#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LABEL_ALIGN_MAX_SKIP
#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LOOP_ALIGN_MAX_SKIP
#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif

#ifndef JUMP_ALIGN_MAX_SKIP
#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
#endif

#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);
}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif

#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)

/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  return LABEL_TO_ALIGNMENT (label);
}

#ifdef HAVE_ATTR_length
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */

/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */
static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}

/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */

int
insn_current_reference_address (rtx branch)
{
  rtx dest, seq;
  int seq_uid;

  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
#endif /* HAVE_ATTR_length */
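/* Illustrative use of the worst-case reference address above (made-up
   numbers): if a forward branch ended at address 100 in the previous pass
   and align_fuzz reports that alignment could add up to 6 bytes between
   the branch and its target, the reference address used is 100 - 6 = 94,
   so the branch is treated as if its target were 6 bytes farther away
   than the naive estimate; a backward branch instead adds the fuzz to its
   current start address.  */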
/* Compute branch alignments based on frequency information in the
   CFG.  */

static unsigned int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;
  int freq_max = 0;
  int freq_threshold = 0;

  if (label_align)
    {
      free (label_align);
      label_align = 0;
    }

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_size)
    return 0;

  if (dump_file)
    {
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
    }
  FOR_EACH_BB (bb)
    if (bb->frequency > freq_max)
      freq_max = bb->frequency;
  freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);

  if (dump_file)
    fprintf (dump_file, "freq_max: %i\n", freq_max);
  FOR_EACH_BB (bb)
    {
      rtx label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      if (!LABEL_P (label)
	  || probably_never_executed_bb_p (bb))
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
		     bb->index, bb->frequency, bb->loop_father->num,
		     bb->loop_depth);
	  continue;
	}
      max_log = LABEL_ALIGN (label);
      max_skip = LABEL_ALIGN_MAX_SKIP;

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}
      if (dump_file)
	{
	  fprintf (dump_file,
		   "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
		   bb->index, bb->frequency, bb->loop_father->num,
		   bb->loop_depth, fallthru_frequency, branch_frequency);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}

      /* There are two purposes to align block with no fallthru incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_frequency > freq_threshold
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " jump alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = JUMP_ALIGN_MAX_SKIP;
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && maybe_hot_bb_p (bb)
	  && branch_frequency + fallthru_frequency > freq_threshold
	  && (branch_frequency
	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
	{
	  log = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " internal loop alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LOOP_ALIGN_MAX_SKIP;
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }

  if (dump_file)
    loop_optimizer_finalize ();
  return 0;
}
struct tree_opt_pass pass_compute_alignments =
{
  "alignments",				/* name */
  NULL,					/* gate */
  compute_alignments,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing
  | TODO_ggc_collect,			/* todo_flags_finish */
  0					/* letter */
};
/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times: for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */

void
shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
  rtx insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#ifdef HAVE_ATTR_length
#define MAX_CODE_ALIGN 16
  rtx seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

#endif
  /* Compute maximum UID and allocate label_align / uid_shuid.  */
  max_uid = get_max_uid ();

  /* Free uid_shuid before reallocating it.  */
  free (uid_shuid);

  uid_shuid = XNEWVEC (int, max_uid);

  if (max_labelno != max_label_num ())
    {
      int old = max_labelno;
      int n_labels;
      int n_old_labels;

      max_labelno = max_label_num ();

      n_labels = max_labelno - min_labelno + 1;
      n_old_labels = old - min_labelno + 1;

      label_align = xrealloc (label_align,
			      n_labels * sizeof (struct label_alignment));

      /* Range of labels grows monotonically in the function.  Failing here
	 means that the initialization of array got lost.  */
      gcc_assert (n_old_labels <= n_labels);

      memset (label_align + n_old_labels, 0,
	      (n_labels - n_old_labels) * sizeof (struct label_alignment));
    }

  /* Initialize label_align and set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;
      if (INSN_P (insn))
	continue;

      if (LABEL_P (insn))
	{
	  rtx next;

	  /* Merge in alignments computed by compute_alignments.  */
	  log = LABEL_TO_ALIGNMENT (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_TO_MAX_SKIP (insn);
	    }

	  log = LABEL_ALIGN (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_ALIGN_MAX_SKIP;
	    }
	  next = next_nonnote_insn (insn);
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    if (next && JUMP_P (next))
	      {
		rtx nextbody = PATTERN (next);
		if (GET_CODE (nextbody) == ADDR_VEC
		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
		  {
		    log = ADDR_VEC_ALIGN (next);
		    if (max_log < log)
		      {
			max_log = log;
			max_skip = LABEL_ALIGN_MAX_SKIP;
		      }
		  }
	      }
	  LABEL_TO_ALIGNMENT (insn) = max_log;
	  LABEL_TO_MAX_SKIP (insn) = max_skip;
	  max_log = 0;
	  max_skip = 0;
	}
      else if (BARRIER_P (insn))
	{
	  rtx label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (LABEL_P (label))
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
		  }
		break;
	      }
	}
    }
#ifdef HAVE_ATTR_length

  /* Allocate the rest of the arrays.  */
  insn_lengths = XNEWVEC (int, max_uid);
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = XCNEWVEC (char, max_uid);

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = XCNEWVEC (rtx, max_uid);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;
      log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }
#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
	 label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (!JUMP_P (insn)
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  gcc_assert (len > 0);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);

	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
	      if (min_align > LABEL_TO_ALIGNMENT (lab))
		min_align = LABEL_TO_ALIGNMENT (lab);
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  memset (&flags, 0, sizeof (flags));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */
  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (LABEL_P (insn))
	{
	  int log = LABEL_TO_ALIGNMENT (insn);
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}
) = insn_current_address
+ insn_lengths
[uid
];
1067 if (NOTE_P (insn
) || BARRIER_P (insn
)
1070 if (INSN_DELETED_P (insn
))
1073 body
= PATTERN (insn
);
1074 if (GET_CODE (body
) == ADDR_VEC
|| GET_CODE (body
) == ADDR_DIFF_VEC
)
1076 /* This only takes room if read-only data goes into the text
1078 if (JUMP_TABLES_IN_TEXT_SECTION
1079 || readonly_data_section
== text_section
)
1080 insn_lengths
[uid
] = (XVECLEN (body
,
1081 GET_CODE (body
) == ADDR_DIFF_VEC
)
1082 * GET_MODE_SIZE (GET_MODE (body
)));
1083 /* Alignment is handled by ADDR_VEC_ALIGN. */
1085 else if (GET_CODE (body
) == ASM_INPUT
|| asm_noperands (body
) >= 0)
1086 insn_lengths
[uid
] = asm_insn_count (body
) * insn_default_length (insn
);
1087 else if (GET_CODE (body
) == SEQUENCE
)
1090 int const_delay_slots
;
1092 const_delay_slots
= const_num_delay_slots (XVECEXP (body
, 0, 0));
1094 const_delay_slots
= 0;
1096 /* Inside a delay slot sequence, we do not do any branch shortening
1097 if the shortening could change the number of delay slots
1099 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1101 rtx inner_insn
= XVECEXP (body
, 0, i
);
1102 int inner_uid
= INSN_UID (inner_insn
);
1105 if (GET_CODE (body
) == ASM_INPUT
1106 || asm_noperands (PATTERN (XVECEXP (body
, 0, i
))) >= 0)
1107 inner_length
= (asm_insn_count (PATTERN (inner_insn
))
1108 * insn_default_length (inner_insn
));
1110 inner_length
= insn_default_length (inner_insn
);
1112 insn_lengths
[inner_uid
] = inner_length
;
1113 if (const_delay_slots
)
1115 if ((varying_length
[inner_uid
]
1116 = insn_variable_length_p (inner_insn
)) != 0)
1117 varying_length
[uid
] = 1;
1118 INSN_ADDRESSES (inner_uid
) = (insn_current_address
1119 + insn_lengths
[uid
]);
1122 varying_length
[inner_uid
] = 0;
1123 insn_lengths
[uid
] += inner_length
;
1126 else if (GET_CODE (body
) != USE
&& GET_CODE (body
) != CLOBBER
)
1128 insn_lengths
[uid
] = insn_default_length (insn
);
1129 varying_length
[uid
] = insn_variable_length_p (insn
);
1132 /* If needed, do any adjustment. */
1133 #ifdef ADJUST_INSN_LENGTH
1134 ADJUST_INSN_LENGTH (insn
, insn_lengths
[uid
]);
1135 if (insn_lengths
[uid
] < 0)
1136 fatal_insn ("negative insn length", insn
);
1140 /* Now loop over all the insns finding varying length insns. For each,
1141 get the current insn length. If it has changed, reflect the change.
1142 When nothing changes for a full pass, we are done. */
1144 while (something_changed
)
1146 something_changed
= 0;
1147 insn_current_align
= MAX_CODE_ALIGN
- 1;
1148 for (insn_current_address
= 0, insn
= first
;
1150 insn
= NEXT_INSN (insn
))
1153 #ifdef ADJUST_INSN_LENGTH
1158 uid
= INSN_UID (insn
);
1162 int log
= LABEL_TO_ALIGNMENT (insn
);
1163 if (log
> insn_current_align
)
1165 int align
= 1 << log
;
1166 int new_address
= (insn_current_address
+ align
- 1) & -align
;
1167 insn_lengths
[uid
] = new_address
- insn_current_address
;
1168 insn_current_align
= log
;
1169 insn_current_address
= new_address
;
1172 insn_lengths
[uid
] = 0;
1173 INSN_ADDRESSES (uid
) = insn_current_address
;
1177 length_align
= INSN_LENGTH_ALIGNMENT (insn
);
1178 if (length_align
< insn_current_align
)
1179 insn_current_align
= length_align
;
1181 insn_last_address
= INSN_ADDRESSES (uid
);
1182 INSN_ADDRESSES (uid
) = insn_current_address
;
1184 #ifdef CASE_VECTOR_SHORTEN_MODE
1185 if (optimize
&& JUMP_P (insn
)
1186 && GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1188 rtx body
= PATTERN (insn
);
1189 int old_length
= insn_lengths
[uid
];
1190 rtx rel_lab
= XEXP (XEXP (body
, 0), 0);
1191 rtx min_lab
= XEXP (XEXP (body
, 2), 0);
1192 rtx max_lab
= XEXP (XEXP (body
, 3), 0);
1193 int rel_addr
= INSN_ADDRESSES (INSN_UID (rel_lab
));
1194 int min_addr
= INSN_ADDRESSES (INSN_UID (min_lab
));
1195 int max_addr
= INSN_ADDRESSES (INSN_UID (max_lab
));
1198 addr_diff_vec_flags flags
;
1200 /* Avoid automatic aggregate initialization. */
1201 flags
= ADDR_DIFF_VEC_FLAGS (body
);
1203 /* Try to find a known alignment for rel_lab. */
1204 for (prev
= rel_lab
;
1206 && ! insn_lengths
[INSN_UID (prev
)]
1207 && ! (varying_length
[INSN_UID (prev
)] & 1);
1208 prev
= PREV_INSN (prev
))
1209 if (varying_length
[INSN_UID (prev
)] & 2)
1211 rel_align
= LABEL_TO_ALIGNMENT (prev
);
1215 /* See the comment on addr_diff_vec_flags in rtl.h for the
1216 meaning of the flags values. base: REL_LAB vec: INSN */
1217 /* Anything after INSN has still addresses from the last
1218 pass; adjust these so that they reflect our current
1219 estimate for this pass. */
1220 if (flags
.base_after_vec
)
1221 rel_addr
+= insn_current_address
- insn_last_address
;
1222 if (flags
.min_after_vec
)
1223 min_addr
+= insn_current_address
- insn_last_address
;
1224 if (flags
.max_after_vec
)
1225 max_addr
+= insn_current_address
- insn_last_address
;
1226 /* We want to know the worst case, i.e. lowest possible value
1227 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1228 its offset is positive, and we have to be wary of code shrink;
1229 otherwise, it is negative, and we have to be vary of code
1231 if (flags
.min_after_base
)
1233 /* If INSN is between REL_LAB and MIN_LAB, the size
1234 changes we are about to make can change the alignment
1235 within the observed offset, therefore we have to break
1236 it up into two parts that are independent. */
1237 if (! flags
.base_after_vec
&& flags
.min_after_vec
)
1239 min_addr
-= align_fuzz (rel_lab
, insn
, rel_align
, 0);
1240 min_addr
-= align_fuzz (insn
, min_lab
, 0, 0);
1243 min_addr
-= align_fuzz (rel_lab
, min_lab
, rel_align
, 0);
1247 if (flags
.base_after_vec
&& ! flags
.min_after_vec
)
1249 min_addr
-= align_fuzz (min_lab
, insn
, 0, ~0);
1250 min_addr
-= align_fuzz (insn
, rel_lab
, 0, ~0);
1253 min_addr
-= align_fuzz (min_lab
, rel_lab
, 0, ~0);
1255 /* Likewise, determine the highest lowest possible value
1256 for the offset of MAX_LAB. */
1257 if (flags
.max_after_base
)
1259 if (! flags
.base_after_vec
&& flags
.max_after_vec
)
1261 max_addr
+= align_fuzz (rel_lab
, insn
, rel_align
, ~0);
1262 max_addr
+= align_fuzz (insn
, max_lab
, 0, ~0);
1265 max_addr
+= align_fuzz (rel_lab
, max_lab
, rel_align
, ~0);
1269 if (flags
.base_after_vec
&& ! flags
.max_after_vec
)
1271 max_addr
+= align_fuzz (max_lab
, insn
, 0, 0);
1272 max_addr
+= align_fuzz (insn
, rel_lab
, 0, 0);
1275 max_addr
+= align_fuzz (max_lab
, rel_lab
, 0, 0);
1277 PUT_MODE (body
, CASE_VECTOR_SHORTEN_MODE (min_addr
- rel_addr
,
1278 max_addr
- rel_addr
,
1280 if (JUMP_TABLES_IN_TEXT_SECTION
1281 || readonly_data_section
== text_section
)
1284 = (XVECLEN (body
, 1) * GET_MODE_SIZE (GET_MODE (body
)));
1285 insn_current_address
+= insn_lengths
[uid
];
1286 if (insn_lengths
[uid
] != old_length
)
1287 something_changed
= 1;
1292 #endif /* CASE_VECTOR_SHORTEN_MODE */
1294 if (! (varying_length
[uid
]))
1296 if (NONJUMP_INSN_P (insn
)
1297 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1301 body
= PATTERN (insn
);
1302 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1304 rtx inner_insn
= XVECEXP (body
, 0, i
);
1305 int inner_uid
= INSN_UID (inner_insn
);
1307 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1309 insn_current_address
+= insn_lengths
[inner_uid
];
1313 insn_current_address
+= insn_lengths
[uid
];
1318 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1322 body
= PATTERN (insn
);
1324 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1326 rtx inner_insn
= XVECEXP (body
, 0, i
);
1327 int inner_uid
= INSN_UID (inner_insn
);
1330 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1332 /* insn_current_length returns 0 for insns with a
1333 non-varying length. */
1334 if (! varying_length
[inner_uid
])
1335 inner_length
= insn_lengths
[inner_uid
];
1337 inner_length
= insn_current_length (inner_insn
);
1339 if (inner_length
!= insn_lengths
[inner_uid
])
1341 insn_lengths
[inner_uid
] = inner_length
;
1342 something_changed
= 1;
1344 insn_current_address
+= insn_lengths
[inner_uid
];
1345 new_length
+= inner_length
;
1350 new_length
= insn_current_length (insn
);
1351 insn_current_address
+= new_length
;
1354 #ifdef ADJUST_INSN_LENGTH
1355 /* If needed, do any adjustment. */
1356 tmp_length
= new_length
;
1357 ADJUST_INSN_LENGTH (insn
, new_length
);
1358 insn_current_address
+= (new_length
- tmp_length
);
1361 if (new_length
!= insn_lengths
[uid
])
1363 insn_lengths
[uid
] = new_length
;
1364 something_changed
= 1;
1367 /* For a non-optimizing compile, do only a single pass. */
1372 free (varying_length
);
1374 #endif /* HAVE_ATTR_length */
#ifdef HAVE_ATTR_length
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */

static int
asm_insn_count (rtx body)
{
  const char *template;
  int count = 1;

  if (GET_CODE (body) == ASM_INPUT)
    template = XSTR (body, 0);
  else
    template = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);

  for (; *template; template++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template, template)
	|| *template == '\n')
      count++;

  return count;
}
#endif
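/* Illustrative example (not from the original sources): with the default
   IS_ASM_LOGICAL_LINE_SEPARATOR, which treats ';' as a separator, an asm
   template such as "mov r0,r1; add r2,r2,#1\nsub r3,r3,r0" is counted as
   three machine instructions -- one for the initial instruction plus one
   for each ';' or '\n' encountered.  */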
/* ??? This is probably the wrong place for these.  */
/* Structure recording the mapping from source file and directory
   names at compile time to those to be embedded in debug
   information.  */
typedef struct debug_prefix_map
{
  const char *old_prefix;
  const char *new_prefix;
  size_t old_len;
  size_t new_len;
  struct debug_prefix_map *next;
} debug_prefix_map;

/* Linked list of such structures.  */
debug_prefix_map *debug_prefix_maps;


/* Record a debug file prefix mapping.  ARG is the argument to
   -fdebug-prefix-map and must be of the form OLD=NEW.  */

void
add_debug_prefix_map (const char *arg)
{
  debug_prefix_map *map;
  const char *p;

  p = strchr (arg, '=');
  if (!p)
    {
      error ("invalid argument %qs to -fdebug-prefix-map", arg);
      return;
    }
  map = XNEW (debug_prefix_map);
  map->old_prefix = ggc_alloc_string (arg, p - arg);
  map->old_len = p - arg;
  p++;
  map->new_prefix = ggc_strdup (p);
  map->new_len = strlen (p);
  map->next = debug_prefix_maps;
  debug_prefix_maps = map;
}

/* Perform user-specified mapping of debug filename prefixes.  Return
   the new name corresponding to FILENAME.  */

const char *
remap_debug_filename (const char *filename)
{
  debug_prefix_map *map;
  char *s;
  const char *name;
  size_t name_len;

  for (map = debug_prefix_maps; map; map = map->next)
    if (strncmp (filename, map->old_prefix, map->old_len) == 0)
      break;
  if (!map)
    return filename;
  name = filename + map->old_len;
  name_len = strlen (name) + 1;
  s = (char *) alloca (name_len + map->new_len);
  memcpy (s, map->new_prefix, map->new_len);
  memcpy (s + map->new_len, name, name_len);
  return ggc_strdup (s);
}
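/* Usage sketch (illustrative paths, not part of this file): given
   -fdebug-prefix-map=/tmp/build/src=/usr/src, the mapping list above turns
   a filename such as "/tmp/build/src/foo.c" into "/usr/src/foo.c" in the
   emitted debug information; names that match no recorded old prefix are
   returned unchanged.  */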
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
		      int optimize ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  last_filename = locator_file (prologue_locator);
  last_linenum = locator_line (prologue_locator);

  high_block_linenum = high_function_linenum = last_linenum;

  (*debug_hooks->begin_prologue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
    dwarf2out_begin_prologue (0, NULL);
#endif

#ifdef LEAF_REG_REMAP
  if (current_function_uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
#ifdef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* PROFILE_BEFORE_PROLOGUE */

#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
  if (dwarf2out_do_frame ())
    dwarf2out_frame_debug (NULL_RTX, false);
#endif

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
#ifdef HAVE_prologue
  if (! HAVE_prologue)
#endif
    profile_after_prologue (file);
}

static void
profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* not PROFILE_BEFORE_PROLOGUE */
}

static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS	0
#endif
#if defined(ASM_OUTPUT_REG_PUSH)
  int sval = current_function_returns_struct;
  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
  int cxt = cfun->static_chain_decl != NULL;
#endif
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
#endif

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
#endif
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
#endif
#endif

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && REG_P (svrtx))
    ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
#endif
}

/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  (*debug_hooks->end_function) (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  (*debug_hooks->end_epilogue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);
#endif
}

/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

void
final (rtx first, FILE *file, int optimize)
{
  rtx insn;
  int max_uid = 0;
  int seen = 0;

  last_ignored_compare = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
	max_uid = INSN_UID (insn);
#ifdef HAVE_cc0
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize && JUMP_P (insn))
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
#endif
    }

  init_recog ();

  CC_STATUS_INIT;

  /* Output the insns.  */
  for (insn = first; insn;)
    {
#ifdef HAVE_ATTR_length
      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	{
	  /* This can be triggered by bugs elsewhere in the compiler if
	     new insns are created after init_insn_lengths is called.  */
	  gcc_assert (NOTE_P (insn));
	  insn_current_address = -1;
	}
      else
	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
#endif /* HAVE_ATTR_length */

      insn = final_scan_insn (insn, file, optimize, 0, &seen);
    }
}

const char *
get_insn_template (int code, rtx insn)
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
      return insn_data[code].output.single;
    case INSN_OUTPUT_FORMAT_MULTI:
      return insn_data[code].output.multi[which_alternative];
    case INSN_OUTPUT_FORMAT_FUNCTION:
      gcc_assert (insn);
      return (*insn_data[code].output.function) (recog_data.operand, insn);

    default:
      gcc_unreachable ();
    }
}

/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional.  */
static void
output_alternate_entry_point (FILE *file, rtx insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
      gcc_unreachable ();
    }
}
/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used for within delayed branch sequence output).

   SEEN is used to track the end of the prologue, for emitting
   debug information.  We force the emission of a line note after
   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
   at the beginning of the second basic block, whichever comes
   first.  */

rtx
final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
#ifdef HAVE_cc0
  rtx set;
#endif
  rtx next;

  insn_counter++;

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (INSN_DELETED_P (insn))
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_KIND (insn))
	{
	case NOTE_INSN_DELETED:
	  break;

	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	  in_cold_section_p = !in_cold_section_p;
#ifdef DWARF2_UNWIND_INFO
	  if (dwarf2out_do_frame ())
	    dwarf2out_switch_text_section ();
#endif
	  (*debug_hooks->switch_text_section) ();

	  switch_to_section (current_function_section ());
	  break;

	case NOTE_INSN_BASIC_BLOCK:
#ifdef TARGET_UNWIND_INFO
	  targetm.asm_out.unwind_emit (asm_out_file, insn);
#endif

	  if (flag_debug_asm)
	    fprintf (asm_out_file, "\t%s basic block %d\n",
		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);

	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_BB;

	  break;

	case NOTE_INSN_EH_REGION_BEG:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_EH_REGION_END:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_PROLOGUE_END:
	  targetm.asm_out.function_end_prologue (file);
	  profile_after_prologue (file);

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_EPILOGUE_BEG:
	  targetm.asm_out.function_begin_epilogue (file);
	  break;

	case NOTE_INSN_FUNCTION_BEG:
	  app_disable ();
	  (*debug_hooks->end_prologue) (last_linenum, last_filename);

	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();
	      ++block_depth;
	      high_block_linenum = last_linenum;

	      /* Output debugging info about the symbol-block beginning.  */
	      (*debug_hooks->begin_block) (last_linenum, n);

	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	    }
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      location_t *locus_ptr
		= block_nonartificial_location (NOTE_BLOCK (insn));

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	    }
	  break;

	case NOTE_INSN_BLOCK_END:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();

	      /* End of a symbol-block.  */
	      --block_depth;
	      gcc_assert (block_depth >= 0);

	      (*debug_hooks->end_block) (high_block_linenum, n);
	    }
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
	      location_t *locus_ptr
		= block_nonartificial_location (outer_block);

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	      else
		{
		  override_filename = NULL;
		  override_linenum = 0;
		}
	    }
	  break;

	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken).  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_VAR_LOCATION:
	  (*debug_hooks->var_location) (insn);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      break;

    case BARRIER:
1948 #if defined (DWARF2_UNWIND_INFO)
1949 if (dwarf2out_do_frame ())
1950 dwarf2out_frame_debug (insn
, false);
1955 /* The target port might emit labels in the output function for
1956 some insn, e.g. sh.c output_branchy_insn. */
1957 if (CODE_LABEL_NUMBER (insn
) <= max_labelno
)
1959 int align
= LABEL_TO_ALIGNMENT (insn
);
1960 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1961 int max_skip
= LABEL_TO_MAX_SKIP (insn
);
1964 if (align
&& NEXT_INSN (insn
))
1966 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1967 ASM_OUTPUT_MAX_SKIP_ALIGN (file
, align
, max_skip
);
1969 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1970 ASM_OUTPUT_ALIGN_WITH_NOP (file
, align
);
1972 ASM_OUTPUT_ALIGN (file
, align
);
1979 /* If this label is reached from only one place, set the condition
1980 codes from the instruction just before the branch. */
1982 /* Disabled because some insns set cc_status in the C output code
1983 and NOTICE_UPDATE_CC alone can set incorrect status. */
1984 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1986 rtx jump
= LABEL_REFS (insn
);
1987 rtx barrier
= prev_nonnote_insn (insn
);
1989 /* If the LABEL_REFS field of this label has been set to point
1990 at a branch, the predecessor of the branch is a regular
1991 insn, and that branch is the only way to reach this label,
1992 set the condition codes based on the branch and its
1994 if (barrier
&& BARRIER_P (barrier
)
1995 && jump
&& JUMP_P (jump
)
1996 && (prev
= prev_nonnote_insn (jump
))
1997 && NONJUMP_INSN_P (prev
))
1999 NOTICE_UPDATE_CC (PATTERN (prev
), prev
);
2000 NOTICE_UPDATE_CC (PATTERN (jump
), jump
);
2005 if (LABEL_NAME (insn
))
2006 (*debug_hooks
->label
) (insn
);
2010 fputs (ASM_APP_OFF
, file
);
2014 next
= next_nonnote_insn (insn
);
2015 if (next
!= 0 && JUMP_P (next
))
2017 rtx nextbody
= PATTERN (next
);
2019 /* If this label is followed by a jump-table,
2020 make sure we put the label in the read-only section. Also
2021 possibly write the label and jump table together. */
2023 if (GET_CODE (nextbody
) == ADDR_VEC
2024 || GET_CODE (nextbody
) == ADDR_DIFF_VEC
)
2026 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2027 /* In this case, the case vector is being moved by the
2028 target, so don't output the label at all. Leave that
2029 to the back end macros. */
2031 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2035 switch_to_section (targetm
.asm_out
.function_rodata_section
2036 (current_function_decl
));
2038 #ifdef ADDR_VEC_ALIGN
2039 log_align
= ADDR_VEC_ALIGN (next
);
2041 log_align
= exact_log2 (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
2043 ASM_OUTPUT_ALIGN (file
, log_align
);
2046 switch_to_section (current_function_section ());
2048 #ifdef ASM_OUTPUT_CASE_LABEL
2049 ASM_OUTPUT_CASE_LABEL (file
, "L", CODE_LABEL_NUMBER (insn
),
2052 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2058 if (LABEL_ALT_ENTRY_P (insn
))
2059 output_alternate_entry_point (file
, insn
);
2061 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2066 rtx body
= PATTERN (insn
);
2067 int insn_code_number
;
2068 const char *template;
2070 #ifdef HAVE_conditional_execution
2071 /* Reset this early so it is correct for ASM statements. */
2072 current_insn_predicate
= NULL_RTX
;
2074 /* An INSN, JUMP_INSN or CALL_INSN.
2075 First check for special kinds that recog doesn't recognize. */
2077 if (GET_CODE (body
) == USE
/* These are just declarations. */
2078 || GET_CODE (body
) == CLOBBER
)
2083 /* If there is a REG_CC_SETTER note on this insn, it means that
2084 the setting of the condition code was done in the delay slot
2085 of the insn that branched here. So recover the cc status
2086 from the insn that set it. */
2088 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
2091 NOTICE_UPDATE_CC (PATTERN (XEXP (note
, 0)), XEXP (note
, 0));
2092 cc_prev_status
= cc_status
;
2097 /* Detect insns that are really jump-tables
2098 and output them as such. */
2100 if (GET_CODE (body
) == ADDR_VEC
|| GET_CODE (body
) == ADDR_DIFF_VEC
)
2102 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2106 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2107 switch_to_section (targetm
.asm_out
.function_rodata_section
2108 (current_function_decl
));
2110 switch_to_section (current_function_section ());
2114 fputs (ASM_APP_OFF
, file
);
2118 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2119 if (GET_CODE (body
) == ADDR_VEC
)
2121 #ifdef ASM_OUTPUT_ADDR_VEC
2122 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn
), body
);
2129 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2130 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn
), body
);
2136 vlen
= XVECLEN (body
, GET_CODE (body
) == ADDR_DIFF_VEC
);
2137 for (idx
= 0; idx
< vlen
; idx
++)
2139 if (GET_CODE (body
) == ADDR_VEC
)
2141 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2142 ASM_OUTPUT_ADDR_VEC_ELT
2143 (file
, CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 0, idx
), 0)));
2150 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2151 ASM_OUTPUT_ADDR_DIFF_ELT
2154 CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 1, idx
), 0)),
2155 CODE_LABEL_NUMBER (XEXP (XEXP (body
, 0), 0)));
2161 #ifdef ASM_OUTPUT_CASE_END
2162 ASM_OUTPUT_CASE_END (file
,
2163 CODE_LABEL_NUMBER (PREV_INSN (insn
)),
2168 switch_to_section (current_function_section ());
2172 /* Output this line note if it is the first or the last line
2174 if (notice_source_line (insn
))
2176 (*debug_hooks
->source_line
) (last_linenum
, last_filename
);
2179 if (GET_CODE (body
) == ASM_INPUT
)
2181 const char *string
= XSTR (body
, 0);
2183 /* There's no telling what that did to the condition codes. */
2188 expanded_location loc
;
2192 fputs (ASM_APP_ON
, file
);
2195 loc
= expand_location (ASM_INPUT_SOURCE_LOCATION (body
));
2196 if (*loc
.file
&& loc
.line
)
2197 fprintf (asm_out_file
, "%s %i \"%s\" 1\n",
2198 ASM_COMMENT_START
, loc
.line
, loc
.file
);
2199 fprintf (asm_out_file
, "\t%s\n", string
);
2200 #if HAVE_AS_LINE_ZERO
2201 if (*loc
.file
&& loc
.line
)
2202 fprintf (asm_out_file
, "%s 0 \"\" 2\n", ASM_COMMENT_START
);
      /* Detect `asm' construct with operands.  */
      if (asm_noperands (body) >= 0)
	  unsigned int noperands = asm_noperands (body);
	  rtx *ops = alloca (noperands * sizeof (rtx));

	  expanded_location expanded;

	  /* There's no telling what that did to the condition codes.  */

	  /* Get out the operand values.  */
	  string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
	  /* Inhibit dying on what would otherwise be compiler bugs.  */
	  insn_noperands = noperands;
	  this_is_asm_operands = insn;
	  expanded = expand_location (loc);

#ifdef FINAL_PRESCAN_INSN
	  FINAL_PRESCAN_INSN (insn, ops, insn_noperands);

	  /* Output the insn using them.  */

	      fputs (ASM_APP_ON, file);

	  if (expanded.file && expanded.line)
	    fprintf (asm_out_file, "%s %i \"%s\" 1\n",
		     ASM_COMMENT_START, expanded.line, expanded.file);
	  output_asm_insn (string, ops);
#if HAVE_AS_LINE_ZERO
	  if (expanded.file && expanded.line)
	    fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);

	  this_is_asm_operands = 0;

	      fputs (ASM_APP_OFF, file);

      if (GET_CODE (body) == SEQUENCE)
	  /* A delayed-branch sequence */

	  final_sequence = body;

	  /* Record the delay slots' frame information before the branch.
	     This is needed for delayed calls: see execute_cfa_program().  */
#if defined (DWARF2_UNWIND_INFO)
	  if (dwarf2out_do_frame ())
	    for (i = 1; i < XVECLEN (body, 0); i++)
	      dwarf2out_frame_debug (XVECEXP (body, 0, i), false);

	  /* The first insn in this SEQUENCE might be a JUMP_INSN that will
	     force the restoration of a comparison that was previously
	     thought unnecessary.  If that happens, cancel this sequence
	     and cause that insn to be restored.  */

	  next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
	  if (next != XVECEXP (body, 0, 1))

	  for (i = 1; i < XVECLEN (body, 0); i++)
	      rtx insn = XVECEXP (body, 0, i);
	      rtx next = NEXT_INSN (insn);
	      /* We loop in case any instruction in a delay slot gets
		 split.  */
	      do
		insn = final_scan_insn (insn, file, 0, 1, seen);
	      while (insn != next);

#ifdef DBR_OUTPUT_SEQEND
	  DBR_OUTPUT_SEQEND (file);

	  /* If the insn requiring the delay slot was a CALL_INSN, the
	     insns in the delay slot are actually executed before the
	     called function.  Hence we don't preserve any CC-setting
	     actions in these insns and the CC must be marked as being
	     clobbered by the function.  */
	  if (CALL_P (XVECEXP (body, 0, 0)))

      /* We have a real machine instruction as rtl.  */

      body = PATTERN (insn);

      set = single_set (insn);

      /* Check for redundant test and compare instructions
	 (when the condition codes are already set up as desired).
	 This is done only when optimizing; if not optimizing,
	 it should be possible for the user to alter a variable
	 with the debugger in between statements
	 and the next statement should reexamine the variable
	 to compute the condition codes.  */

	  && GET_CODE (SET_DEST (set)) == CC0
	  && insn != last_ignored_compare)

	  if (GET_CODE (SET_SRC (set)) == SUBREG)
	    SET_SRC (set) = alter_subreg (&SET_SRC (set));
	  else if (GET_CODE (SET_SRC (set)) == COMPARE)
	      if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
		XEXP (SET_SRC (set), 0)
		  = alter_subreg (&XEXP (SET_SRC (set), 0));
	      if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
		XEXP (SET_SRC (set), 1)
		  = alter_subreg (&XEXP (SET_SRC (set), 1));

	  if ((cc_status.value1 != 0
	       && rtx_equal_p (SET_SRC (set), cc_status.value1))
	      || (cc_status.value2 != 0
		  && rtx_equal_p (SET_SRC (set), cc_status.value2)))
	      /* Don't delete insn if it has an addressing side-effect.  */
	      if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
		  /* or if anything in it is volatile.  */
		  && ! volatile_refs_p (PATTERN (insn)))
		  /* We don't really delete the insn; just ignore it.  */
		  last_ignored_compare = insn;
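		  /* A hedged sketch of the effect on a cc0 target: if an
		     earlier arithmetic insn already left the condition codes
		     describing some register X, which NOTICE_UPDATE_CC records
		     as cc_status.value1 == X, then a following test insn

			 (set (cc0) (reg X))

		     satisfies the rtx_equal_p check above and is merely
		     remembered in last_ignored_compare instead of being
		     output, so no explicit test/compare instruction reaches
		     the assembly.  */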
      /* If this is a conditional branch, maybe modify it
	 if the cc's are in a nonstandard state
	 so that it accomplishes the same thing that it would
	 do straightforwardly if the cc's were set up normally.  */

      if (cc_status.flags != 0
	  && GET_CODE (body) == SET
	  && SET_DEST (body) == pc_rtx
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
	  && COMPARISON_P (XEXP (SET_SRC (body), 0))
	  && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)

	  /* This function may alter the contents of its argument
	     and clear some of the cc_status.flags bits.
	     It may also return 1 meaning condition now always true
	     or -1 meaning condition now always false
	     or 2 meaning condition nontrivial but altered.  */
	  int result = alter_cond (XEXP (SET_SRC (body), 0));
	  /* If condition now has fixed value, replace the IF_THEN_ELSE
	     with its then-operand or its else-operand.  */
	    SET_SRC (body) = XEXP (SET_SRC (body), 1);
	    SET_SRC (body) = XEXP (SET_SRC (body), 2);

	  /* The jump is now either unconditional or a no-op.
	     If it has become a no-op, don't try to output it.
	     (It would not be recognized.)  */
	  if (SET_SRC (body) == pc_rtx)
	  else if (GET_CODE (SET_SRC (body)) == RETURN)
	    /* Replace (set (pc) (return)) with (return).  */
	    PATTERN (insn) = body = SET_SRC (body);

	  /* Rerecognize the instruction if it has changed.  */
	    INSN_CODE (insn) = -1;

      /* If this is a conditional trap, maybe modify it if the cc's
	 are in a nonstandard state so that it accomplishes the same
	 thing that it would do straightforwardly if the cc's were
	 set up normally.  */
      if (cc_status.flags != 0
	  && NONJUMP_INSN_P (insn)
	  && GET_CODE (body) == TRAP_IF
	  && COMPARISON_P (TRAP_CONDITION (body))
	  && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)

	  /* This function may alter the contents of its argument
	     and clear some of the cc_status.flags bits.
	     It may also return 1 meaning condition now always true
	     or -1 meaning condition now always false
	     or 2 meaning condition nontrivial but altered.  */
	  int result = alter_cond (TRAP_CONDITION (body));

	  /* If TRAP_CONDITION has become always false, delete the
	     instruction.  */

	  /* If TRAP_CONDITION has become always true, replace
	     TRAP_CONDITION with const_true_rtx.  */
	    TRAP_CONDITION (body) = const_true_rtx;

	  /* Rerecognize the instruction if it has changed.  */
	    INSN_CODE (insn) = -1;

      /* Make same adjustments to instructions that examine the
	 condition codes without jumping and instructions that
	 handle conditional moves (if this machine has either one).  */

      if (cc_status.flags != 0

	  rtx cond_rtx, then_rtx, else_rtx;

	      && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
	      cond_rtx = XEXP (SET_SRC (set), 0);
	      then_rtx = XEXP (SET_SRC (set), 1);
	      else_rtx = XEXP (SET_SRC (set), 2);

	      cond_rtx = SET_SRC (set);
	      then_rtx = const_true_rtx;
	      else_rtx = const0_rtx;

	  switch (GET_CODE (cond_rtx))

	      if (XEXP (cond_rtx, 0) != cc0_rtx)
	      result = alter_cond (cond_rtx);
		validate_change (insn, &SET_SRC (set), then_rtx, 0);
	      else if (result == -1)
		validate_change (insn, &SET_SRC (set), else_rtx, 0);
	      else if (result == 2)
		INSN_CODE (insn) = -1;
	      if (SET_DEST (set) == SET_SRC (set))
#ifdef HAVE_peephole
      /* Do machine-specific peephole optimizations if desired.  */

      if (optimize && !flag_no_peephole && !nopeepholes)
	  rtx next = peephole (insn);
	  /* When peepholing, if there were notes within the peephole,
	     emit them before the peephole.  */
	  if (next != 0 && next != NEXT_INSN (insn))
	      rtx note, prev = PREV_INSN (insn);

	      for (note = NEXT_INSN (insn); note != next;
		   note = NEXT_INSN (note))
		final_scan_insn (note, file, optimize, nopeepholes, seen);

	      /* Put the notes in the proper position for a later
		 rescan.  For example, the SH target can do this
		 when generating a far jump in a delayed branch
		 sequence.  */
	      note = NEXT_INSN (insn);
	      PREV_INSN (note) = prev;
	      NEXT_INSN (prev) = note;
	      NEXT_INSN (PREV_INSN (next)) = insn;
	      PREV_INSN (insn) = PREV_INSN (next);
	      NEXT_INSN (insn) = next;
	      PREV_INSN (next) = insn;

	  /* PEEPHOLE might have changed this.  */
	  body = PATTERN (insn);

      /* Try to recognize the instruction.
	 If successful, verify that the operands satisfy the
	 constraints for the instruction.  Crash if they don't,
	 since `reload' should have changed them so that they do.  */

      insn_code_number = recog_memoized (insn);
      cleanup_subreg_operands (insn);

      /* Dump the insn in the assembly for debugging.  */
      if (flag_dump_rtl_in_asm)
	  print_rtx_head = ASM_COMMENT_START;
	  print_rtl_single (asm_out_file, insn);
	  print_rtx_head = "";

      if (! constrain_operands_cached (1))
	fatal_insn_not_found (insn);

      /* Some target machines need to prescan each insn before
	 it is output.  */

#ifdef FINAL_PRESCAN_INSN
      FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);

#ifdef HAVE_conditional_execution
      if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));

      cc_prev_status = cc_status;

      /* Update `cc_status' for this instruction.
	 The instruction's output routine may change it further.
	 If the output routine for a jump insn needs to depend
	 on the cc status, it should look at cc_prev_status.  */

      NOTICE_UPDATE_CC (body, insn);

      current_output_insn = debug_insn = insn;

#if defined (DWARF2_UNWIND_INFO)
      if (CALL_P (insn) && dwarf2out_do_frame ())
	dwarf2out_frame_debug (insn, false);
      /* Find the proper template for this insn.  */
      template = get_insn_template (insn_code_number, insn);

      /* If the C code returns 0, it means that it is a jump insn
	 which follows a deleted test insn, and that test insn
	 needs to be reinserted.  */

	  gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);

	  /* We have already processed the notes between the setter and
	     the user.  Make sure we don't process them again, this is
	     particularly important if one of the notes is a block
	     scope note or an EH note.  */
	       prev != last_ignored_compare;
	       prev = PREV_INSN (prev))
	      delete_insn (prev);	/* Use delete_note.  */

      /* If the template is the string "#", it means that this insn must
	 be split.  */
      if (template[0] == '#' && template[1] == '\0')
	  rtx new = try_split (body, insn, 0);

	  /* If we didn't split the insn, go away.  */
	  if (new == insn && PATTERN (new) == body)
	    fatal_insn ("could not split insn", insn);

#ifdef HAVE_ATTR_length
	  /* This instruction should have been split in shorten_branches,
	     to ensure that we would have valid length info for the ...  */

#ifdef TARGET_UNWIND_INFO
      /* ??? This will put the directives in the wrong place if
	 get_insn_template outputs assembly directly.  However, calling it
	 before get_insn_template breaks if the insn is split.  */
      targetm.asm_out.unwind_emit (asm_out_file, insn);

      /* Output assembler code from the template.  */
      output_asm_insn (template, recog_data.operand);

      /* If necessary, report the effect that the instruction has on
	 the unwind info.  We've already done this for delay slots
	 and call instructions.  */
#if defined (DWARF2_UNWIND_INFO)
      if (final_sequence == 0
#if !defined (HAVE_prologue)
	  && !ACCUMULATE_OUTGOING_ARGS
	  && dwarf2out_do_frame ())
	dwarf2out_frame_debug (insn, true);

      current_output_insn = debug_insn = 0;

  return NEXT_INSN (insn);
/* Return whether a source line note needs to be emitted before INSN.  */

notice_source_line (rtx insn)
{
  const char *filename;

  if (override_filename)
      filename = override_filename;
      linenum = override_linenum;

      filename = insn_file (insn);
      linenum = insn_line (insn);

      && (force_source_line
	  || filename != last_filename
	  || last_linenum != linenum))
      force_source_line = false;
      last_filename = filename;
      last_linenum = linenum;
      high_block_linenum = MAX (last_linenum, high_block_linenum);
      high_function_linenum = MAX (last_linenum, high_function_linenum);
/* For each operand in INSN, simplify (subreg (reg)) so that it refers
   directly to the desired hard register.  */

cleanup_subreg_operands (rtx insn)
{
  bool changed = false;
  extract_insn_cached (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      /* The following test cannot use recog_data.operand when testing
	 for a SUBREG: the underlying object might have been changed
	 already if we are inside a match_operator expression that
	 matches the else clause.  Instead we test the underlying
	 expression directly.  */
      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
      else if (GET_CODE (recog_data.operand[i]) == PLUS
	       || GET_CODE (recog_data.operand[i]) == MULT
	       || MEM_P (recog_data.operand[i]))
	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i],
						   &changed);
    }

  for (i = 0; i < recog_data.n_dups; i++)
    {
      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
	       || MEM_P (*recog_data.dup_loc[i]))
	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i],
						    &changed);
    }
    df_insn_rescan (insn);
/* If X is a SUBREG, replace it with a REG or a MEM,
   based on the thing it is a subreg of.  */

alter_subreg (rtx *xp)
{
  rtx y = SUBREG_REG (x);

  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
      int offset = SUBREG_BYTE (x);

      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
	 contains 0 instead of the proper offset.  See simplify_subreg.  */
	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
	  int difference = GET_MODE_SIZE (GET_MODE (y))
			   - GET_MODE_SIZE (GET_MODE (x));
	  if (WORDS_BIG_ENDIAN)
	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += difference % UNITS_PER_WORD;

      *xp = adjust_address (y, GET_MODE (x), offset);

      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),

	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
	  HOST_WIDE_INT offset;

	  regno = subreg_regno (x);
	  if (subreg_lowpart_p (x))
	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
	    offset = SUBREG_BYTE (x);
	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
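/* Worked example for the endianness adjustment above (illustrative only):
   for a paradoxical (subreg:SI (mem:QI ...) 0), GET_MODE_SIZE (QImode) = 1
   and GET_MODE_SIZE (SImode) = 4, so difference = 1 - 4 = -3.  With C's
   truncating division, (-3 / 4) * 4 == 0 and -3 % 4 == -3, so on a target
   where both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are set the offset
   becomes -3: the wider access is moved back so its least significant byte
   overlays the original QImode byte.  */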
/* Do alter_subreg on all the SUBREGs contained in X.  */

walk_alter_subreg (rtx *xp, bool *changed)
{
  switch (GET_CODE (x))
    {
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);

      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);

      return alter_subreg (xp);
/* Given BODY, the body of a jump instruction, alter the jump condition
   as required by the bits that are set in cc_status.flags.
   Not all of the bits there can be handled at this level in all cases.

   The value is normally 0.
   1 means that the condition has become always true.
   -1 means that the condition has become always false.
   2 means that COND has been altered.  */

alter_cond (rtx cond)
{
  if (cc_status.flags & CC_REVERSED)
      PUT_CODE (cond, swap_condition (GET_CODE (cond)));

  if (cc_status.flags & CC_INVERTED)
      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));

  if (cc_status.flags & CC_NOT_POSITIVE)
    switch (GET_CODE (cond))
      {
	/* Jump becomes unconditional.  */

	/* Jump becomes no-op.  */

	PUT_CODE (cond, EQ);

	PUT_CODE (cond, NE);
      }

  if (cc_status.flags & CC_NOT_NEGATIVE)
    switch (GET_CODE (cond))
      {
	/* Jump becomes unconditional.  */

	/* Jump becomes no-op.  */

	PUT_CODE (cond, EQ);

	PUT_CODE (cond, NE);
      }

  if (cc_status.flags & CC_NO_OVERFLOW)
    switch (GET_CODE (cond))
      {
	/* Jump becomes unconditional.  */

	PUT_CODE (cond, EQ);

	PUT_CODE (cond, NE);

	/* Jump becomes no-op.  */
      }

  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
    switch (GET_CODE (cond))
      {
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);

	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
      }

  if (cc_status.flags & CC_NOT_SIGNED)
    /* The flags are valid if signed condition operators are converted
       to unsigned.  */
    switch (GET_CODE (cond))
      {
	PUT_CODE (cond, LEU);

	PUT_CODE (cond, LTU);

	PUT_CODE (cond, GTU);

	PUT_CODE (cond, GEU);
      }
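/* Illustrative example: if a port recorded the last comparison with its
   operands swapped (CC_REVERSED), a pending condition such as
   (gt (cc0) (const_int 0)) is rewritten by the swap_condition call above
   into (lt (cc0) (const_int 0)); with CC_INVERTED it would instead be
   reversed into (le ...).  In both cases the function reports 2 so the
   caller re-recognizes the modified jump, while the 1/-1 return values
   let the caller turn the jump into an unconditional branch or drop it
   as a no-op.  */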
/* Report inconsistency between the assembler template and the operands.
   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */

output_operand_lossage (const char *cmsgid, ...)
{
  const char *pfx_str;

  va_start (ap, cmsgid);

  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
  asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
  vasprintf (&new_message, fmt_string, ap);

  if (this_is_asm_operands)
    error_for_asm (this_is_asm_operands, "%s", new_message);
    internal_error ("%s", new_message);

/* Output of assembler code from a template, and its subroutines.  */

/* Annotate the assembly with a comment describing the pattern and
   alternative used.  */

output_asm_name (void)
{
      int num = INSN_CODE (debug_insn);
      fprintf (asm_out_file, "\t%s %d\t%s",
	       ASM_COMMENT_START, INSN_UID (debug_insn),
	       insn_data[num].name);
      if (insn_data[num].n_alternatives > 1)
	fprintf (asm_out_file, "/%d", which_alternative + 1);
#ifdef HAVE_ATTR_length
      fprintf (asm_out_file, "\t[length = %d]",
	       get_attr_length (debug_insn));
      /* Clear this so only the first assembler insn
	 of any rtl insn will get the special comment for -dp.  */
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
   corresponds to the address of the object and 0 if to the object.  */

get_mem_expr_from_op (rtx op, int *paddressp)
{
    return REG_EXPR (op);
  else if (!MEM_P (op))

  if (MEM_EXPR (op) != 0)
    return MEM_EXPR (op);

  /* Otherwise we have an address, so indicate it and look at the address.  */

  /* First check if we have a decl for the address, then look at the right side
     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
     But don't allow the address to itself be indirect.  */
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
  else if (GET_CODE (op) == PLUS
	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))

  while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)

  expr = get_mem_expr_from_op (op, &inner_addressp);
  return inner_addressp ? 0 : expr;

/* Output operand names for assembler instructions.  OPERANDS is the
   operand vector, OPORDER is the order to write the operands, and NOPS
   is the number of operands to write.  */

output_asm_operand_names (rtx *operands, int *oporder, int nops)
{
  for (i = 0; i < nops; i++)
    {
      rtx op = operands[oporder[i]];
      tree expr = get_mem_expr_from_op (op, &addressp);

      fprintf (asm_out_file, "%c%s",
	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);

	  fprintf (asm_out_file, "%s",
		   addressp ? "*" : "");
	  print_mem_expr (asm_out_file, expr);
      else if (REG_P (op) && ORIGINAL_REGNO (op)
	       && ORIGINAL_REGNO (op) != REGNO (op))
	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
    }
/* Output text from TEMPLATE to the assembler output file,
   obeying %-directions to substitute operands taken from
   the vector OPERANDS.

   %N (for N a digit) means print operand N in usual manner.
   %lN means require operand N to be a CODE_LABEL or LABEL_REF
   and print the label name with no punctuation.
   %cN means require operand N to be a constant
   and print the constant expression with no punctuation.
   %aN means expect operand N to be a memory address
   (not a memory reference!) and print a reference
   %nN means expect operand N to be a constant
   and print a constant expression for minus the value
   of the operand, with no other punctuation.  */
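/* For illustration, a hypothetical machine-description template

       "add %1,%2,%0"

   passed here with OPERANDS = { reg 3, reg 4, const_int 5 } would emit
   something like "add r4,5,r3" (the exact operand spelling is up to the
   target's PRINT_OPERAND), while "%l0" would print a label name and
   "%n2" would print -5.  */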
output_asm_insn (const char *template, rtx *operands)
{
#ifdef ASSEMBLER_DIALECT
  int oporder[MAX_RECOG_OPERANDS];
  char opoutput[MAX_RECOG_OPERANDS];

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */

  memset (opoutput, 0, sizeof opoutput);

  putc ('\t', asm_out_file);

#ifdef ASM_OUTPUT_OPCODE
  ASM_OUTPUT_OPCODE (asm_out_file, p);

	if (flag_verbose_asm)
	  output_asm_operand_names (operands, oporder, ops);
	if (flag_print_asm_name)

	memset (opoutput, 0, sizeof opoutput);

	putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
	while ((c = *p) == '\t')
	    putc (c, asm_out_file);
	ASM_OUTPUT_OPCODE (asm_out_file, p);

#ifdef ASSEMBLER_DIALECT
	  output_operand_lossage ("nested assembly dialect alternatives");

	/* If we want the first dialect, do nothing.  Otherwise, skip
	   DIALECT_NUMBER of strings ending with '|'.  */
	for (i = 0; i < dialect_number; i++)
	    while (*p && *p != '}' && *p++ != '|')

	      output_operand_lossage ("unterminated assembly dialect alternative");

	    /* Skip to close brace.  */
		  output_operand_lossage ("unterminated assembly dialect alternative");
	    while (*p++ != '}');

	  putc (c, asm_out_file);
	  putc (c, asm_out_file);

	/* %% outputs a single %.  */
	    putc (c, asm_out_file);
	/* %= outputs a number which is unique to each insn in the entire
	   compilation.  This is useful for making local labels that are
	   referred to more than once in a given insn.  */
	    fprintf (asm_out_file, "%d", insn_counter);
	/* % followed by a letter and some digits
	   outputs an operand in a special way depending on the letter.
	   Letters `acln' are implemented directly.
	   Other letters are passed to `output_operand' so that
	   the PRINT_OPERAND macro can define them.  */
	else if (ISALPHA (*p))
	    unsigned long opnum;

	    opnum = strtoul (p, &endptr, 10);

	      output_operand_lossage ("operand number missing "
				      "after %%-letter");
	    else if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	    else if (letter == 'l')
	      output_asm_label (operands[opnum]);
	    else if (letter == 'a')
	      output_address (operands[opnum]);
	    else if (letter == 'c')
		if (CONSTANT_ADDRESS_P (operands[opnum]))
		  output_addr_const (asm_out_file, operands[opnum]);
		  output_operand (operands[opnum], 'c');
	    else if (letter == 'n')
		if (GET_CODE (operands[opnum]) == CONST_INT)
		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
			   - INTVAL (operands[opnum]));
		    putc ('-', asm_out_file);
		    output_addr_const (asm_out_file, operands[opnum]);
	      output_operand (operands[opnum], letter);

	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	/* % followed by a digit outputs an operand the default way.  */
	else if (ISDIGIT (*p))
	    unsigned long opnum;

	    opnum = strtoul (p, &endptr, 10);
	    if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	      output_operand (operands[opnum], 0);

	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	/* % followed by punctuation: output something for that
	   punctuation character alone, with no operand.
	   The PRINT_OPERAND macro decides what is actually done.  */
#ifdef PRINT_OPERAND_PUNCT_VALID_P
	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
	  output_operand (NULL_RTX, *p++);
	  output_operand_lossage ("invalid %%-code");

	putc (c, asm_out_file);

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)

  putc ('\n', asm_out_file);
/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */

output_asm_label (rtx x)
{
  if (GET_CODE (x) == LABEL_REF)

	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
    output_operand_lossage ("'%%l' operand isn't a label");

  assemble_name (asm_out_file, buf);

/* Print operand X using machine-dependent assembler syntax.
   The macro PRINT_OPERAND is defined just to control this function.
   CODE is a non-digit that preceded the operand-number in the % spec,
   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
   between the % and the digits.
   When CODE is a non-letter, X is 0.

   The meanings of the letters are machine-dependent and controlled
   by PRINT_OPERAND.  */

output_operand (rtx x, int code ATTRIBUTE_UNUSED)
{
  if (x && GET_CODE (x) == SUBREG)
    x = alter_subreg (&x);

  /* X must not be a pseudo reg.  */
  gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);

  PRINT_OPERAND (asm_out_file, x, code);

/* Print a memory reference operand for address X
   using machine-dependent assembler syntax.
   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */

output_address (rtx x)
{
  bool changed = false;
  walk_alter_subreg (&x, &changed);
  PRINT_OPERAND_ADDRESS (asm_out_file, x);
/* Print an integer constant expression in assembler syntax.
   Addition and subtraction are the only arithmetic
   that may appear in these expressions.  */

output_addr_const (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
      if (SYMBOL_REF_DECL (x))
	mark_decl_referenced (SYMBOL_REF_DECL (x));
#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
      assemble_name (file, XSTR (x, 0));

      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
      assemble_name (file, buf);

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));

      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));

      if (GET_MODE (x) == VOIDmode)
	  /* We can use %d if the number is one word and positive.  */
	  if (CONST_DOUBLE_HIGH (x))
	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
	  else if (CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");

      fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_FIXED_VALUE_LOW (x));

      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	  output_addr_const (file, XEXP (x, 1));
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 0));

	  output_addr_const (file, XEXP (x, 0));
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 1));

      /* Avoid outputting things like x-x or x+5-x,
	 since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
	  || GET_CODE (XEXP (x, 1)) == PC
	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
	output_addr_const (file, XEXP (x, 1));

	  fputs (targetm.asm_out.open_paren, file);
	  output_addr_const (file, XEXP (x, 1));
	  fputs (targetm.asm_out.close_paren, file);

      output_addr_const (file, XEXP (x, 0));

#ifdef OUTPUT_ADDR_CONST_EXTRA
      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);

      output_operand_lossage ("invalid expression as operand");
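/* For example (assembler-syntax details vary by target): the constant
   (const (plus (symbol_ref "table") (const_int 8))) prints as "table+8",
   and (const (plus (symbol_ref "table") (const_int -4))) prints as
   "table-4", since a negative CONST_INT addend suppresses the "+" and
   supplies its own sign; only addition and subtraction ever appear in
   these expressions.  */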
/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */
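/* Example of the extensions (the prefixes are target macros, so the exact
   output is target-dependent): on a port where REGISTER_PREFIX is "%" and
   LOCAL_LABEL_PREFIX is ".",

       asm_fprintf (file, "\tmov %R%s, %L%d\n", "eax", 42);

   would print "\tmov %eax, .42\n".  A HOST_WIDE_INT argument is printed
   with the width prefix described in the HOST_WIDE_INT_PRINT case below,
   which picks the correct length modifier for the host.  */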
asm_fprintf (FILE *file, const char *p, ...)
{
  va_start (argptr, p);

#ifdef ASSEMBLER_DIALECT
	/* If we want the first dialect, do nothing.  Otherwise, skip
	   DIALECT_NUMBER of strings ending with '|'.  */
	for (i = 0; i < dialect_number; i++)
	    while (*p && *p++ != '|')

	/* Skip to close brace.  */
	while (*p && *p++ != '}')

	while (strchr ("-+ #0", c))

	while (ISDIGIT (c) || c == '.')

	  case 'd': case 'i': case 'u':
	  case 'x': case 'X': case 'o':
	    fprintf (file, buf, va_arg (argptr, int));

	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
	       'o' cases, but we do not check for those cases.  It
	       means that the value is a HOST_WIDE_INT, which may be
	       either `long' or `long long'.  */
	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
	    q += strlen (HOST_WIDE_INT_PRINT);
	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));

#ifdef HAVE_LONG_LONG
	    fprintf (file, buf, va_arg (argptr, long long));
	    fprintf (file, buf, va_arg (argptr, long));

	    fprintf (file, buf, va_arg (argptr, char *));

#ifdef ASM_OUTPUT_OPCODE
	    ASM_OUTPUT_OPCODE (asm_out_file, p);

#ifdef REGISTER_PREFIX
	    fprintf (file, "%s", REGISTER_PREFIX);

#ifdef IMMEDIATE_PREFIX
	    fprintf (file, "%s", IMMEDIATE_PREFIX);

#ifdef LOCAL_LABEL_PREFIX
	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);

	    fputs (user_label_prefix, file);

#ifdef ASM_FPRINTF_EXTENSIONS
	    /* Uppercase letters are reserved for general use by asm_fprintf
	       and so are not available to target specific code.  In order to
	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
	       they are defined here.  As they get turned into real extensions
	       to asm_fprintf they should be removed from this list.  */
	  case 'A': case 'B': case 'C': case 'D': case 'E':
	  case 'F': case 'G': case 'H': case 'J': case 'K':
	  case 'M': case 'N': case 'P': case 'Q': case 'S':
	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
	    ASM_FPRINTF_EXTENSIONS (file, argptr, p)
/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.  */

split_double (rtx value, rtx *first, rtx *second)
{
  if (GET_CODE (value) == CONST_INT)
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
	{
	  /* In this case the CONST_INT holds both target words.
	     Extract the bits from it into two word-sized pieces.
	     Sign extend each half to HOST_WIDE_INT.  */
	  unsigned HOST_WIDE_INT low, high;
	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;

	  /* Set sign_bit to the most significant bit of a word.  */
	  sign_bit <<= BITS_PER_WORD - 1;

	  /* Set mask so that all bits of the word are set.  We could
	     have used 1 << BITS_PER_WORD instead of basing the
	     calculation on sign_bit.  However, on machines where
	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
	     compiler warning, even though the code would never be
	     executed.  */
	  mask = sign_bit << 1;

	  /* Set sign_extend as any remaining bits.  */
	  sign_extend = ~mask;

	  /* Pick the lower word and sign-extend it.  */
	  low = INTVAL (value);

	  /* Pick the higher word, shifted to the least significant
	     bits, and sign-extend it.  */
	  high = INTVAL (value);
	  high >>= BITS_PER_WORD - 1;
	  if (high & sign_bit)
	    high |= sign_extend;

	  /* Store the words in the target machine order.  */
	  if (WORDS_BIG_ENDIAN)
	      *first = GEN_INT (high);
	      *second = GEN_INT (low);

	      *first = GEN_INT (low);
	      *second = GEN_INT (high);
	}
	  /* The rule for using CONST_INT for a wider mode
	     is that we regard the value as signed.
	     So sign-extend it.  */
	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
	  if (WORDS_BIG_ENDIAN)
    }
  else if (GET_CODE (value) != CONST_DOUBLE)
      if (WORDS_BIG_ENDIAN)
	  *first = const0_rtx;

	  *second = const0_rtx;
  else if (GET_MODE (value) == VOIDmode
	   /* This is the old way we did CONST_DOUBLE integers.  */
	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
      /* In an integer, the words are defined as most and least significant.
	 So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
	  *second = GEN_INT (CONST_DOUBLE_LOW (value));

	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));

      REAL_VALUE_FROM_CONST_DOUBLE (r, value);

      /* Note, this converts the REAL_VALUE_TYPE to the target's
	 format, splits up the floating point double and outputs
	 exactly 32 bits of it into each of l[0] and l[1] --
	 not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

      /* If 32 bits is an entire word for the target, but not for the host,
	 then sign-extend on the host so that the number will look the same
	 way on the host that it would on the target.  See for instance
	 simplify_unary_operation.  The #if is needed to avoid compiler
	 warnings.  */
#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
	  if (l[0] & ((long) 1 << 31))
	    l[0] |= ((long) (-1) << 32);
	  if (l[1] & ((long) 1 << 31))
	    l[1] |= ((long) (-1) << 32);

      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
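/* Worked example (host with a 64-bit HOST_WIDE_INT, 32-bit target words):
   splitting the CONST_INT 0x123456789abcdef0 extracts low = 0x9abcdef0 and
   high = 0x12345678; because low has its word sign bit set it is widened to
   0xffffffff9abcdef0 in the host word, while high needs no extension.  The
   two GEN_INT results are then stored in memory order, i.e. the high word
   goes into *FIRST only when WORDS_BIG_ENDIAN.  */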
/* Return nonzero if this function has no function calls.  */

leaf_function_p (void)
{
  if (current_function_profile || profile_arc_flag)

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
	  && ! SIBLING_CALL_P (insn))
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
    }
  for (link = current_function_epilogue_delay_list;
       link = XEXP (link, 1))
    {
      insn = XEXP (link, 0);

	  && ! SIBLING_CALL_P (insn))
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
    }

/* Return 1 if branch is a forward branch.
   Uses the insn_shuid array, so it works only in the final pass.  May be
   used by output templates to add branch prediction hints.  */

final_forward_branch_p (rtx insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  gcc_assert (insn_id && label_id);
  return insn_id < label_id;
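/* Typical use (hypothetical port fragment, the hint suffixes are invented
   for illustration): an output template can pick a static prediction hint
   based on branch direction, e.g.

       return final_forward_branch_p (insn) ? "beq,pn\t%l0" : "beq,pt\t%l0";

   The point is only that the SHUID ordering gives a cheap forward/backward
   test during final.  */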
/* On some machines, a function with no call insns
   can run faster if it doesn't create its own register window.
   When output, the leaf function should use only the "output"
   registers.  Ordinarily, the function would be compiled to use
   the "input" registers to find its arguments; it is a candidate
   for leaf treatment if it uses only the "input" registers.
   Leaf function treatment means renumbering so the function
   uses the "output" registers instead.  */

#ifdef LEAF_REGISTERS

/* Return 1 if this function uses only the registers that can be
   safely renumbered.  */

only_leaf_regs_used (void)
{
  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((df_regs_ever_live_p (i) || global_regs[i])
	&& ! permitted_reg_in_leaf_functions[i])

  if (current_function_uses_pic_offset_table
      && pic_offset_table_rtx != 0
      && REG_P (pic_offset_table_rtx)
      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])

/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

leaf_renumber_regs (rtx first)
{
  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash, and should not be needed.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
  for (insn = current_function_epilogue_delay_list;
       insn = XEXP (insn, 1))
    if (INSN_P (XEXP (insn, 0)))
      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));

/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

leaf_renumber_regs_insn (rtx in_rtx)
{
  const char *format_ptr;

      /* Renumber all input-registers into output-registers.
	 renumbered_regs would be 1 for an output-register; ...  */

      /* Don't renumber the same reg twice.  */

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
	 to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)

      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);

  if (INSN_P (in_rtx))
      /* Inside a SEQUENCE, we find insns.
	 Renumber just the patterns of these insns,
	 just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
	leaf_renumber_regs_insn (XEXP (in_rtx, i));

	if (NULL != XVEC (in_rtx, i))
	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
      }
/* When -gused is used, emit debug info for only used symbols.  But in
   addition to the standard intercepted debug_hooks there are some direct
   calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
   Those routines may also be called from a higher level intercepted routine.  So
   to prevent recording data for an inner call to one of these for an intercept,
   we maintain an intercept nesting counter (debug_nesting).  We only save the
   intercepted arguments if the nesting is 1.  */
int debug_nesting = 0;

static tree *symbol_queue;
int symbol_queue_index = 0;
static int symbol_queue_size = 0;

/* Generate the symbols for any queued up type symbols we encountered
   while generating the type info for some originally used symbol.
   This might generate additional entries in the queue.  Only when
   the nesting depth goes to 0 is this routine called.  */

debug_flush_symbol_queue (void)
{
  /* Make sure that additionally queued items are not flushed
     prematurely.  */

  for (i = 0; i < symbol_queue_index; ++i)
    {
      /* If we pushed queued symbols then such symbols must be
	 output no matter what anyone else says.  Specifically,
	 we need to make sure dbxout_symbol() thinks the symbol was
	 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
	 which may be set for outside reasons.  */
      int saved_tree_used = TREE_USED (symbol_queue[i]);
      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
      TREE_USED (symbol_queue[i]) = 1;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;

#ifdef DBX_DEBUGGING_INFO
      dbxout_symbol (symbol_queue[i], 0);

      TREE_USED (symbol_queue[i]) = saved_tree_used;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
    }

  symbol_queue_index = 0;

/* Queue a type symbol needed as part of the definition of a decl
   symbol.  These symbols are generated when debug_flush_symbol_queue()
   is called.  */

debug_queue_symbol (tree decl)
{
  if (symbol_queue_index >= symbol_queue_size)
    {
      symbol_queue_size += 10;
      symbol_queue = xrealloc (symbol_queue,
			       symbol_queue_size * sizeof (tree));
    }

  symbol_queue[symbol_queue_index++] = decl;

/* Free symbol queue.  */

debug_free_queue (void)
{
      free (symbol_queue);
      symbol_queue = NULL;
      symbol_queue_size = 0;
/* Turn the RTL into assembly.  */

rest_of_handle_final (void)
{
  /* Get the function's name, as described by its RTL.  This may be
     different from the DECL_NAME name used in the source file.  */

  x = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (x));
  gcc_assert (GET_CODE (x) == SYMBOL_REF);
  fnname = XSTR (x, 0);

  assemble_start_function (current_function_decl, fnname);
  final_start_function (get_insns (), asm_out_file, optimize);
  final (get_insns (), asm_out_file, optimize);
  final_end_function ();

#ifdef TARGET_UNWIND_INFO
  /* ??? The IA-64 ".handlerdata" directive must be issued before
     the ".endp" directive that closes the procedure descriptor.  */
  output_function_exception_table (fnname);

  assemble_end_function (current_function_decl, fnname);

#ifndef TARGET_UNWIND_INFO
  /* Otherwise, it feels unclean to switch sections in the middle.  */
  output_function_exception_table (fnname);

  user_defined_section_attribute = false;

  /* Free up reg info memory.  */

  fflush (asm_out_file);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  (*debug_hooks->function_decl) (current_function_decl);
  timevar_pop (TV_SYMOUT);
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
				 decl_init_priority_lookup
				   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
				decl_fini_priority_lookup
				  (current_function_decl));
struct tree_opt_pass pass_final =
{
  rest_of_handle_final,			/* execute */
  0,					/* static_pass_number */
  TV_FINAL,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect,			/* todo_flags_finish */
};

rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
}

struct tree_opt_pass pass_shorten_branches =
{
  "shorten",				/* name */
  rest_of_handle_shorten_branches,	/* execute */
  0,					/* static_pass_number */
  TV_FINAL,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
};

rest_of_clean_state (void)
{
  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the function
     body.  If these remain pointing to the other insns, we end up preserving
     whole RTL chain and attached detailed debug info in memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      NEXT_INSN (insn) = NULL;
      PREV_INSN (insn) = NULL;
    }

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);

  reload_completed = 0;
  epilogue_completed = 0;
  regstack_completed = 0;

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */

  free_bb_for_insn ();

  if (targetm.binds_local_p (current_function_decl))
    {
      int pref = cfun->preferred_stack_boundary;
      if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
	pref = cfun->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);

struct tree_opt_pass pass_clean_state =
{
  rest_of_clean_state,			/* execute */
  0,					/* static_pass_number */
  TV_FINAL,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  PROP_rtl,				/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */