/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
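
/* Illustrative sketch only (not part of this file): the calling sequence
   described above, roughly as a compilation driver would use it.  The
   function FINAL_PASS_SKETCH and its parameter names INSNS, FILE and
   OPTIMIZE are hypothetical.  */
#if 0
static void
final_pass_sketch (rtx insns, FILE *file, int optimize)
{
  shorten_branches (insns);                     /* compute insn lengths and addresses */
  final_start_function (insns, file, optimize); /* assembler for function entry */
  final (insns, file, optimize, 0);             /* assembler for the insns themselves */
  final_end_function ();                        /* assembler for function exit */
}
#endif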
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "cfglayout.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data
				   declarations for e.g. AIX 4.x.  */
#endif

#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
#endif

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif
/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
   null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

#if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
#define HAVE_READONLY_DATA_SECTION 1
#else
#define HAVE_READONLY_DATA_SECTION 0
#endif
/* Last insn processed by final_scan_insn.  */
static rtx debug_insn;
rtx current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

extern int length_unit_log;	/* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't abort.
   The precise value is the insn being output, to pass to error_for_asm.  */
rtx this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */
static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */
static int insn_counter = 0;

/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;
/* Indexed by hardware reg number, is 1 if that register is ever
   used in the current function.

   In life_analysis, or in stupid_life_analysis, this is set
   up to record the hard regs used explicitly.  Reload adds
   in the hard regs used for holding pseudo regs.  Final uses
   it to generate the code in the function prologue and epilogue
   to save and restore registers as needed.  */

char regs_ever_live[FIRST_PSEUDO_REGISTER];

/* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
   Unlike regs_ever_live, elements of this array corresponding to
   eliminable regs like the frame pointer are set if an asm sets them.  */

char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];

/* Nonzero means current function must be given a frame pointer.
   Initialized in function.c to 0.  Set only in reload1.c as per
   the needs of the function.  */

int frame_pointer_needed;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;

/* Nonzero if have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

/* Indexed by line number, nonzero if there is a note for that line.  */

static char *line_note_exists;

#ifdef HAVE_conditional_execution
/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;
#endif
#ifdef HAVE_ATTR_length
static int asm_insn_count (rtx);
#endif
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx);
static rtx walk_alter_subreg (rtx *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
static void output_operand (rtx, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx);
#endif
static int alter_cond (rtx);
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
#ifdef HAVE_ATTR_length
static int align_fuzz (rtx, rtx, int, unsigned);
#endif
/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}
/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).  Zero if not in a delayed branch sequence.  */

int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

varray_type insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* Known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */

struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;
/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  insn_lengths_max_uid = 0;
#ifdef HAVE_ATTR_length
  INSN_ADDRESSES_FREE ();
#endif
}
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */

int
get_attr_length (rtx insn ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
  rtx body;
  int i;
  int length = 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];

  switch (GET_CODE (insn))
    {
    case CALL_INSN:
      length = insn_default_length (insn);
      break;

    case JUMP_INSN:
      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* Alignment is machine-dependent and should be handled by
	     ADDR_VEC_ALIGN.  */
	}
      else
	length = insn_default_length (insn);
      break;

    case INSN:
      body = PATTERN (insn);
      if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	return 0;
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	length = asm_insn_count (body) * insn_default_length (insn);
      else if (GET_CODE (body) == SEQUENCE)
	for (i = 0; i < XVECLEN (body, 0); i++)
	  length += get_attr_length (XVECEXP (body, 0, i));
      else
	length = insn_default_length (insn);
      break;

    default:
      break;
    }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
#else /* not HAVE_ATTR_length */
  return 0;
#endif /* not HAVE_ATTR_length */
}
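
/* Illustrative sketch only (not from this file): a common use of
   get_attr_length is in a port's output routine, choosing between a short
   and a long form of a branch.  The mnemonics, operand number and length
   threshold below are hypothetical.  */
#if 0
static const char *
hypothetical_output_branch (rtx insn)
{
  if (get_attr_length (insn) == 2)
    return "bshort\t%l0";	/* target known to be within short range */
  return "blong\t%l0";		/* fall back to the long encoding */
}
#endif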
/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

           if (X >= Y)
                   OX = round_up(IX, Y)
           else
                   OX = round_up(IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
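
/* A small worked example (not from the original sources): suppose block X
   starts at an alignment of X = 4 bytes, its instruction lengths sum to
   IX = 10 bytes, and the following block Y is aligned to Y = 16 bytes.
   Since X < Y, the safe estimate is

           OX = round_up(10, 4) + 16 - 4 = 12 + 12 = 24

   so the padding is estimated as OX - IX = 14 bytes, even though the real
   padding would only be 6 bytes if block X happened to start on a 16-byte
   boundary.  The deliberate overestimate is what makes the worst-case
   branch-length calculation below safe.  */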
#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LABEL_ALIGN_MAX_SKIP
#define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LOOP_ALIGN_MAX_SKIP
#define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif

#ifndef JUMP_ALIGN_MAX_SKIP
#define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
#endif

#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);
}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif
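
/* A small worked example (not from the original sources): an address table
   whose elements are HImode offsets has GET_MODE_SIZE == 2, so
   final_addr_vec_align returns exact_log2 (2) == 1, i.e. a 2-byte
   alignment; an SImode table gets exact_log2 (4) == 2, and the result is
   always capped at BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */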
#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)

/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  return LABEL_TO_ALIGNMENT (label);
}
#ifdef HAVE_ATTR_length
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */
/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}
/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */

int
insn_current_reference_address (rtx branch)
{
  rtx dest, seq;
  int seq_uid;

  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (GET_CODE (branch) != JUMP_INSN)
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
#endif /* HAVE_ATTR_length */
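
/* A small worked example (not from the original sources): suppose a forward
   branch sits in a SEQUENCE whose end address was 100 in the previous pass,
   and align_fuzz reports that alignment insns between it and its target can
   account for at most 6 bytes of change.  The reference address used for
   range checking is then 100 - 6 = 94; this overestimates the distance to
   the target, so the worst that can happen is that a longer branch encoding
   is chosen, never an out-of-range short one.  */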
void
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = xcalloc (max_labelno - min_labelno + 1,
			 sizeof (struct label_alignment));

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_size)
    return;

  FOR_EACH_BB (bb)
    {
      rtx label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;

      if (GET_CODE (label) != CODE_LABEL
	  || probably_never_executed_bb_p (bb))
	continue;
      max_log = LABEL_ALIGN (label);
      max_skip = LABEL_ALIGN_MAX_SKIP;

      for (e = bb->pred; e; e = e->pred_next)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}

      /* There are two purposes to align block with no fallthru incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_frequency > BB_FREQ_MAX / 10
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = JUMP_ALIGN_MAX_SKIP;
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && maybe_hot_bb_p (bb)
	  && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
	  && branch_frequency > fallthru_frequency * 2)
	{
	  log = LOOP_ALIGN (label);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LOOP_ALIGN_MAX_SKIP;
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }
}
/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times:  for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */

void
shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
  rtx insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#ifdef HAVE_ATTR_length
#define MAX_CODE_ALIGN 16
  rtx seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

#endif /* HAVE_ATTR_length */

  /* Compute maximum UID and allocate label_align / uid_shuid.  */
  max_uid = get_max_uid ();

  uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);

  if (max_labelno != max_label_num ())
    {
      int old = max_labelno;
      int n_labels;
      int n_old_labels;

      max_labelno = max_label_num ();

      n_labels = max_labelno - min_labelno + 1;
      n_old_labels = old - min_labelno + 1;

      label_align = xrealloc (label_align,
			      n_labels * sizeof (struct label_alignment));

      /* Range of labels grows monotonically in the function.  Abort here
	 means that the initialization of array got lost.  */
      if (n_old_labels > n_labels)
	abort ();

      memset (label_align + n_old_labels, 0,
	      (n_labels - n_old_labels) * sizeof (struct label_alignment));
    }

  /* Initialize label_align and set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;
      if (INSN_P (insn))
	{
	  /* reorg might make the first insn of a loop being run once only,
	     and delete the label in front of it.  Then we want to apply
	     the loop alignment to the new label created by reorg, which
	     is separated by the former loop start insn from the
	     NOTE_INSN_LOOP_BEG.  */
	}
      else if (GET_CODE (insn) == CODE_LABEL)
	{
	  rtx next;

	  /* Merge in alignments computed by compute_alignments.  */
	  log = LABEL_TO_ALIGNMENT (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_TO_MAX_SKIP (insn);
	    }

	  log = LABEL_ALIGN (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_ALIGN_MAX_SKIP;
	    }
	  next = NEXT_INSN (insn);
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
	    if (next && GET_CODE (next) == JUMP_INSN)
	      {
		rtx nextbody = PATTERN (next);
		if (GET_CODE (nextbody) == ADDR_VEC
		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
		  {
		    log = ADDR_VEC_ALIGN (next);
		    if (max_log < log)
		      {
			max_log = log;
			max_skip = LABEL_ALIGN_MAX_SKIP;
		      }
		  }
	      }
	  LABEL_TO_ALIGNMENT (insn) = max_log;
	  LABEL_TO_MAX_SKIP (insn) = max_skip;
	  max_log = 0;
	  max_skip = 0;
	}
      else if (GET_CODE (insn) == BARRIER)
	{
	  rtx label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (GET_CODE (label) == CODE_LABEL)
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
		  }
		break;
	      }
	}
    }
#ifdef HAVE_ATTR_length

  /* Allocate the rest of the arrays.  */
  insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = xcalloc (max_uid, sizeof (char));

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = xcalloc (max_uid, sizeof *uid_align);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;

      log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }
#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
	 label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (GET_CODE (insn) != JUMP_INSN
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);

	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
	      if (min_align > LABEL_TO_ALIGNMENT (lab))
		min_align = LABEL_TO_ALIGNMENT (lab);
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	{
	  int log = LABEL_TO_ALIGNMENT (insn);
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}

      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];

      if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
	  || GET_CODE (insn) == CODE_LABEL)
	continue;
      if (INSN_DELETED_P (insn))
	continue;

      body = PATTERN (insn);
      if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	{
	  /* This only takes room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
	    insn_lengths[uid] = (XVECLEN (body,
					  GET_CODE (body) == ADDR_DIFF_VEC)
				 * GET_MODE_SIZE (GET_MODE (body)));
	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
	}
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
      else if (GET_CODE (body) == SEQUENCE)
	{
	  int const_delay_slots;
#ifdef DELAY_SLOTS
	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
#else
	  const_delay_slots = 0;
#endif
	  /* Inside a delay slot sequence, we do not do any branch shortening
	     if the shortening could change the number of delay slots
	     of the branch.  */
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    {
	      rtx inner_insn = XVECEXP (body, 0, i);
	      int inner_uid = INSN_UID (inner_insn);
	      int inner_length;

	      if (GET_CODE (body) == ASM_INPUT
		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
		inner_length = (asm_insn_count (PATTERN (inner_insn))
				* insn_default_length (inner_insn));
	      else
		inner_length = insn_default_length (inner_insn);

	      insn_lengths[inner_uid] = inner_length;
	      if (const_delay_slots)
		{
		  if ((varying_length[inner_uid]
		       = insn_variable_length_p (inner_insn)) != 0)
		    varying_length[uid] = 1;
		  INSN_ADDRESSES (inner_uid) = (insn_current_address
						+ insn_lengths[uid]);
		}
	      else
		varying_length[inner_uid] = 0;
	      insn_lengths[uid] += inner_length;
	    }
	}
      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
	{
	  insn_lengths[uid] = insn_default_length (insn);
	  varying_length[uid] = insn_variable_length_p (insn);
	}

      /* If needed, do any adjustment.  */
#ifdef ADJUST_INSN_LENGTH
      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
      if (insn_lengths[uid] < 0)
	fatal_insn ("negative insn length", insn);
#endif
    }

  /* Now loop over all the insns finding varying length insns.  For each,
     get the current insn length.  If it has changed, reflect the change.
     When nothing changes for a full pass, we are done.  */

  while (something_changed)
    {
      something_changed = 0;
      insn_current_align = MAX_CODE_ALIGN - 1;
      for (insn_current_address = 0, insn = first;
	   insn != 0;
	   insn = NEXT_INSN (insn))
	{
	  int new_length;
#ifdef ADJUST_INSN_LENGTH
	  int tmp_length;
#endif
	  int length_align;

	  uid = INSN_UID (insn);

	  if (GET_CODE (insn) == CODE_LABEL)
	    {
	      int log = LABEL_TO_ALIGNMENT (insn);
	      if (log > insn_current_align)
		{
		  int align = 1 << log;
		  int new_address = (insn_current_address + align - 1) & -align;
		  insn_lengths[uid] = new_address - insn_current_address;
		  insn_current_align = log;
		  insn_current_address = new_address;
		}
	      else
		insn_lengths[uid] = 0;
	      INSN_ADDRESSES (uid) = insn_current_address;
	      continue;
	    }

	  length_align = INSN_LENGTH_ALIGNMENT (insn);
	  if (length_align < insn_current_align)
	    insn_current_align = length_align;

	  insn_last_address = INSN_ADDRESSES (uid);
	  INSN_ADDRESSES (uid) = insn_current_address;

#ifdef CASE_VECTOR_SHORTEN_MODE
	  if (optimize && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      rtx body = PATTERN (insn);
	      int old_length = insn_lengths[uid];
	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
	      rtx min_lab = XEXP (XEXP (body, 2), 0);
	      rtx max_lab = XEXP (XEXP (body, 3), 0);
	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
	      rtx prev;
	      int rel_align = 0;
	      addr_diff_vec_flags flags;

	      /* Avoid automatic aggregate initialization.  */
	      flags = ADDR_DIFF_VEC_FLAGS (body);

	      /* Try to find a known alignment for rel_lab.  */
	      for (prev = rel_lab;
		   prev
		   && ! insn_lengths[INSN_UID (prev)]
		   && ! (varying_length[INSN_UID (prev)] & 1);
		   prev = PREV_INSN (prev))
		if (varying_length[INSN_UID (prev)] & 2)
		  {
		    rel_align = LABEL_TO_ALIGNMENT (prev);
		    break;
		  }

	      /* See the comment on addr_diff_vec_flags in rtl.h for the
		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
	      /* Anything after INSN has still addresses from the last
		 pass; adjust these so that they reflect our current
		 estimate for this pass.  */
	      if (flags.base_after_vec)
		rel_addr += insn_current_address - insn_last_address;
	      if (flags.min_after_vec)
		min_addr += insn_current_address - insn_last_address;
	      if (flags.max_after_vec)
		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
	      if (flags.min_after_base)
		{
		  /* If INSN is between REL_LAB and MIN_LAB, the size
		     changes we are about to make can change the alignment
		     within the observed offset, therefore we have to break
		     it up into two parts that are independent.  */
		  if (! flags.base_after_vec && flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
		    }
		  else
		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
		    }
		  else
		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
		}
	      /* Likewise, determine the highest lowest possible value
		 for the offset of MAX_LAB.  */
	      if (flags.max_after_base)
		{
		  if (! flags.base_after_vec && flags.max_after_vec)
		    {
		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
		    }
		  else
		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.max_after_vec)
		    {
		      max_addr += align_fuzz (max_lab, insn, 0, 0);
		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
		    }
		  else
		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
		}
	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
							max_addr - rel_addr,
							body));
	      if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
		{
		  insn_lengths[uid]
		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
		  insn_current_address += insn_lengths[uid];
		  if (insn_lengths[uid] != old_length)
		    something_changed = 1;
		}

	      continue;
	    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

	  if (! (varying_length[uid]))
	    {
	      if (GET_CODE (insn) == INSN
		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  body = PATTERN (insn);
		  for (i = 0; i < XVECLEN (body, 0); i++)
		    {
		      rtx inner_insn = XVECEXP (body, 0, i);
		      int inner_uid = INSN_UID (inner_insn);

		      INSN_ADDRESSES (inner_uid) = insn_current_address;

		      insn_current_address += insn_lengths[inner_uid];
		    }
		}
	      else
		insn_current_address += insn_lengths[uid];

	      continue;
	    }

	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      body = PATTERN (insn);
	      new_length = 0;
	      for (i = 0; i < XVECLEN (body, 0); i++)
		{
		  rtx inner_insn = XVECEXP (body, 0, i);
		  int inner_uid = INSN_UID (inner_insn);
		  int inner_length;

		  INSN_ADDRESSES (inner_uid) = insn_current_address;

		  /* insn_current_length returns 0 for insns with a
		     non-varying length.  */
		  if (! varying_length[inner_uid])
		    inner_length = insn_lengths[inner_uid];
		  else
		    inner_length = insn_current_length (inner_insn);

		  if (inner_length != insn_lengths[inner_uid])
		    {
		      insn_lengths[inner_uid] = inner_length;
		      something_changed = 1;
		    }
		  insn_current_address += insn_lengths[inner_uid];
		  new_length += inner_length;
		}
	    }
	  else
	    {
	      new_length = insn_current_length (insn);
	      insn_current_address += new_length;
	    }

#ifdef ADJUST_INSN_LENGTH
	  /* If needed, do any adjustment.  */
	  tmp_length = new_length;
	  ADJUST_INSN_LENGTH (insn, new_length);
	  insn_current_address += (new_length - tmp_length);
#endif

	  if (new_length != insn_lengths[uid])
	    {
	      insn_lengths[uid] = new_length;
	      something_changed = 1;
	    }
	}
      /* For a non-optimizing compile, do only a single pass.  */
      if (!optimize)
	break;
    }

  free (varying_length);

#endif /* HAVE_ATTR_length */
}
#ifdef HAVE_ATTR_length
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */

static int
asm_insn_count (rtx body)
{
  const char *template;
  int count = 1;

  if (GET_CODE (body) == ASM_INPUT)
    template = XSTR (body, 0);
  else
    template = decode_asm_operands (body, NULL, NULL, NULL, NULL);

  for (; *template; template++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
      count++;

  return count;
}
#endif
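
/* A small worked example (not from the original sources), assuming the
   default IS_ASM_LOGICAL_LINE_SEPARATOR of ';' and a count that starts at
   one for the first instruction: the templates "mov %1,%0; add %2,%0" and
   "mov %1,%0\n\tadd %2,%0" each count as 2 instructions, so the asm's
   length is estimated as 2 * insn_default_length (insn).  */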
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
		      int optimize ATTRIBUTE_UNUSED)
{
  this_is_asm_operands = 0;

  last_filename = locator_file (prologue_locator);
  last_linenum = locator_line (prologue_locator);

  high_block_linenum = high_function_linenum = last_linenum;

  (*debug_hooks->begin_prologue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
    dwarf2out_begin_prologue (0, NULL);
#endif

#ifdef LEAF_REG_REMAP
  if (current_function_uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
#ifdef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* PROFILE_BEFORE_PROLOGUE */

#if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
  if (dwarf2out_do_frame ())
    dwarf2out_frame_debug (NULL_RTX);
#endif

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      remove_unnecessary_notes ();
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  /* First output the function prologue: code to set up the stack frame.  */
  (*targetm.asm_out.function_prologue) (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
#ifdef HAVE_prologue
  if (! HAVE_prologue)
#endif
    profile_after_prologue (file);
}

static void
profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef PROFILE_BEFORE_PROLOGUE
  if (current_function_profile)
    profile_function (file);
#endif /* not PROFILE_BEFORE_PROLOGUE */
}
static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS	0
#endif
#if defined(ASM_OUTPUT_REG_PUSH)
  int sval = current_function_returns_struct;
  rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
#if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
  int cxt = current_function_needs_context;
#endif
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);

      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  function_section (current_function_decl);

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
    ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
#endif

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
#endif
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
#else
#if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
  if (cxt)
    ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
#endif
#endif

#if defined(ASM_OUTPUT_REG_PUSH)
  if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
    ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
#endif
}
/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  (*debug_hooks->end_function) (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());

  /* And debug output.  */
  (*debug_hooks->end_epilogue) (last_linenum, last_filename);

#if defined (DWARF2_UNWIND_INFO)
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);
#endif
}
/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.

   PRESCAN is 1 if we are not really outputting,
     just scanning as if we were outputting.
   Prescanning deletes and rearranges insns just like ordinary output.
   PRESCAN is -2 if we are outputting after having prescanned.
   In this case, don't try to delete or rearrange insns
   because that has already been done.
   Prescanning is done only on certain machines.  */

void
final (rtx first, FILE *file, int optimize, int prescan)
{
  rtx insn;
  int max_uid = 0;
  int max_line = 0;

  last_ignored_compare = 0;

  /* Make a map indicating which line numbers appear in this function.
     When producing SDB debugging info, delete troublesome line number
     notes from inlined functions in other files as well as duplicate
     line number notes.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    {
      rtx last = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	  {
	    if ((RTX_INTEGRATED_P (insn)
		 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
		|| (last != 0
		    && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
		    && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
	      {
		delete_insn (insn);	/* Use delete_note.  */
		continue;
	      }
	    last = insn;
	    if (NOTE_LINE_NUMBER (insn) > max_line)
	      max_line = NOTE_LINE_NUMBER (insn);
	  }
    }
  else
#endif
    {
      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > max_line)
	  max_line = NOTE_LINE_NUMBER (insn);
    }

  line_note_exists = xcalloc (max_line + 1, sizeof (char));

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)	/* Find largest UID.  */
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
	line_note_exists[NOTE_LINE_NUMBER (insn)] = 1;

      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize && GET_CODE (insn) == JUMP_INSN)
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
    }

  /* Output the insns.  */
  for (insn = NEXT_INSN (first); insn;)
    {
#ifdef HAVE_ATTR_length
      if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	{
	  /* This can be triggered by bugs elsewhere in the compiler if
	     new insns are created after init_insn_lengths is called.  */
	  if (GET_CODE (insn) == NOTE)
	    insn_current_address = -1;
	  else
	    abort ();
	}
      else
	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
#endif /* HAVE_ATTR_length */

      insn = final_scan_insn (insn, file, optimize, prescan, 0);
    }

  free (line_note_exists);
  line_note_exists = NULL;
}
const char *
get_insn_template (int code, rtx insn)
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
      return insn_data[code].output.single;
    case INSN_OUTPUT_FORMAT_MULTI:
      return insn_data[code].output.multi[which_alternative];
    case INSN_OUTPUT_FORMAT_FUNCTION:
      return (*insn_data[code].output.function) (recog_data.operand, insn);

    default:
      abort ();
    }
}
/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional.  */
static void
output_alternate_entry_point (FILE *file, rtx insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
    case LABEL_GLOBAL_ENTRY:
      (*targetm.asm_out.globalize_label) (file, name);
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    default:
      abort ();
    }
}
/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used for within delayed branch sequence output).  */

rtx
final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
		 int prescan, int nopeepholes ATTRIBUTE_UNUSED)
{
#ifdef HAVE_cc0
  rtx set;
#endif

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (INSN_DELETED_P (insn))
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_LINE_NUMBER (insn))
	{
	case NOTE_INSN_DELETED:
	case NOTE_INSN_LOOP_BEG:
	case NOTE_INSN_LOOP_END:
	case NOTE_INSN_LOOP_END_TOP_COND:
	case NOTE_INSN_LOOP_CONT:
	case NOTE_INSN_LOOP_VTOP:
	case NOTE_INSN_FUNCTION_END:
	case NOTE_INSN_REPEATED_LINE_NUMBER:
	case NOTE_INSN_EXPECTED_VALUE:
	  break;

	case NOTE_INSN_BASIC_BLOCK:
#ifdef IA64_UNWIND_INFO
	  IA64_UNWIND_EMIT (asm_out_file, insn);
#endif
	  fprintf (asm_out_file, "\t%s basic block %d\n",
		   ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
	  break;

	case NOTE_INSN_EH_REGION_BEG:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_EH_REGION_END:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_PROLOGUE_END:
	  (*targetm.asm_out.function_end_prologue) (file);
	  profile_after_prologue (file);
	  break;

	case NOTE_INSN_EPILOGUE_BEG:
	  (*targetm.asm_out.function_begin_epilogue) (file);
	  break;

	case NOTE_INSN_FUNCTION_BEG:
	  (*debug_hooks->end_prologue) (last_linenum, last_filename);
	  break;

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF_DEBUG
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      high_block_linenum = last_linenum;

	      /* Output debugging info about the symbol-block beginning.  */
	      (*debug_hooks->begin_block) (last_linenum, n);

	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	    }
	  break;

	case NOTE_INSN_BLOCK_END:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF_DEBUG
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      /* End of a symbol-block.  */
	      if (block_depth < 0)
		abort ();

	      (*debug_hooks->end_block) (high_block_linenum, n);
	    }
	  break;

	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken.  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
	  break;

	default:
	  if (NOTE_LINE_NUMBER (insn) <= 0)
	    abort ();
	  break;
	}
      break;

    case BARRIER:
#if defined (DWARF2_UNWIND_INFO)
      if (dwarf2out_do_frame ())
	dwarf2out_frame_debug (insn);
#endif
      break;

    case CODE_LABEL:
      /* The target port might emit labels in the output function for
	 some insn, e.g. sh.c output_branchy_insn.  */
      if (CODE_LABEL_NUMBER (insn) <= max_labelno)
	{
	  int align = LABEL_TO_ALIGNMENT (insn);
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
	  int max_skip = LABEL_TO_MAX_SKIP (insn);
#endif

	  if (align && NEXT_INSN (insn))
	    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
#else
#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
	      ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
#else
	      ASM_OUTPUT_ALIGN (file, align);
#endif
#endif
	    }
	}

      /* If this label is reached from only one place, set the condition
	 codes from the instruction just before the branch.  */

      /* Disabled because some insns set cc_status in the C output code
	 and NOTICE_UPDATE_CC alone can set incorrect status.  */
      if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
	{
	  rtx jump = LABEL_REFS (insn);
	  rtx barrier = prev_nonnote_insn (insn);
	  rtx prev;
	  /* If the LABEL_REFS field of this label has been set to point
	     at a branch, the predecessor of the branch is a regular
	     insn, and that branch is the only way to reach this label,
	     set the condition codes based on the branch and its
	     predecessor.  */
	  if (barrier && GET_CODE (barrier) == BARRIER
	      && jump && GET_CODE (jump) == JUMP_INSN
	      && (prev = prev_nonnote_insn (jump))
	      && GET_CODE (prev) == INSN)
	    {
	      NOTICE_UPDATE_CC (PATTERN (prev), prev);
	      NOTICE_UPDATE_CC (PATTERN (jump), jump);
	    }
	}

      if (LABEL_NAME (insn))
	(*debug_hooks->label) (insn);

      if (app_on)
	{
	  fputs (ASM_APP_OFF, file);
	  app_on = 0;
	}
      if (NEXT_INSN (insn) != 0
	  && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
	{
	  rtx nextbody = PATTERN (NEXT_INSN (insn));

	  /* If this label is followed by a jump-table,
	     make sure we put the label in the read-only section.  Also
	     possibly write the label and jump table together.  */

	  if (GET_CODE (nextbody) == ADDR_VEC
	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
	    {
#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	      /* In this case, the case vector is being moved by the
		 target, so don't output the label at all.  Leave that
		 to the back end macros.  */
#else
	      if (! JUMP_TABLES_IN_TEXT_SECTION)
		{
		  int log_align;

		  readonly_data_section ();

#ifdef ADDR_VEC_ALIGN
		  log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
#else
		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
		  ASM_OUTPUT_ALIGN (file, log_align);
		}
	      else
		function_section (current_function_decl);

#ifdef ASM_OUTPUT_CASE_LABEL
	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
				     NEXT_INSN (insn));
#else
	      (*targetm.asm_out.internal_label) (file, "L",
						 CODE_LABEL_NUMBER (insn));
#endif
#endif
	      break;
	    }
	}
      if (LABEL_ALT_ENTRY_P (insn))
	output_alternate_entry_point (file, insn);
      else
	(*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
      break;
    default:
      {
	rtx body = PATTERN (insn);
	int insn_code_number;
	const char *template;

	/* An INSN, JUMP_INSN or CALL_INSN.
	   First check for special kinds that recog doesn't recognize.  */

	if (GET_CODE (body) == USE /* These are just declarations.  */
	    || GET_CODE (body) == CLOBBER)
	  break;

#ifdef HAVE_cc0
	{
	  /* If there is a REG_CC_SETTER note on this insn, it means that
	     the setting of the condition code was done in the delay slot
	     of the insn that branched here.  So recover the cc status
	     from the insn that set it.  */

	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
	  if (note)
	    {
	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
	      cc_prev_status = cc_status;
	    }
	}
#endif

	/* Detect insns that are really jump-tables
	   and output them as such.  */

	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
	    int vlen, idx;
#endif

	    if (app_on)
	      {
		fputs (ASM_APP_OFF, file);
		app_on = 0;
	      }

#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	    if (GET_CODE (body) == ADDR_VEC)
	      {
#ifdef ASM_OUTPUT_ADDR_VEC
		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
#else
		abort ();
#endif
	      }
	    else
	      {
#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
#else
		abort ();
#endif
	      }
#else
	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
	    for (idx = 0; idx < vlen; idx++)
	      {
		if (GET_CODE (body) == ADDR_VEC)
		  {
#ifdef ASM_OUTPUT_ADDR_VEC_ELT
		    ASM_OUTPUT_ADDR_VEC_ELT
		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
#else
		    abort ();
#endif
		  }
		else
		  {
#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
		    ASM_OUTPUT_ADDR_DIFF_ELT
		      (file,
		       body,
		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
#else
		    abort ();
#endif
		  }
	      }
#ifdef ASM_OUTPUT_CASE_END
	    ASM_OUTPUT_CASE_END (file,
				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
				 insn);
#endif
#endif

	    function_section (current_function_decl);

	    break;
	  }
	/* Output this line note if it is the first or the last line
	   note in a row.  */
	if (notice_source_line (insn))
	  {
	    (*debug_hooks->source_line) (last_linenum, last_filename);
	  }

	if (GET_CODE (body) == ASM_INPUT)
	  {
	    const char *string = XSTR (body, 0);

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    if (*string)
	      {
		if (! app_on)
		  {
		    fputs (ASM_APP_ON, file);
		    app_on = 1;
		  }
		fprintf (asm_out_file, "\t%s\n", string);
	      }
	    break;
	  }

	/* Detect `asm' construct with operands.  */
	if (asm_noperands (body) >= 0)
	  {
	    unsigned int noperands = asm_noperands (body);
	    rtx *ops = alloca (noperands * sizeof (rtx));
	    const char *string;

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    /* Get out the operand values.  */
	    string = decode_asm_operands (body, ops, NULL, NULL, NULL);
	    /* Inhibit aborts on what would otherwise be compiler bugs.  */
	    insn_noperands = noperands;
	    this_is_asm_operands = insn;

#ifdef FINAL_PRESCAN_INSN
	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
#endif

	    /* Output the insn using them.  */
	    if (*string)
	      {
		if (! app_on)
		  {
		    fputs (ASM_APP_ON, file);
		    app_on = 1;
		  }
		output_asm_insn (string, ops);
	      }

	    this_is_asm_operands = 0;
	    break;
	  }

	if (prescan <= 0 && app_on)
	  {
	    fputs (ASM_APP_OFF, file);
	    app_on = 0;
	  }

	if (GET_CODE (body) == SEQUENCE)
	  {
	    /* A delayed-branch sequence */
	    int i;
	    rtx next;

	    final_sequence = body;

	    /* Record the delay slots' frame information before the branch.
	       This is needed for delayed calls: see execute_cfa_program().  */
#if defined (DWARF2_UNWIND_INFO)
	    if (dwarf2out_do_frame ())
	      for (i = 1; i < XVECLEN (body, 0); i++)
		dwarf2out_frame_debug (XVECEXP (body, 0, i));
#endif

	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
	       force the restoration of a comparison that was previously
	       thought unnecessary.  If that happens, cancel this sequence
	       and cause that insn to be restored.  */

	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1);
	    if (next != XVECEXP (body, 0, 1))
	      {
		final_sequence = 0;
		return next;
	      }

	    for (i = 1; i < XVECLEN (body, 0); i++)
	      {
		rtx insn = XVECEXP (body, 0, i);
		rtx next = NEXT_INSN (insn);
		/* We loop in case any instruction in a delay slot gets
		   split.  */
		do
		  insn = final_scan_insn (insn, file, 0, prescan, 1);
		while (insn != next);
	      }
#ifdef DBR_OUTPUT_SEQEND
	    DBR_OUTPUT_SEQEND (file);
#endif
	    final_sequence = 0;

	    /* If the insn requiring the delay slot was a CALL_INSN, the
	       insns in the delay slot are actually executed before the
	       called function.  Hence we don't preserve any CC-setting
	       actions in these insns and the CC must be marked as being
	       clobbered by the function.  */
	    if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
	      {
		CC_STATUS_INIT;
	      }
	    break;
	  }

	/* We have a real machine instruction as rtl.  */

	body = PATTERN (insn);

#ifdef HAVE_cc0
	set = single_set (insn);

	/* Check for redundant test and compare instructions
	   (when the condition codes are already set up as desired).
	   This is done only when optimizing; if not optimizing,
	   it should be possible for the user to alter a variable
	   with the debugger in between statements
	   and the next statement should reexamine the variable
	   to compute the condition codes.  */

	if (optimize)
	  {
	    if (set
		&& GET_CODE (SET_DEST (set)) == CC0
		&& insn != last_ignored_compare)
	      {
		if (GET_CODE (SET_SRC (set)) == SUBREG)
		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
		else if (GET_CODE (SET_SRC (set)) == COMPARE)
		  {
		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
		      XEXP (SET_SRC (set), 0)
			= alter_subreg (&XEXP (SET_SRC (set), 0));
		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
		      XEXP (SET_SRC (set), 1)
			= alter_subreg (&XEXP (SET_SRC (set), 1));
		  }
		if ((cc_status.value1 != 0
		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
		    || (cc_status.value2 != 0
			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
		  {
		    /* Don't delete insn if it has an addressing side-effect.  */
		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
			/* or if anything in it is volatile.  */
			&& ! volatile_refs_p (PATTERN (insn)))
		      {
			/* We don't really delete the insn; just ignore it.  */
			last_ignored_compare = insn;
			break;
		      }
		  }
	      }
	  }
#endif
2173 /* Don't bother outputting obvious no-ops, even without -O.
2174 This optimization is fast and doesn't interfere with debugging.
2175 Don't do this if the insn is in a delay slot, since this
2176 will cause an improper number of delay insns to be written. */
      if (final_sequence == 0
          && GET_CODE (insn) == INSN && GET_CODE (body) == SET
          && GET_CODE (SET_SRC (body)) == REG
          && GET_CODE (SET_DEST (body)) == REG
          && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))

      /* If this is a conditional branch, maybe modify it
         if the cc's are in a nonstandard state
         so that it accomplishes the same thing that it would
         do straightforwardly if the cc's were set up normally.  */

      if (cc_status.flags != 0
          && GET_CODE (insn) == JUMP_INSN
          && GET_CODE (body) == SET
          && SET_DEST (body) == pc_rtx
          && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
          && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
          && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
          /* This is done during prescan; it is not done again
             in final scan when prescan has been done.  */

          /* This function may alter the contents of its argument
             and clear some of the cc_status.flags bits.
             It may also return 1 meaning condition now always true
             or -1 meaning condition now always false
             or 2 meaning condition nontrivial but altered.  */
          int result = alter_cond (XEXP (SET_SRC (body), 0));

          /* If condition now has fixed value, replace the IF_THEN_ELSE
             with its then-operand or its else-operand.  */
          if (result == 1)
            SET_SRC (body) = XEXP (SET_SRC (body), 1);
          if (result == -1)
            SET_SRC (body) = XEXP (SET_SRC (body), 2);

          /* The jump is now either unconditional or a no-op.
             If it has become a no-op, don't try to output it.
             (It would not be recognized.)  */
          if (SET_SRC (body) == pc_rtx)

          else if (GET_CODE (SET_SRC (body)) == RETURN)
            /* Replace (set (pc) (return)) with (return).  */
            PATTERN (insn) = body = SET_SRC (body);

          /* Rerecognize the instruction if it has changed.  */
          INSN_CODE (insn) = -1;
      /* Make same adjustments to instructions that examine the
         condition codes without jumping and instructions that
         handle conditional moves (if this machine has either one).  */

      if (cc_status.flags != 0

          rtx cond_rtx, then_rtx, else_rtx;

          if (GET_CODE (insn) != JUMP_INSN
              && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
            {
              cond_rtx = XEXP (SET_SRC (set), 0);
              then_rtx = XEXP (SET_SRC (set), 1);
              else_rtx = XEXP (SET_SRC (set), 2);
            }
          else
            {
              cond_rtx = SET_SRC (set);
              then_rtx = const_true_rtx;
              else_rtx = const0_rtx;
            }

          switch (GET_CODE (cond_rtx))

              if (XEXP (cond_rtx, 0) != cc0_rtx)

              result = alter_cond (cond_rtx);
              if (result == 1)
                validate_change (insn, &SET_SRC (set), then_rtx, 0);
              else if (result == -1)
                validate_change (insn, &SET_SRC (set), else_rtx, 0);
              else if (result == 2)
                INSN_CODE (insn) = -1;
              if (SET_DEST (set) == SET_SRC (set))
#ifdef HAVE_peephole
      /* Do machine-specific peephole optimizations if desired.  */

      if (optimize && !flag_no_peephole && !nopeepholes)
        {
          rtx next = peephole (insn);
          /* When peepholing, if there were notes within the peephole,
             emit them before the peephole.  */
          if (next != 0 && next != NEXT_INSN (insn))
            {
              rtx prev = PREV_INSN (insn);

              for (note = NEXT_INSN (insn); note != next;
                   note = NEXT_INSN (note))
                final_scan_insn (note, file, optimize, prescan, nopeepholes);

              /* In case this is prescan, put the notes
                 in proper position for later rescan.  */
              note = NEXT_INSN (insn);
              PREV_INSN (note) = prev;
              NEXT_INSN (prev) = note;
              NEXT_INSN (PREV_INSN (next)) = insn;
              PREV_INSN (insn) = PREV_INSN (next);
              NEXT_INSN (insn) = next;
              PREV_INSN (next) = insn;
            }

          /* PEEPHOLE might have changed this.  */
          body = PATTERN (insn);
        }
#endif
      /* Try to recognize the instruction.
         If successful, verify that the operands satisfy the
         constraints for the instruction.  Crash if they don't,
         since `reload' should have changed them so that they do.  */

      insn_code_number = recog_memoized (insn);
      cleanup_subreg_operands (insn);

      /* Dump the insn in the assembly for debugging.  */
      if (flag_dump_rtl_in_asm)
        {
          print_rtx_head = ASM_COMMENT_START;
          print_rtl_single (asm_out_file, insn);
          print_rtx_head = "";
        }

      if (! constrain_operands_cached (1))
        fatal_insn_not_found (insn);

      /* Some target machines need to prescan each insn before
         it is output.  */
#ifdef FINAL_PRESCAN_INSN
      FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
#endif

#ifdef HAVE_conditional_execution
      if (GET_CODE (PATTERN (insn)) == COND_EXEC)
        current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
      else
        current_insn_predicate = NULL_RTX;
#endif

      cc_prev_status = cc_status;

      /* Update `cc_status' for this instruction.
         The instruction's output routine may change it further.
         If the output routine for a jump insn needs to depend
         on the cc status, it should look at cc_prev_status.  */

      NOTICE_UPDATE_CC (body, insn);

      current_output_insn = debug_insn = insn;

#if defined (DWARF2_UNWIND_INFO)
      if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
        dwarf2out_frame_debug (insn);
#endif
      /* Find the proper template for this insn.  */
      template = get_insn_template (insn_code_number, insn);

      /* If the C code returns 0, it means that it is a jump insn
         which follows a deleted test insn, and that test insn
         needs to be reinserted.  */
          if (prev_nonnote_insn (insn) != last_ignored_compare)

          /* We have already processed the notes between the setter and
             the user.  Make sure we don't process them again, this is
             particularly important if one of the notes is a block
             scope note or an EH note.  */
          for (prev = PREV_INSN (insn);
               prev != last_ignored_compare;
               prev = PREV_INSN (prev))
            if (GET_CODE (prev) == NOTE)
              delete_insn (prev);       /* Use delete_note.  */

      /* If the template is the string "#", it means that this insn must
         be split.  */
      if (template[0] == '#' && template[1] == '\0')
          rtx new = try_split (body, insn, 0);

          /* If we didn't split the insn, go away.  */
          if (new == insn && PATTERN (new) == body)
            fatal_insn ("could not split insn", insn);

#ifdef HAVE_ATTR_length
          /* This instruction should have been split in shorten_branches,
             to ensure that we would have valid length info for the
             splittable insns.  */
#endif

#ifdef IA64_UNWIND_INFO
      IA64_UNWIND_EMIT (asm_out_file, insn);
#endif

      /* Output assembler code from the template.  */
      output_asm_insn (template, recog_data.operand);

      /* If necessary, report the effect that the instruction has on
         the unwind info.  We've already done this for delay slots
         and call instructions.  */
#if defined (DWARF2_UNWIND_INFO)
      if (GET_CODE (insn) == INSN
#if !defined (HAVE_prologue)
          && !ACCUMULATE_OUTGOING_ARGS
#endif
          && final_sequence == 0
          && dwarf2out_do_frame ())
        dwarf2out_frame_debug (insn);
#endif

      /* It's not at all clear why we did this and doing so used to
         interfere with tests that used REG_WAS_0 notes, which are
         now gone, so let's try leaving this out.  */

      /* Mark this insn as having been output.  */
      INSN_DELETED_P (insn) = 1;

      /* Emit information for vtable gc.  */
      note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);

      current_output_insn = debug_insn = 0;

      return NEXT_INSN (insn);
/* Output debugging info to the assembler file FILE
   based on the NOTE-insn INSN, assumed to be a line number.  */

notice_source_line (rtx insn)
  const char *filename = insn_file (insn);
  int linenum = insn_line (insn);

  if (filename && (filename != last_filename || last_linenum != linenum))
    {
      last_filename = filename;
      last_linenum = linenum;
      high_block_linenum = MAX (last_linenum, high_block_linenum);
      high_function_linenum = MAX (last_linenum, high_function_linenum);
    }
/* For each operand in INSN, simplify (subreg (reg)) so that it refers
   directly to the desired hard register.  */

cleanup_subreg_operands (rtx insn)
  extract_insn_cached (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      /* The following test cannot use recog_data.operand when testing
         for a SUBREG: the underlying object might have been changed
         already if we are inside a match_operator expression that
         matches the else clause.  Instead we test the underlying
         expression directly.  */
      if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
        recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
      else if (GET_CODE (recog_data.operand[i]) == PLUS
               || GET_CODE (recog_data.operand[i]) == MULT
               || GET_CODE (recog_data.operand[i]) == MEM)
        recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
    }

  for (i = 0; i < recog_data.n_dups; i++)
    {
      if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
        *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
      else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
               || GET_CODE (*recog_data.dup_loc[i]) == MULT
               || GET_CODE (*recog_data.dup_loc[i]) == MEM)
        *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
    }
/* If X is a SUBREG, replace it with a REG or a MEM,
   based on the thing it is a subreg of.  */
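/* For example (assuming a target where this register/byte mapping holds),
   a lowpart subreg of a hard register such as

       (subreg:SI (reg:DI 2) 0)

   would be rewritten as (reg:SI 2), while a SUBREG of a MEM is rewritten
   with adjust_address into a narrower MEM at the proper byte offset.  */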
alter_subreg (rtx *xp)
  rtx y = SUBREG_REG (x);

  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
  if (GET_CODE (y) == MEM)
    *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));

      rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
                                 SUBREG_BYTE (x));

      /* Simplify_subreg can't handle some REG cases, but we have to.  */
      else if (GET_CODE (y) == REG)
          unsigned int regno = subreg_hard_regno (x, 1);
          *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
/* Do alter_subreg on all the SUBREGs contained in X.  */

walk_alter_subreg (rtx *xp)
  switch (GET_CODE (x))
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));

      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));

      return alter_subreg (xp);
/* Given COND, the condition of a jump instruction, alter it
   as required by the bits that are set in cc_status.flags.
   Not all of the bits there can be handled at this level in all cases.

   The value is normally 0.
   1 means that the condition has become always true.
   -1 means that the condition has become always false.
   2 means that COND has been altered.  */
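/* As an illustration (not taken from any particular target): if
   cc_status.flags has CC_REVERSED set, a condition such as

       (gt (cc0) (const_int 0))

   is rewritten in place to (lt (cc0) (const_int 0)) and the function
   returns 2, telling the caller the insn must be rerecognized.  */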
alter_cond (rtx cond)
  if (cc_status.flags & CC_REVERSED)
    PUT_CODE (cond, swap_condition (GET_CODE (cond)));

  if (cc_status.flags & CC_INVERTED)
    PUT_CODE (cond, reverse_condition (GET_CODE (cond)));

  if (cc_status.flags & CC_NOT_POSITIVE)
    switch (GET_CODE (cond))
        /* Jump becomes unconditional.  */

        /* Jump becomes no-op.  */

        PUT_CODE (cond, EQ);

        PUT_CODE (cond, NE);

  if (cc_status.flags & CC_NOT_NEGATIVE)
    switch (GET_CODE (cond))
        /* Jump becomes unconditional.  */

        /* Jump becomes no-op.  */

        PUT_CODE (cond, EQ);

        PUT_CODE (cond, NE);

  if (cc_status.flags & CC_NO_OVERFLOW)
    switch (GET_CODE (cond))
        /* Jump becomes unconditional.  */

        PUT_CODE (cond, EQ);

        PUT_CODE (cond, NE);

        /* Jump becomes no-op.  */

  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
    switch (GET_CODE (cond))
        PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);

        PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);

  if (cc_status.flags & CC_NOT_SIGNED)
    /* The flags are valid if signed condition operators are converted
       to unsigned.  */
    switch (GET_CODE (cond))
        PUT_CODE (cond, LEU);

        PUT_CODE (cond, LTU);

        PUT_CODE (cond, GTU);

        PUT_CODE (cond, GEU);
/* Report inconsistency between the assembler template and the operands.
   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */

output_operand_lossage (const char *msgid, ...)
  const char *pfx_str;

  va_start (ap, msgid);

  pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
  asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
  vasprintf (&new_message, fmt_string, ap);

  if (this_is_asm_operands)
    error_for_asm (this_is_asm_operands, "%s", new_message);
  else
    internal_error ("%s", new_message);
/* Output of assembler code from a template, and its subroutines.  */

/* Annotate the assembly with a comment describing the pattern and
   alternative used.  */

output_asm_name (void)
      int num = INSN_CODE (debug_insn);
      fprintf (asm_out_file, "\t%s %d\t%s",
               ASM_COMMENT_START, INSN_UID (debug_insn),
               insn_data[num].name);
      if (insn_data[num].n_alternatives > 1)
        fprintf (asm_out_file, "/%d", which_alternative + 1);
#ifdef HAVE_ATTR_length
      fprintf (asm_out_file, "\t[length = %d]",
               get_attr_length (debug_insn));
#endif
      /* Clear this so only the first assembler insn
         of any rtl insn will get the special comment for -dp.  */
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
   corresponds to the address of the object and 0 if to the object.  */

get_mem_expr_from_op (rtx op, int *paddressp)
  if (GET_CODE (op) == REG)
    return REG_EXPR (op);
  else if (GET_CODE (op) != MEM)
    return 0;

  if (MEM_EXPR (op) != 0)
    return MEM_EXPR (op);

  /* Otherwise we have an address, so indicate it and look at the address.  */
  *paddressp = 1;
  op = XEXP (op, 0);

  /* First check if we have a decl for the address, then look at the right side
     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
     But don't allow the address to itself be indirect.  */
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
    return expr;
  else if (GET_CODE (op) == PLUS
           && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
    return expr;

  while (GET_RTX_CLASS (GET_CODE (op)) == '1'
         || GET_RTX_CLASS (GET_CODE (op)) == '2')
    op = XEXP (op, 0);

  expr = get_mem_expr_from_op (op, &inner_addressp);
  return inner_addressp ? 0 : expr;
/* Output operand names for assembler instructions.  OPERANDS is the
   operand vector, OPORDER is the order to write the operands, and NOPS
   is the number of operands to write.  */

output_asm_operand_names (rtx *operands, int *oporder, int nops)
  for (i = 0; i < nops; i++)
    {
      rtx op = operands[oporder[i]];
      tree expr = get_mem_expr_from_op (op, &addressp);

      fprintf (asm_out_file, "%c%s",
               wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
      if (expr)
        {
          fprintf (asm_out_file, "%s",
                   addressp ? "*" : "");
          print_mem_expr (asm_out_file, expr);
        }
      else if (REG_P (op) && ORIGINAL_REGNO (op)
               && ORIGINAL_REGNO (op) != REGNO (op))
        fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
    }
/* Output text from TEMPLATE to the assembler output file,
   obeying %-directions to substitute operands taken from
   the vector OPERANDS.

   %N (for N a digit) means print operand N in usual manner.
   %lN means require operand N to be a CODE_LABEL or LABEL_REF
   and print the label name with no punctuation.
   %cN means require operand N to be a constant
   and print the constant expression with no punctuation.
   %aN means expect operand N to be a memory address
   (not a memory reference!) and print a reference
   to it.
   %nN means expect operand N to be a constant
   and print a constant expression for minus the value
   of the operand, with no other punctuation.  */
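/* A minimal illustration (the mnemonic is hypothetical): given an
   OPERANDS vector whose element 0 is a register and element 1 a constant,

       output_asm_insn ("add\t%1,%0", operands);

   prints the opcode, substitutes operand 1 and operand 0 in the usual
   manner, and terminates the output line.  */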
output_asm_insn (const char *template, rtx *operands)
#ifdef ASSEMBLER_DIALECT
  int oporder[MAX_RECOG_OPERANDS];
  char opoutput[MAX_RECOG_OPERANDS];

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */

  memset (opoutput, 0, sizeof opoutput);

        putc ('\t', asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
        ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

        if (flag_verbose_asm)
          output_asm_operand_names (operands, oporder, ops);
        if (flag_print_asm_name)
          output_asm_name ();

        memset (opoutput, 0, sizeof opoutput);

        putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
        while ((c = *p) == '\t')
          {
            putc (c, asm_out_file);
            p++;
          }
        ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

#ifdef ASSEMBLER_DIALECT
            output_operand_lossage ("nested assembly dialect alternatives");

          /* If we want the first dialect, do nothing.  Otherwise, skip
             DIALECT_NUMBER of strings ending with '|'.  */
          for (i = 0; i < dialect_number; i++)
            while (*p && *p != '}' && *p++ != '|')

            output_operand_lossage ("unterminated assembly dialect alternative");

          /* Skip to close brace.  */
              output_operand_lossage ("unterminated assembly dialect alternative");

          while (*p++ != '}');
#endif
          putc (c, asm_out_file);

        putc (c, asm_out_file);

      /* %% outputs a single %.  */
          putc (c, asm_out_file);

      /* %= outputs a number which is unique to each insn in the entire
         compilation.  This is useful for making local labels that are
         referred to more than once in a given insn.  */
          fprintf (asm_out_file, "%d", insn_counter);

      /* % followed by a letter and some digits
         outputs an operand in a special way depending on the letter.
         Letters `acln' are implemented directly.
         Other letters are passed to `output_operand' so that
         the PRINT_OPERAND macro can define them.  */
      else if (ISALPHA (*p))
            output_operand_lossage ("operand number missing after %%-letter");
          else if (this_is_asm_operands
                   && (c < 0 || (unsigned int) c >= insn_noperands))
            output_operand_lossage ("operand number out of range");
          else if (letter == 'l')
            output_asm_label (operands[c]);
          else if (letter == 'a')
            output_address (operands[c]);
          else if (letter == 'c')
            {
              if (CONSTANT_ADDRESS_P (operands[c]))
                output_addr_const (asm_out_file, operands[c]);
              else
                output_operand (operands[c], 'c');
            }
          else if (letter == 'n')
            {
              if (GET_CODE (operands[c]) == CONST_INT)
                fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
                         - INTVAL (operands[c]));
              else
                {
                  putc ('-', asm_out_file);
                  output_addr_const (asm_out_file, operands[c]);
                }
            }
          else
            output_operand (operands[c], letter);

          while (ISDIGIT (c = *p))
            p++;

      /* % followed by a digit outputs an operand the default way.  */
      else if (ISDIGIT (*p))
          if (this_is_asm_operands
              && (c < 0 || (unsigned int) c >= insn_noperands))
            output_operand_lossage ("operand number out of range");
          else
            output_operand (operands[c], 0);

          while (ISDIGIT (c = *p))
            p++;

      /* % followed by punctuation: output something for that
         punctuation character alone, with no operand.
         The PRINT_OPERAND macro decides what is actually done.  */
#ifdef PRINT_OPERAND_PUNCT_VALID_P
      else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
        output_operand (NULL_RTX, *p++);
#endif
      else
        output_operand_lossage ("invalid %%-code");

      putc (c, asm_out_file);

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)
    output_asm_name ();

  putc ('\n', asm_out_file);
/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */

output_asm_label (rtx x)
  if (GET_CODE (x) == LABEL_REF)
    x = XEXP (x, 0);
  if (GET_CODE (x) == CODE_LABEL
      || (GET_CODE (x) == NOTE
          && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
  else
    output_operand_lossage ("`%%l' operand isn't a label");

  assemble_name (asm_out_file, buf);
/* Print operand X using machine-dependent assembler syntax.
   The macro PRINT_OPERAND is defined just to control this function.
   CODE is a non-digit that preceded the operand-number in the % spec,
   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
   between the % and the digits.
   When CODE is a non-letter, X is 0.

   The meanings of the letters are machine-dependent and controlled
   by PRINT_OPERAND.  */

output_operand (rtx x, int code ATTRIBUTE_UNUSED)
  if (x && GET_CODE (x) == SUBREG)
    x = alter_subreg (&x);

  /* If X is a pseudo-register, abort now rather than writing trash to the
     assembler file.  */
  if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    abort ();

  PRINT_OPERAND (asm_out_file, x, code);
/* Print a memory reference operand for address X
   using machine-dependent assembler syntax.
   The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */

output_address (rtx x)
  walk_alter_subreg (&x);
  PRINT_OPERAND_ADDRESS (asm_out_file, x);

/* Print an integer constant expression in assembler syntax.
   Addition and subtraction are the only arithmetic
   that may appear in these expressions.  */
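/* For example (exact assembler syntax is target-dependent), the constant

       (const (plus (symbol_ref "table") (const_int 4)))

   is normally printed as `table+4', and a MINUS of two expressions is
   printed with `-' between the two sub-expressions.  */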
output_addr_const (FILE *file, rtx x)
  switch (GET_CODE (x))

#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
#else
      assemble_name (file, XSTR (x, 0));
#endif

      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
#else
      assemble_name (file, buf);
#endif

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));

      /* This used to output parentheses around the expression,
         but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));

      if (GET_MODE (x) == VOIDmode)
        {
          /* We can use %d if the number is one word and positive.  */
          if (CONST_DOUBLE_HIGH (x))
            fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
          else if (CONST_DOUBLE_LOW (x) < 0)
            fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
          else
            fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
        }
      else
        /* We can't handle floating point constants;
           PRINT_OPERAND must handle them.  */
        output_operand_lossage ("floating constant misused");

      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
        {
          output_addr_const (file, XEXP (x, 1));
          if (INTVAL (XEXP (x, 0)) >= 0)
            fprintf (file, "+");
          output_addr_const (file, XEXP (x, 0));
        }
      else
        {
          output_addr_const (file, XEXP (x, 0));
          if (GET_CODE (XEXP (x, 1)) != CONST_INT
              || INTVAL (XEXP (x, 1)) >= 0)
            fprintf (file, "+");
          output_addr_const (file, XEXP (x, 1));
        }

      /* Avoid outputting things like x-x or x+5-x,
         since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
      if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
          || GET_CODE (XEXP (x, 1)) == PC
          || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
        output_addr_const (file, XEXP (x, 1));
      else
        {
          fputs (targetm.asm_out.open_paren, file);
          output_addr_const (file, XEXP (x, 1));
          fputs (targetm.asm_out.close_paren, file);
        }

      output_addr_const (file, XEXP (x, 0));

#ifdef OUTPUT_ADDR_CONST_EXTRA
      OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
#endif
      output_operand_lossage ("invalid expression as operand");
/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */
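/* A hypothetical use, assuming the target defines REGISTER_PREFIX and
   IMMEDIATE_PREFIX:

       asm_fprintf (file, "\tmov\t%I%d, %R%s\n", 4, reg_names[regno]);

   Here %I and %R print the prefixes and consume no argument, while %d and
   %s consume the int and string arguments just as fprintf would.  */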
asm_fprintf (FILE *file, const char *p, ...)
  va_start (argptr, p);

#ifdef ASSEMBLER_DIALECT
        /* If we want the first dialect, do nothing.  Otherwise, skip
           DIALECT_NUMBER of strings ending with '|'.  */
        for (i = 0; i < dialect_number; i++)
          while (*p && *p++ != '|')
            ;

        /* Skip to close brace.  */
        while (*p && *p++ != '}')
          ;
#endif

        while (strchr ("-+ #0", c))

        while (ISDIGIT (c) || c == '.')

          case 'd':  case 'i':  case 'u':
          case 'x':  case 'X':  case 'o':
            fprintf (file, buf, va_arg (argptr, int));

            /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
               'o' cases, but we do not check for those cases.  It
               means that the value is a HOST_WIDE_INT, which may be
               either `long' or `long long'.  */
            memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
            q += strlen (HOST_WIDE_INT_PRINT);
            fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));

#ifdef HAVE_LONG_LONG
            fprintf (file, buf, va_arg (argptr, long long));
#else
            fprintf (file, buf, va_arg (argptr, long));
#endif

            fprintf (file, buf, va_arg (argptr, char *));

#ifdef ASM_OUTPUT_OPCODE
            ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

#ifdef REGISTER_PREFIX
            fprintf (file, "%s", REGISTER_PREFIX);
#endif

#ifdef IMMEDIATE_PREFIX
            fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif

#ifdef LOCAL_LABEL_PREFIX
            fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif

            fputs (user_label_prefix, file);

#ifdef ASM_FPRINTF_EXTENSIONS
          /* Uppercase letters are reserved for general use by asm_fprintf
             and so are not available to target specific code.  In order to
             prevent the ASM_FPRINTF_EXTENSIONS macro from using them, they
             are listed here.  As they get turned into real extensions to
             asm_fprintf they should be removed from this list.  */
          case 'A': case 'B': case 'C': case 'D': case 'E':
          case 'F': case 'G': case 'H': case 'J': case 'K':
          case 'M': case 'N': case 'P': case 'Q': case 'S':
          case 'T': case 'V': case 'W': case 'Y': case 'Z':

            ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.  */
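/* For instance, on a 32-bit-word target a DImode constant with the value
   0x0000000100000002 splits into the word-sized values 1 and 2; *FIRST
   receives the word stored at the lower address, so with !WORDS_BIG_ENDIAN
   it is GEN_INT (2) and *SECOND is GEN_INT (1), and the other way around
   when WORDS_BIG_ENDIAN.  (Illustrative only.)  */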
split_double (rtx value, rtx *first, rtx *second)
  if (GET_CODE (value) == CONST_INT)
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
          /* In this case the CONST_INT holds both target words.
             Extract the bits from it into two word-sized pieces.
             Sign extend each half to HOST_WIDE_INT.  */
          unsigned HOST_WIDE_INT low, high;
          unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;

          /* Set sign_bit to the most significant bit of a word.  */
          sign_bit <<= BITS_PER_WORD - 1;

          /* Set mask so that all bits of the word are set.  We could
             have used 1 << BITS_PER_WORD instead of basing the
             calculation on sign_bit.  However, on machines where
             HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
             compiler warning, even though the code would never be
             executed.  */
          mask = sign_bit << 1;

          /* Set sign_extend as any remaining bits.  */
          sign_extend = ~mask;

          /* Pick the lower word and sign-extend it.  */
          low = INTVAL (value);

          /* Pick the higher word, shifted to the least significant
             bits, and sign-extend it.  */
          high = INTVAL (value);
          high >>= BITS_PER_WORD - 1;
          if (high & sign_bit)
            high |= sign_extend;

          /* Store the words in the target machine order.  */
          if (WORDS_BIG_ENDIAN)
            {
              *first = GEN_INT (high);
              *second = GEN_INT (low);
            }
          else
            {
              *first = GEN_INT (low);
              *second = GEN_INT (high);
            }

          /* The rule for using CONST_INT for a wider mode
             is that we regard the value as signed.
             So sign-extend it.  */
          rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
          if (WORDS_BIG_ENDIAN)

  else if (GET_CODE (value) != CONST_DOUBLE)
      if (WORDS_BIG_ENDIAN)
          *first = const0_rtx;

          *second = const0_rtx;

  else if (GET_MODE (value) == VOIDmode
           /* This is the old way we did CONST_DOUBLE integers.  */
           || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
      /* In an integer, the words are defined as most and least significant.
         So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
        {
          *first = GEN_INT (CONST_DOUBLE_HIGH (value));
          *second = GEN_INT (CONST_DOUBLE_LOW (value));
        }
      else
        {
          *first = GEN_INT (CONST_DOUBLE_LOW (value));
          *second = GEN_INT (CONST_DOUBLE_HIGH (value));
        }

      REAL_VALUE_FROM_CONST_DOUBLE (r, value);

      /* Note, this converts the REAL_VALUE_TYPE to the target's
         format, splits up the floating point double and outputs
         exactly 32 bits of it into each of l[0] and l[1] --
         not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

      /* If 32 bits is an entire word for the target, but not for the host,
         then sign-extend on the host so that the number will look the same
         way on the host that it would on the target.  See for instance
         simplify_unary_operation.  The #if is needed to avoid compiler
         warnings.  */
#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
        {
          if (l[0] & ((long) 1 << 31))
            l[0] |= ((long) (-1) << 32);
          if (l[1] & ((long) 1 << 31))
            l[1] |= ((long) (-1) << 32);
        }
#endif

      *first = GEN_INT ((HOST_WIDE_INT) l[0]);
      *second = GEN_INT ((HOST_WIDE_INT) l[1]);
/* Return nonzero if this function has no function calls.  */

leaf_function_p (void)
  if (current_function_profile || profile_arc_flag)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CALL_INSN
          && ! SIBLING_CALL_P (insn))
        return 0;
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SEQUENCE
          && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
          && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
        return 0;
    }
  for (link = current_function_epilogue_delay_list;
       link;
       link = XEXP (link, 1))
    {
      insn = XEXP (link, 0);

      if (GET_CODE (insn) == CALL_INSN
          && ! SIBLING_CALL_P (insn))
        return 0;
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SEQUENCE
          && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
          && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
        return 0;
    }
/* Return 1 if branch is a forward branch.
   Uses insn_shuid array, so it works only in the final pass.  May be used by
   output templates to add customary branch prediction hints.  */

final_forward_branch_p (rtx insn)
  int insn_id, label_id;

  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  if (!insn_id || !label_id)

  return insn_id < label_id;
/* On some machines, a function with no call insns
   can run faster if it doesn't create its own register window.
   When output, the leaf function should use only the "output"
   registers.  Ordinarily, the function would be compiled to use
   the "input" registers to find its arguments; it is a candidate
   for leaf treatment if it uses only the "input" registers.
   Leaf function treatment means renumbering so the function
   uses the "output" registers instead.  */
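/* For example, on a register-window machine in the SPARC family the
   incoming argument registers (%i0-%i5) can be remapped by LEAF_REG_REMAP
   onto the corresponding output registers (%o0-%o5), so a leaf function
   never needs a register window of its own.  (Details are entirely
   target-specific.)  */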
#ifdef LEAF_REGISTERS

/* Return 1 if this function uses only the registers that can be
   safely renumbered.  */

only_leaf_regs_used (void)
  const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((regs_ever_live[i] || global_regs[i])
        && ! permitted_reg_in_leaf_functions[i])

  if (current_function_uses_pic_offset_table
      && pic_offset_table_rtx != 0
      && GET_CODE (pic_offset_table_rtx) == REG
      && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

leaf_renumber_regs (rtx first)
  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash, and should not be needed.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
  for (insn = current_function_epilogue_delay_list;
       insn = XEXP (insn, 1))
    if (INSN_P (XEXP (insn, 0)))
      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

leaf_renumber_regs_insn (rtx in_rtx)
  const char *format_ptr;

  /* Renumber all input-registers into output-registers.
     renumbered_regs would be 1 for an output-register; ...  */

  if (GET_CODE (in_rtx) == REG)
      /* Don't renumber the same reg twice.  */

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
         to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)

      newreg = LEAF_REG_REMAP (newreg);

      regs_ever_live[REGNO (in_rtx)] = 0;
      regs_ever_live[newreg] = 1;
      REGNO (in_rtx) = newreg;

  if (INSN_P (in_rtx))
      /* Inside a SEQUENCE, we find insns.
         Renumber just the patterns of these insns,
         just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
        leaf_renumber_regs_insn (XEXP (in_rtx, i));

        if (NULL != XVEC (in_rtx, i))
            for (j = 0; j < XVECLEN (in_rtx, i); j++)
              leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
/* When -gused is used, emit debug info for only used symbols.  But in
   addition to the standard intercepted debug_hooks there are some direct
   calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
   Those routines may also be called from a higher level intercepted routine.  So
   to prevent recording data for an inner call to one of these for an intercept,
   we maintain an intercept nesting counter (debug_nesting).  We only save the
   intercepted arguments if the nesting is 1.  */
int debug_nesting = 0;

static tree *symbol_queue;
int symbol_queue_index = 0;
static int symbol_queue_size = 0;

/* Generate the symbols for any queued up type symbols we encountered
   while generating the type info for some originally used symbol.
   This might generate additional entries in the queue.  Only when
   the nesting depth goes to 0 is this routine called.  */

debug_flush_symbol_queue (void)
  /* Make sure that additionally queued items are not flushed
     prematurely.  */

  for (i = 0; i < symbol_queue_index; ++i)
    {
      /* If we pushed queued symbols then such symbols must be
         output no matter what anyone else says.  Specifically,
         we need to make sure dbxout_symbol() thinks the symbol was
         used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
         which may be set for outside reasons.  */
      int saved_tree_used = TREE_USED (symbol_queue[i]);
      int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
      TREE_USED (symbol_queue[i]) = 1;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;

#ifdef DBX_DEBUGGING_INFO
      dbxout_symbol (symbol_queue[i], 0);
#endif

      TREE_USED (symbol_queue[i]) = saved_tree_used;
      TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
    }

  symbol_queue_index = 0;
/* Queue a type symbol needed as part of the definition of a decl
   symbol.  These symbols are generated when debug_flush_symbol_queue()
   is called.  */

debug_queue_symbol (tree decl)
  if (symbol_queue_index >= symbol_queue_size)
    {
      symbol_queue_size += 10;
      symbol_queue = xrealloc (symbol_queue,
                               symbol_queue_size * sizeof (tree));
    }

  symbol_queue[symbol_queue_index++] = decl;

/* Free symbol queue.  */
debug_free_queue (void)
  free (symbol_queue);
  symbol_queue = NULL;
  symbol_queue_size = 0;