1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This is the final pass of the compiler.
24 It looks at the rtl code for a function and outputs assembler code.
26 Call `final_start_function' to output the assembler code for function entry,
27 `final' to output assembler code for some RTL code,
28 `final_end_function' to output assembler code for function exit.
29 If a function is compiled in several pieces, each piece is
30 output separately with `final'.
32 Some optimizations are also done at this level.
33 Move instructions that were made unnecessary by good register allocation
34 are detected and omitted from the output. (Though most of these
35 are removed by the last jump pass.)
37 Instructions to set the condition codes are omitted when it can be
38 seen that the condition codes already had the desired values.
40 In some cases it is sufficient if the inherited condition codes
41 have related values, but this may require the following insn
42 (the one that tests the condition codes) to be modified.
44 The code for the function prologue and epilogue are generated
45 directly in assembler by the target functions function_prologue and
46 function_epilogue. Those instructions never exist as rtl. */
50 #include "coretypes.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
60 #include "conditions.h"
62 #include "hard-reg-set.h"
66 #include "rtl-error.h"
67 #include "toplev.h" /* exact_log2, floor_log2 */
70 #include "basic-block.h"
72 #include "targhooks.h"
75 #include "tree-pass.h"
76 #include "tree-flow.h"
83 #include "tree-pretty-print.h" /* for dump_function_header */
85 #ifdef XCOFF_DEBUGGING_INFO
86 #include "xcoffout.h" /* Needed for external data
87 declarations for e.g. AIX 4.x. */
90 #include "dwarf2out.h"
92 #ifdef DBX_DEBUGGING_INFO
96 #ifdef SDB_DEBUGGING_INFO
100 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
101 So define a null default for it to save conditionalization later. */
102 #ifndef CC_STATUS_INIT
103 #define CC_STATUS_INIT
106 /* Is the given character a logical line separator for the assembler? */
107 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
108 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
111 #ifndef JUMP_TABLES_IN_TEXT_SECTION
112 #define JUMP_TABLES_IN_TEXT_SECTION 0
115 /* Bitflags used by final_scan_insn. */
118 #define SEEN_EMITTED 4
120 /* Last insn processed by final_scan_insn. */
121 static rtx debug_insn
;
122 rtx current_output_insn
;
124 /* Line number of last NOTE. */
125 static int last_linenum
;
127 /* Last discriminator written to assembly. */
128 static int last_discriminator
;
130 /* Discriminator of current block. */
131 static int discriminator
;
133 /* Highest line number in current block. */
134 static int high_block_linenum
;
136 /* Likewise for function. */
137 static int high_function_linenum
;
139 /* Filename of last NOTE. */
140 static const char *last_filename
;
142 /* Override filename and line number. */
143 static const char *override_filename
;
144 static int override_linenum
;
146 /* Whether to force emission of a line note before the next insn. */
147 static bool force_source_line
= false;
149 extern const int length_unit_log
; /* This is defined in insn-attrtab.c. */
151 /* Nonzero while outputting an `asm' with operands.
152 This means that inconsistencies are the user's fault, so don't die.
153 The precise value is the insn being output, to pass to error_for_asm. */
154 rtx this_is_asm_operands
;
156 /* Number of operands of this insn, for an `asm' with operands. */
157 static unsigned int insn_noperands
;
159 /* Compare optimization flag. */
161 static rtx last_ignored_compare
= 0;
163 /* Assign a unique number to each insn that is output.
164 This can be used to generate unique local labels. */
166 static int insn_counter
= 0;
169 /* This variable contains machine-dependent flags (defined in tm.h)
170 set and examined by output routines
171 that describe how to interpret the condition codes properly. */
175 /* During output of an insn, this contains a copy of cc_status
176 from before the insn. */
178 CC_STATUS cc_prev_status
;
181 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
183 static int block_depth
;
185 /* Nonzero if have enabled APP processing of our assembler output. */
189 /* If we are outputting an insn sequence, this contains the sequence rtx.
194 #ifdef ASSEMBLER_DIALECT
196 /* Number of the assembler dialect to use, starting at 0. */
197 static int dialect_number
;
200 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
201 rtx current_insn_predicate
;
203 /* True if printing into -fdump-final-insns= dump. */
204 bool final_insns_dump_p
;
206 static int asm_insn_count (rtx
);
207 static void profile_function (FILE *);
208 static void profile_after_prologue (FILE *);
209 static bool notice_source_line (rtx
, bool *);
210 static rtx
walk_alter_subreg (rtx
*, bool *);
211 static void output_asm_name (void);
212 static void output_alternate_entry_point (FILE *, rtx
);
213 static tree
get_mem_expr_from_op (rtx
, int *);
214 static void output_asm_operand_names (rtx
*, int *, int);
215 #ifdef LEAF_REGISTERS
216 static void leaf_renumber_regs (rtx
);
219 static int alter_cond (rtx
);
221 #ifndef ADDR_VEC_ALIGN
222 static int final_addr_vec_align (rtx
);
224 static int align_fuzz (rtx
, rtx
, int, unsigned);
226 /* Initialize data in final at the beginning of a compilation. */
229 init_final (const char *filename ATTRIBUTE_UNUSED
)
234 #ifdef ASSEMBLER_DIALECT
235 dialect_number
= ASSEMBLER_DIALECT
;
239 /* Default target function prologue and epilogue assembler output.
241 If not overridden for epilogue code, then the function body itself
242 contains return instructions wherever needed. */
244 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED
,
245 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
250 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED
,
251 tree decl ATTRIBUTE_UNUSED
,
252 bool new_is_cold ATTRIBUTE_UNUSED
)
256 /* Default target hook that outputs nothing to a stream. */
258 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED
)
262 /* Enable APP processing of subsequent output.
263 Used before the output from an `asm' statement. */
270 fputs (ASM_APP_ON
, asm_out_file
);
275 /* Disable APP processing of subsequent output.
276 Called from varasm.c before most kinds of output. */
283 fputs (ASM_APP_OFF
, asm_out_file
);
288 /* Return the number of slots filled in the current
289 delayed branch sequence (we don't count the insn needing the
290 delay slot). Zero if not in a delayed branch sequence. */
294 dbr_sequence_length (void)
296 if (final_sequence
!= 0)
297 return XVECLEN (final_sequence
, 0) - 1;
303 /* The next two pages contain routines used to compute the length of an insn
304 and to shorten branches. */
306 /* Arrays for insn lengths, and addresses. The latter is referenced by
307 `insn_current_length'. */
309 static int *insn_lengths
;
311 vec
<int> insn_addresses_
;
313 /* Max uid for which the above arrays are valid. */
314 static int insn_lengths_max_uid
;
316 /* Address of insn being processed. Used by `insn_current_length'. */
317 int insn_current_address
;
319 /* Address of insn being processed in previous iteration. */
320 int insn_last_address
;
322 /* known invariant alignment of insn being processed. */
323 int insn_current_align
;
325 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
326 gives the next following alignment insn that increases the known
327 alignment, or NULL_RTX if there is no such insn.
328 For any alignment obtained this way, we can again index uid_align with
329 its uid to obtain the next following align that in turn increases the
330 alignment, till we reach NULL_RTX; the sequence obtained this way
331 for each insn we'll call the alignment chain of this insn in the following
334 struct label_alignment
340 static rtx
*uid_align
;
341 static int *uid_shuid
;
342 static struct label_alignment
*label_align
;
344 /* Indicate that branch shortening hasn't yet been done. */
347 init_insn_lengths (void)
358 insn_lengths_max_uid
= 0;
360 if (HAVE_ATTR_length
)
361 INSN_ADDRESSES_FREE ();
369 /* Obtain the current length of an insn. If branch shortening has been done,
370 get its actual length. Otherwise, use FALLBACK_FN to calculate the
373 get_attr_length_1 (rtx insn
, int (*fallback_fn
) (rtx
))
379 if (!HAVE_ATTR_length
)
382 if (insn_lengths_max_uid
> INSN_UID (insn
))
383 return insn_lengths
[INSN_UID (insn
)];
385 switch (GET_CODE (insn
))
394 length
= fallback_fn (insn
);
398 body
= PATTERN (insn
);
399 if (GET_CODE (body
) == ADDR_VEC
|| GET_CODE (body
) == ADDR_DIFF_VEC
)
401 /* Alignment is machine-dependent and should be handled by
405 length
= fallback_fn (insn
);
409 body
= PATTERN (insn
);
410 if (GET_CODE (body
) == USE
|| GET_CODE (body
) == CLOBBER
)
413 else if (GET_CODE (body
) == ASM_INPUT
|| asm_noperands (body
) >= 0)
414 length
= asm_insn_count (body
) * fallback_fn (insn
);
415 else if (GET_CODE (body
) == SEQUENCE
)
416 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
417 length
+= get_attr_length_1 (XVECEXP (body
, 0, i
), fallback_fn
);
419 length
= fallback_fn (insn
);
426 #ifdef ADJUST_INSN_LENGTH
427 ADJUST_INSN_LENGTH (insn
, length
);
432 /* Obtain the current length of an insn. If branch shortening has been done,
433 get its actual length. Otherwise, get its maximum length. */
435 get_attr_length (rtx insn
)
437 return get_attr_length_1 (insn
, insn_default_length
);
440 /* Obtain the current length of an insn. If branch shortening has been done,
441 get its actual length. Otherwise, get its minimum length. */
443 get_attr_min_length (rtx insn
)
445 return get_attr_length_1 (insn
, insn_min_length
);
448 /* Code to handle alignment inside shorten_branches. */
450 /* Here is an explanation how the algorithm in align_fuzz can give
453 Call a sequence of instructions beginning with alignment point X
454 and continuing until the next alignment point `block X'. When `X'
455 is used in an expression, it means the alignment value of the
458 Call the distance between the start of the first insn of block X, and
459 the end of the last insn of block X `IX', for the `inner size of X'.
460 This is clearly the sum of the instruction lengths.
462 Likewise with the next alignment-delimited block following X, which we
465 Call the distance between the start of the first insn of block X, and
466 the start of the first insn of block Y `OX', for the `outer size of X'.
468 The estimated padding is then OX - IX.
470 OX can be safely estimated as
475 OX = round_up(IX, X) + Y - X
477 Clearly est(IX) >= real(IX), because that only depends on the
478 instruction lengths, and those being overestimated is a given.
480 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
481 we needn't worry about that when thinking about OX.
483 When X >= Y, the alignment provided by Y adds no uncertainty factor
484 for branch ranges starting before X, so we can just round what we have.
485 But when X < Y, we don't know anything about the, so to speak,
486 `middle bits', so we have to assume the worst when aligning up from an
487 address mod X to one mod Y, which is Y - X. */
490 #define LABEL_ALIGN(LABEL) align_labels_log
494 #define LOOP_ALIGN(LABEL) align_loops_log
497 #ifndef LABEL_ALIGN_AFTER_BARRIER
498 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
502 #define JUMP_ALIGN(LABEL) align_jumps_log
506 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED
)
512 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED
)
514 return align_loops_max_skip
;
518 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED
)
520 return align_labels_max_skip
;
524 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED
)
526 return align_jumps_max_skip
;
529 #ifndef ADDR_VEC_ALIGN
531 final_addr_vec_align (rtx addr_vec
)
533 int align
= GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec
)));
535 if (align
> BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
536 align
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
537 return exact_log2 (align
);
541 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
544 #ifndef INSN_LENGTH_ALIGNMENT
545 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
548 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
550 static int min_labelno
, max_labelno
;
552 #define LABEL_TO_ALIGNMENT(LABEL) \
553 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
555 #define LABEL_TO_MAX_SKIP(LABEL) \
556 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
558 /* For the benefit of port specific code do this also as a function. */
561 label_to_alignment (rtx label
)
563 if (CODE_LABEL_NUMBER (label
) <= max_labelno
)
564 return LABEL_TO_ALIGNMENT (label
);
569 label_to_max_skip (rtx label
)
571 if (CODE_LABEL_NUMBER (label
) <= max_labelno
)
572 return LABEL_TO_MAX_SKIP (label
);
576 /* The differences in addresses
577 between a branch and its target might grow or shrink depending on
578 the alignment the start insn of the range (the branch for a forward
579 branch or the label for a backward branch) starts out on; if these
580 differences are used naively, they can even oscillate infinitely.
581 We therefore want to compute a 'worst case' address difference that
582 is independent of the alignment the start insn of the range end
583 up on, and that is at least as large as the actual difference.
584 The function align_fuzz calculates the amount we have to add to the
585 naively computed difference, by traversing the part of the alignment
586 chain of the start insn of the range that is in front of the end insn
587 of the range, and considering for each alignment the maximum amount
588 that it might contribute to a size increase.
590 For casesi tables, we also want to know worst case minimum amounts of
591 address difference, in case a machine description wants to introduce
592 some common offset that is added to all offsets in a table.
593 For this purpose, align_fuzz with a growth argument of 0 computes the
594 appropriate adjustment. */
596 /* Compute the maximum delta by which the difference of the addresses of
597 START and END might grow / shrink due to a different address for start
598 which changes the size of alignment insns between START and END.
599 KNOWN_ALIGN_LOG is the alignment known for START.
600 GROWTH should be ~0 if the objective is to compute potential code size
601 increase, and 0 if the objective is to compute potential shrink.
602 The return value is undefined for any other value of GROWTH. */
605 align_fuzz (rtx start
, rtx end
, int known_align_log
, unsigned int growth
)
607 int uid
= INSN_UID (start
);
609 int known_align
= 1 << known_align_log
;
610 int end_shuid
= INSN_SHUID (end
);
613 for (align_label
= uid_align
[uid
]; align_label
; align_label
= uid_align
[uid
])
615 int align_addr
, new_align
;
617 uid
= INSN_UID (align_label
);
618 align_addr
= INSN_ADDRESSES (uid
) - insn_lengths
[uid
];
619 if (uid_shuid
[uid
] > end_shuid
)
621 known_align_log
= LABEL_TO_ALIGNMENT (align_label
);
622 new_align
= 1 << known_align_log
;
623 if (new_align
< known_align
)
625 fuzz
+= (-align_addr
^ growth
) & (new_align
- known_align
);
626 known_align
= new_align
;
631 /* Compute a worst-case reference address of a branch so that it
632 can be safely used in the presence of aligned labels. Since the
633 size of the branch itself is unknown, the size of the branch is
634 not included in the range. I.e. for a forward branch, the reference
635 address is the end address of the branch as known from the previous
636 branch shortening pass, minus a value to account for possible size
637 increase due to alignment. For a backward branch, it is the start
638 address of the branch as known from the current pass, plus a value
639 to account for possible size increase due to alignment.
640 NB.: Therefore, the maximum offset allowed for backward branches needs
641 to exclude the branch size. */
644 insn_current_reference_address (rtx branch
)
649 if (! INSN_ADDRESSES_SET_P ())
652 seq
= NEXT_INSN (PREV_INSN (branch
));
653 seq_uid
= INSN_UID (seq
);
654 if (!JUMP_P (branch
))
655 /* This can happen for example on the PA; the objective is to know the
656 offset to address something in front of the start of the function.
657 Thus, we can treat it like a backward branch.
658 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
659 any alignment we'd encounter, so we skip the call to align_fuzz. */
660 return insn_current_address
;
661 dest
= JUMP_LABEL (branch
);
663 /* BRANCH has no proper alignment chain set, so use SEQ.
664 BRANCH also has no INSN_SHUID. */
665 if (INSN_SHUID (seq
) < INSN_SHUID (dest
))
667 /* Forward branch. */
668 return (insn_last_address
+ insn_lengths
[seq_uid
]
669 - align_fuzz (seq
, dest
, length_unit_log
, ~0));
673 /* Backward branch. */
674 return (insn_current_address
675 + align_fuzz (dest
, seq
, length_unit_log
, ~0));
679 /* Compute branch alignments based on frequency information in the
683 compute_alignments (void)
685 int log
, max_skip
, max_log
;
688 int freq_threshold
= 0;
696 max_labelno
= max_label_num ();
697 min_labelno
= get_first_label_num ();
698 label_align
= XCNEWVEC (struct label_alignment
, max_labelno
- min_labelno
+ 1);
700 /* If not optimizing or optimizing for size, don't assign any alignments. */
701 if (! optimize
|| optimize_function_for_size_p (cfun
))
706 dump_reg_info (dump_file
);
707 dump_flow_info (dump_file
, TDF_DETAILS
);
708 flow_loops_dump (dump_file
, NULL
, 1);
710 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
712 if (bb
->frequency
> freq_max
)
713 freq_max
= bb
->frequency
;
714 freq_threshold
= freq_max
/ PARAM_VALUE (PARAM_ALIGN_THRESHOLD
);
717 fprintf(dump_file
, "freq_max: %i\n",freq_max
);
720 rtx label
= BB_HEAD (bb
);
721 int fallthru_frequency
= 0, branch_frequency
= 0, has_fallthru
= 0;
726 || optimize_bb_for_size_p (bb
))
729 fprintf(dump_file
, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
730 bb
->index
, bb
->frequency
, bb
->loop_father
->num
,
734 max_log
= LABEL_ALIGN (label
);
735 max_skip
= targetm
.asm_out
.label_align_max_skip (label
);
737 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
739 if (e
->flags
& EDGE_FALLTHRU
)
740 has_fallthru
= 1, fallthru_frequency
+= EDGE_FREQUENCY (e
);
742 branch_frequency
+= EDGE_FREQUENCY (e
);
746 fprintf(dump_file
, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
747 bb
->index
, bb
->frequency
, bb
->loop_father
->num
,
749 fallthru_frequency
, branch_frequency
);
750 if (!bb
->loop_father
->inner
&& bb
->loop_father
->num
)
751 fprintf (dump_file
, " inner_loop");
752 if (bb
->loop_father
->header
== bb
)
753 fprintf (dump_file
, " loop_header");
754 fprintf (dump_file
, "\n");
757 /* There are two purposes to align block with no fallthru incoming edge:
758 1) to avoid fetch stalls when branch destination is near cache boundary
759 2) to improve cache efficiency in case the previous block is not executed
760 (so it does not need to be in the cache).
762 We to catch first case, we align frequently executed blocks.
763 To catch the second, we align blocks that are executed more frequently
764 than the predecessor and the predecessor is likely to not be executed
765 when function is called. */
768 && (branch_frequency
> freq_threshold
769 || (bb
->frequency
> bb
->prev_bb
->frequency
* 10
770 && (bb
->prev_bb
->frequency
771 <= ENTRY_BLOCK_PTR
->frequency
/ 2))))
773 log
= JUMP_ALIGN (label
);
775 fprintf(dump_file
, " jump alignment added.\n");
779 max_skip
= targetm
.asm_out
.jump_align_max_skip (label
);
782 /* In case block is frequent and reached mostly by non-fallthru edge,
783 align it. It is most likely a first block of loop. */
785 && optimize_bb_for_speed_p (bb
)
786 && branch_frequency
+ fallthru_frequency
> freq_threshold
788 > fallthru_frequency
* PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS
)))
790 log
= LOOP_ALIGN (label
);
792 fprintf(dump_file
, " internal loop alignment added.\n");
796 max_skip
= targetm
.asm_out
.loop_align_max_skip (label
);
799 LABEL_TO_ALIGNMENT (label
) = max_log
;
800 LABEL_TO_MAX_SKIP (label
) = max_skip
;
803 loop_optimizer_finalize ();
804 free_dominance_info (CDI_DOMINATORS
);
808 struct rtl_opt_pass pass_compute_alignments
=
812 "alignments", /* name */
813 OPTGROUP_NONE
, /* optinfo_flags */
815 compute_alignments
, /* execute */
818 0, /* static_pass_number */
820 0, /* properties_required */
821 0, /* properties_provided */
822 0, /* properties_destroyed */
823 0, /* todo_flags_start */
824 TODO_verify_rtl_sharing
825 | TODO_ggc_collect
/* todo_flags_finish */
830 /* Make a pass over all insns and compute their actual lengths by shortening
831 any branches of variable length if possible. */
833 /* shorten_branches might be called multiple times: for example, the SH
834 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
835 In order to do this, it needs proper length information, which it obtains
836 by calling shorten_branches. This cannot be collapsed with
837 shorten_branches itself into a single pass unless we also want to integrate
838 reorg.c, since the branch splitting exposes new instructions with delay
842 shorten_branches (rtx first
)
849 #define MAX_CODE_ALIGN 16
851 int something_changed
= 1;
852 char *varying_length
;
855 rtx align_tab
[MAX_CODE_ALIGN
];
857 /* Compute maximum UID and allocate label_align / uid_shuid. */
858 max_uid
= get_max_uid ();
860 /* Free uid_shuid before reallocating it. */
863 uid_shuid
= XNEWVEC (int, max_uid
);
865 if (max_labelno
!= max_label_num ())
867 int old
= max_labelno
;
871 max_labelno
= max_label_num ();
873 n_labels
= max_labelno
- min_labelno
+ 1;
874 n_old_labels
= old
- min_labelno
+ 1;
876 label_align
= XRESIZEVEC (struct label_alignment
, label_align
, n_labels
);
878 /* Range of labels grows monotonically in the function. Failing here
879 means that the initialization of array got lost. */
880 gcc_assert (n_old_labels
<= n_labels
);
882 memset (label_align
+ n_old_labels
, 0,
883 (n_labels
- n_old_labels
) * sizeof (struct label_alignment
));
886 /* Initialize label_align and set up uid_shuid to be strictly
887 monotonically rising with insn order. */
888 /* We use max_log here to keep track of the maximum alignment we want to
889 impose on the next CODE_LABEL (or the current one if we are processing
890 the CODE_LABEL itself). */
895 for (insn
= get_insns (), i
= 1; insn
; insn
= NEXT_INSN (insn
))
899 INSN_SHUID (insn
) = i
++;
906 bool next_is_jumptable
;
908 /* Merge in alignments computed by compute_alignments. */
909 log
= LABEL_TO_ALIGNMENT (insn
);
913 max_skip
= LABEL_TO_MAX_SKIP (insn
);
916 next
= next_nonnote_insn (insn
);
917 next_is_jumptable
= next
&& JUMP_TABLE_DATA_P (next
);
918 if (!next_is_jumptable
)
920 log
= LABEL_ALIGN (insn
);
924 max_skip
= targetm
.asm_out
.label_align_max_skip (insn
);
927 /* ADDR_VECs only take room if read-only data goes into the text
929 if ((JUMP_TABLES_IN_TEXT_SECTION
930 || readonly_data_section
== text_section
)
931 && next_is_jumptable
)
933 log
= ADDR_VEC_ALIGN (next
);
937 max_skip
= targetm
.asm_out
.label_align_max_skip (insn
);
940 LABEL_TO_ALIGNMENT (insn
) = max_log
;
941 LABEL_TO_MAX_SKIP (insn
) = max_skip
;
945 else if (BARRIER_P (insn
))
949 for (label
= insn
; label
&& ! INSN_P (label
);
950 label
= NEXT_INSN (label
))
953 log
= LABEL_ALIGN_AFTER_BARRIER (insn
);
957 max_skip
= targetm
.asm_out
.label_align_after_barrier_max_skip (label
);
963 if (!HAVE_ATTR_length
)
966 /* Allocate the rest of the arrays. */
967 insn_lengths
= XNEWVEC (int, max_uid
);
968 insn_lengths_max_uid
= max_uid
;
969 /* Syntax errors can lead to labels being outside of the main insn stream.
970 Initialize insn_addresses, so that we get reproducible results. */
971 INSN_ADDRESSES_ALLOC (max_uid
);
973 varying_length
= XCNEWVEC (char, max_uid
);
975 /* Initialize uid_align. We scan instructions
976 from end to start, and keep in align_tab[n] the last seen insn
977 that does an alignment of at least n+1, i.e. the successor
978 in the alignment chain for an insn that does / has a known
980 uid_align
= XCNEWVEC (rtx
, max_uid
);
982 for (i
= MAX_CODE_ALIGN
; --i
>= 0;)
983 align_tab
[i
] = NULL_RTX
;
984 seq
= get_last_insn ();
985 for (; seq
; seq
= PREV_INSN (seq
))
987 int uid
= INSN_UID (seq
);
989 log
= (LABEL_P (seq
) ? LABEL_TO_ALIGNMENT (seq
) : 0);
990 uid_align
[uid
] = align_tab
[0];
993 /* Found an alignment label. */
994 uid_align
[uid
] = align_tab
[log
];
995 for (i
= log
- 1; i
>= 0; i
--)
1000 /* When optimizing, we start assuming minimum length, and keep increasing
1001 lengths as we find the need for this, till nothing changes.
1002 When not optimizing, we start assuming maximum lengths, and
1003 do a single pass to update the lengths. */
1004 bool increasing
= optimize
!= 0;
1006 #ifdef CASE_VECTOR_SHORTEN_MODE
1009 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1012 int min_shuid
= INSN_SHUID (get_insns ()) - 1;
1013 int max_shuid
= INSN_SHUID (get_last_insn ()) + 1;
1016 for (insn
= first
; insn
!= 0; insn
= NEXT_INSN (insn
))
1018 rtx min_lab
= NULL_RTX
, max_lab
= NULL_RTX
, pat
;
1019 int len
, i
, min
, max
, insn_shuid
;
1021 addr_diff_vec_flags flags
;
1024 || GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
)
1026 pat
= PATTERN (insn
);
1027 len
= XVECLEN (pat
, 1);
1028 gcc_assert (len
> 0);
1029 min_align
= MAX_CODE_ALIGN
;
1030 for (min
= max_shuid
, max
= min_shuid
, i
= len
- 1; i
>= 0; i
--)
1032 rtx lab
= XEXP (XVECEXP (pat
, 1, i
), 0);
1033 int shuid
= INSN_SHUID (lab
);
1044 if (min_align
> LABEL_TO_ALIGNMENT (lab
))
1045 min_align
= LABEL_TO_ALIGNMENT (lab
);
1047 XEXP (pat
, 2) = gen_rtx_LABEL_REF (Pmode
, min_lab
);
1048 XEXP (pat
, 3) = gen_rtx_LABEL_REF (Pmode
, max_lab
);
1049 insn_shuid
= INSN_SHUID (insn
);
1050 rel
= INSN_SHUID (XEXP (XEXP (pat
, 0), 0));
1051 memset (&flags
, 0, sizeof (flags
));
1052 flags
.min_align
= min_align
;
1053 flags
.base_after_vec
= rel
> insn_shuid
;
1054 flags
.min_after_vec
= min
> insn_shuid
;
1055 flags
.max_after_vec
= max
> insn_shuid
;
1056 flags
.min_after_base
= min
> rel
;
1057 flags
.max_after_base
= max
> rel
;
1058 ADDR_DIFF_VEC_FLAGS (pat
) = flags
;
1061 PUT_MODE (pat
, CASE_VECTOR_SHORTEN_MODE (0, 0, pat
));
1064 #endif /* CASE_VECTOR_SHORTEN_MODE */
1066 /* Compute initial lengths, addresses, and varying flags for each insn. */
1067 int (*length_fun
) (rtx
) = increasing
? insn_min_length
: insn_default_length
;
1069 for (insn_current_address
= 0, insn
= first
;
1071 insn_current_address
+= insn_lengths
[uid
], insn
= NEXT_INSN (insn
))
1073 uid
= INSN_UID (insn
);
1075 insn_lengths
[uid
] = 0;
1079 int log
= LABEL_TO_ALIGNMENT (insn
);
1082 int align
= 1 << log
;
1083 int new_address
= (insn_current_address
+ align
- 1) & -align
;
1084 insn_lengths
[uid
] = new_address
- insn_current_address
;
1088 INSN_ADDRESSES (uid
) = insn_current_address
+ insn_lengths
[uid
];
1090 if (NOTE_P (insn
) || BARRIER_P (insn
)
1091 || LABEL_P (insn
) || DEBUG_INSN_P(insn
))
1093 if (INSN_DELETED_P (insn
))
1096 body
= PATTERN (insn
);
1097 if (GET_CODE (body
) == ADDR_VEC
|| GET_CODE (body
) == ADDR_DIFF_VEC
)
1099 /* This only takes room if read-only data goes into the text
1101 if (JUMP_TABLES_IN_TEXT_SECTION
1102 || readonly_data_section
== text_section
)
1103 insn_lengths
[uid
] = (XVECLEN (body
,
1104 GET_CODE (body
) == ADDR_DIFF_VEC
)
1105 * GET_MODE_SIZE (GET_MODE (body
)));
1106 /* Alignment is handled by ADDR_VEC_ALIGN. */
1108 else if (GET_CODE (body
) == ASM_INPUT
|| asm_noperands (body
) >= 0)
1109 insn_lengths
[uid
] = asm_insn_count (body
) * insn_default_length (insn
);
1110 else if (GET_CODE (body
) == SEQUENCE
)
1113 int const_delay_slots
;
1115 const_delay_slots
= const_num_delay_slots (XVECEXP (body
, 0, 0));
1117 const_delay_slots
= 0;
1119 int (*inner_length_fun
) (rtx
)
1120 = const_delay_slots
? length_fun
: insn_default_length
;
1121 /* Inside a delay slot sequence, we do not do any branch shortening
1122 if the shortening could change the number of delay slots
1124 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1126 rtx inner_insn
= XVECEXP (body
, 0, i
);
1127 int inner_uid
= INSN_UID (inner_insn
);
1130 if (GET_CODE (body
) == ASM_INPUT
1131 || asm_noperands (PATTERN (XVECEXP (body
, 0, i
))) >= 0)
1132 inner_length
= (asm_insn_count (PATTERN (inner_insn
))
1133 * insn_default_length (inner_insn
));
1135 inner_length
= inner_length_fun (inner_insn
);
1137 insn_lengths
[inner_uid
] = inner_length
;
1138 if (const_delay_slots
)
1140 if ((varying_length
[inner_uid
]
1141 = insn_variable_length_p (inner_insn
)) != 0)
1142 varying_length
[uid
] = 1;
1143 INSN_ADDRESSES (inner_uid
) = (insn_current_address
1144 + insn_lengths
[uid
]);
1147 varying_length
[inner_uid
] = 0;
1148 insn_lengths
[uid
] += inner_length
;
1151 else if (GET_CODE (body
) != USE
&& GET_CODE (body
) != CLOBBER
)
1153 insn_lengths
[uid
] = length_fun (insn
);
1154 varying_length
[uid
] = insn_variable_length_p (insn
);
1157 /* If needed, do any adjustment. */
1158 #ifdef ADJUST_INSN_LENGTH
1159 ADJUST_INSN_LENGTH (insn
, insn_lengths
[uid
]);
1160 if (insn_lengths
[uid
] < 0)
1161 fatal_insn ("negative insn length", insn
);
1165 /* Now loop over all the insns finding varying length insns. For each,
1166 get the current insn length. If it has changed, reflect the change.
1167 When nothing changes for a full pass, we are done. */
1169 while (something_changed
)
1171 something_changed
= 0;
1172 insn_current_align
= MAX_CODE_ALIGN
- 1;
1173 for (insn_current_address
= 0, insn
= first
;
1175 insn
= NEXT_INSN (insn
))
1178 #ifdef ADJUST_INSN_LENGTH
1183 uid
= INSN_UID (insn
);
1187 int log
= LABEL_TO_ALIGNMENT (insn
);
1188 if (log
> insn_current_align
)
1190 int align
= 1 << log
;
1191 int new_address
= (insn_current_address
+ align
- 1) & -align
;
1192 insn_lengths
[uid
] = new_address
- insn_current_address
;
1193 insn_current_align
= log
;
1194 insn_current_address
= new_address
;
1197 insn_lengths
[uid
] = 0;
1198 INSN_ADDRESSES (uid
) = insn_current_address
;
1202 length_align
= INSN_LENGTH_ALIGNMENT (insn
);
1203 if (length_align
< insn_current_align
)
1204 insn_current_align
= length_align
;
1206 insn_last_address
= INSN_ADDRESSES (uid
);
1207 INSN_ADDRESSES (uid
) = insn_current_address
;
1209 #ifdef CASE_VECTOR_SHORTEN_MODE
1210 if (optimize
&& JUMP_P (insn
)
1211 && GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1213 rtx body
= PATTERN (insn
);
1214 int old_length
= insn_lengths
[uid
];
1215 rtx rel_lab
= XEXP (XEXP (body
, 0), 0);
1216 rtx min_lab
= XEXP (XEXP (body
, 2), 0);
1217 rtx max_lab
= XEXP (XEXP (body
, 3), 0);
1218 int rel_addr
= INSN_ADDRESSES (INSN_UID (rel_lab
));
1219 int min_addr
= INSN_ADDRESSES (INSN_UID (min_lab
));
1220 int max_addr
= INSN_ADDRESSES (INSN_UID (max_lab
));
1223 addr_diff_vec_flags flags
;
1224 enum machine_mode vec_mode
;
1226 /* Avoid automatic aggregate initialization. */
1227 flags
= ADDR_DIFF_VEC_FLAGS (body
);
1229 /* Try to find a known alignment for rel_lab. */
1230 for (prev
= rel_lab
;
1232 && ! insn_lengths
[INSN_UID (prev
)]
1233 && ! (varying_length
[INSN_UID (prev
)] & 1);
1234 prev
= PREV_INSN (prev
))
1235 if (varying_length
[INSN_UID (prev
)] & 2)
1237 rel_align
= LABEL_TO_ALIGNMENT (prev
);
1241 /* See the comment on addr_diff_vec_flags in rtl.h for the
1242 meaning of the flags values. base: REL_LAB vec: INSN */
1243 /* Anything after INSN has still addresses from the last
1244 pass; adjust these so that they reflect our current
1245 estimate for this pass. */
1246 if (flags
.base_after_vec
)
1247 rel_addr
+= insn_current_address
- insn_last_address
;
1248 if (flags
.min_after_vec
)
1249 min_addr
+= insn_current_address
- insn_last_address
;
1250 if (flags
.max_after_vec
)
1251 max_addr
+= insn_current_address
- insn_last_address
;
1252 /* We want to know the worst case, i.e. lowest possible value
1253 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1254 its offset is positive, and we have to be wary of code shrink;
1255 otherwise, it is negative, and we have to be vary of code
1257 if (flags
.min_after_base
)
1259 /* If INSN is between REL_LAB and MIN_LAB, the size
1260 changes we are about to make can change the alignment
1261 within the observed offset, therefore we have to break
1262 it up into two parts that are independent. */
1263 if (! flags
.base_after_vec
&& flags
.min_after_vec
)
1265 min_addr
-= align_fuzz (rel_lab
, insn
, rel_align
, 0);
1266 min_addr
-= align_fuzz (insn
, min_lab
, 0, 0);
1269 min_addr
-= align_fuzz (rel_lab
, min_lab
, rel_align
, 0);
1273 if (flags
.base_after_vec
&& ! flags
.min_after_vec
)
1275 min_addr
-= align_fuzz (min_lab
, insn
, 0, ~0);
1276 min_addr
-= align_fuzz (insn
, rel_lab
, 0, ~0);
1279 min_addr
-= align_fuzz (min_lab
, rel_lab
, 0, ~0);
1281 /* Likewise, determine the highest lowest possible value
1282 for the offset of MAX_LAB. */
1283 if (flags
.max_after_base
)
1285 if (! flags
.base_after_vec
&& flags
.max_after_vec
)
1287 max_addr
+= align_fuzz (rel_lab
, insn
, rel_align
, ~0);
1288 max_addr
+= align_fuzz (insn
, max_lab
, 0, ~0);
1291 max_addr
+= align_fuzz (rel_lab
, max_lab
, rel_align
, ~0);
1295 if (flags
.base_after_vec
&& ! flags
.max_after_vec
)
1297 max_addr
+= align_fuzz (max_lab
, insn
, 0, 0);
1298 max_addr
+= align_fuzz (insn
, rel_lab
, 0, 0);
1301 max_addr
+= align_fuzz (max_lab
, rel_lab
, 0, 0);
1303 vec_mode
= CASE_VECTOR_SHORTEN_MODE (min_addr
- rel_addr
,
1304 max_addr
- rel_addr
, body
);
1306 || (GET_MODE_SIZE (vec_mode
)
1307 >= GET_MODE_SIZE (GET_MODE (body
))))
1308 PUT_MODE (body
, vec_mode
);
1309 if (JUMP_TABLES_IN_TEXT_SECTION
1310 || readonly_data_section
== text_section
)
1313 = (XVECLEN (body
, 1) * GET_MODE_SIZE (GET_MODE (body
)));
1314 insn_current_address
+= insn_lengths
[uid
];
1315 if (insn_lengths
[uid
] != old_length
)
1316 something_changed
= 1;
1321 #endif /* CASE_VECTOR_SHORTEN_MODE */
1323 if (! (varying_length
[uid
]))
1325 if (NONJUMP_INSN_P (insn
)
1326 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1330 body
= PATTERN (insn
);
1331 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1333 rtx inner_insn
= XVECEXP (body
, 0, i
);
1334 int inner_uid
= INSN_UID (inner_insn
);
1336 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1338 insn_current_address
+= insn_lengths
[inner_uid
];
1342 insn_current_address
+= insn_lengths
[uid
];
1347 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1351 body
= PATTERN (insn
);
1353 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1355 rtx inner_insn
= XVECEXP (body
, 0, i
);
1356 int inner_uid
= INSN_UID (inner_insn
);
1359 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1361 /* insn_current_length returns 0 for insns with a
1362 non-varying length. */
1363 if (! varying_length
[inner_uid
])
1364 inner_length
= insn_lengths
[inner_uid
];
1366 inner_length
= insn_current_length (inner_insn
);
1368 if (inner_length
!= insn_lengths
[inner_uid
])
1370 if (!increasing
|| inner_length
> insn_lengths
[inner_uid
])
1372 insn_lengths
[inner_uid
] = inner_length
;
1373 something_changed
= 1;
1376 inner_length
= insn_lengths
[inner_uid
];
1378 insn_current_address
+= inner_length
;
1379 new_length
+= inner_length
;
1384 new_length
= insn_current_length (insn
);
1385 insn_current_address
+= new_length
;
1388 #ifdef ADJUST_INSN_LENGTH
1389 /* If needed, do any adjustment. */
1390 tmp_length
= new_length
;
1391 ADJUST_INSN_LENGTH (insn
, new_length
);
1392 insn_current_address
+= (new_length
- tmp_length
);
1395 if (new_length
!= insn_lengths
[uid
]
1396 && (!increasing
|| new_length
> insn_lengths
[uid
]))
1398 insn_lengths
[uid
] = new_length
;
1399 something_changed
= 1;
1402 insn_current_address
+= insn_lengths
[uid
] - new_length
;
1404 /* For a non-optimizing compile, do only a single pass. */
1409 free (varying_length
);
1412 /* Given the body of an INSN known to be generated by an ASM statement, return
1413 the number of machine instructions likely to be generated for this insn.
1414 This is used to compute its length. */
1417 asm_insn_count (rtx body
)
1421 if (GET_CODE (body
) == ASM_INPUT
)
1422 templ
= XSTR (body
, 0);
1424 templ
= decode_asm_operands (body
, NULL
, NULL
, NULL
, NULL
, NULL
);
1426 return asm_str_count (templ
);
/* Return the number of machine instructions likely to be generated for the
   inline-asm template TEMPL.  An empty template counts as zero instructions;
   otherwise each logical-line separator or newline starts a new one.  */

int
asm_str_count (const char *templ)
{
  int count = 1;

  if (!*templ)
    return 0;

  for (; *templ; templ++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
	|| *templ == '\n')
      count++;

  return count;
}
/* ??? This is probably the wrong place for these.  */
/* Structure recording the mapping from source file and directory
   names at compile time to those to be embedded in debug
   information.  */
typedef struct debug_prefix_map
{
  const char *old_prefix;	/* Prefix to replace (from -fdebug-prefix-map OLD=NEW).  */
  const char *new_prefix;	/* Replacement prefix.  */
  size_t old_len;		/* Cached strlen of old_prefix.  */
  size_t new_len;		/* Cached strlen of new_prefix.  */
  struct debug_prefix_map *next; /* Next entry in the singly-linked list.  */
} debug_prefix_map;

/* Linked list of such structures.  */
debug_prefix_map *debug_prefix_maps;
1464 /* Record a debug file prefix mapping. ARG is the argument to
1465 -fdebug-prefix-map and must be of the form OLD=NEW. */
1468 add_debug_prefix_map (const char *arg
)
1470 debug_prefix_map
*map
;
1473 p
= strchr (arg
, '=');
1476 error ("invalid argument %qs to -fdebug-prefix-map", arg
);
1479 map
= XNEW (debug_prefix_map
);
1480 map
->old_prefix
= xstrndup (arg
, p
- arg
);
1481 map
->old_len
= p
- arg
;
1483 map
->new_prefix
= xstrdup (p
);
1484 map
->new_len
= strlen (p
);
1485 map
->next
= debug_prefix_maps
;
1486 debug_prefix_maps
= map
;
1489 /* Perform user-specified mapping of debug filename prefixes. Return
1490 the new name corresponding to FILENAME. */
1493 remap_debug_filename (const char *filename
)
1495 debug_prefix_map
*map
;
1500 for (map
= debug_prefix_maps
; map
; map
= map
->next
)
1501 if (filename_ncmp (filename
, map
->old_prefix
, map
->old_len
) == 0)
1505 name
= filename
+ map
->old_len
;
1506 name_len
= strlen (name
) + 1;
1507 s
= (char *) alloca (name_len
+ map
->new_len
);
1508 memcpy (s
, map
->new_prefix
, map
->new_len
);
1509 memcpy (s
+ map
->new_len
, name
, name_len
);
1510 return ggc_strdup (s
);
1513 /* Return true if DWARF2 debug info can be emitted for DECL. */
1516 dwarf2_debug_info_emitted_p (tree decl
)
1518 if (write_symbols
!= DWARF2_DEBUG
&& write_symbols
!= VMS_AND_DWARF2_DEBUG
)
1521 if (DECL_IGNORED_P (decl
))
1527 /* Return scope resulting from combination of S1 and S2. */
1529 choose_inner_scope (tree s1
, tree s2
)
1535 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
1540 /* Emit lexical block notes needed to change scope from S1 to S2. */
1543 change_scope (rtx orig_insn
, tree s1
, tree s2
)
1545 rtx insn
= orig_insn
;
1546 tree com
= NULL_TREE
;
1547 tree ts1
= s1
, ts2
= s2
;
1552 gcc_assert (ts1
&& ts2
);
1553 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
1554 ts1
= BLOCK_SUPERCONTEXT (ts1
);
1555 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
1556 ts2
= BLOCK_SUPERCONTEXT (ts2
);
1559 ts1
= BLOCK_SUPERCONTEXT (ts1
);
1560 ts2
= BLOCK_SUPERCONTEXT (ts2
);
1569 rtx note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
1570 NOTE_BLOCK (note
) = s
;
1571 s
= BLOCK_SUPERCONTEXT (s
);
1578 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
1579 NOTE_BLOCK (insn
) = s
;
1580 s
= BLOCK_SUPERCONTEXT (s
);
1584 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1585 on the scope tree and the newly reordered instructions. */
1588 reemit_insn_block_notes (void)
1590 tree cur_block
= DECL_INITIAL (cfun
->decl
);
1593 insn
= get_insns ();
1594 if (!active_insn_p (insn
))
1595 insn
= next_active_insn (insn
);
1596 for (; insn
; insn
= next_active_insn (insn
))
1600 /* Avoid putting scope notes between jump table and its label. */
1601 if (JUMP_TABLE_DATA_P (insn
))
1604 this_block
= insn_scope (insn
);
1605 /* For sequences compute scope resulting from merging all scopes
1606 of instructions nested inside. */
1607 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1610 rtx body
= PATTERN (insn
);
1613 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1614 this_block
= choose_inner_scope (this_block
,
1615 insn_scope (XVECEXP (body
, 0, i
)));
1619 if (INSN_LOCATION (insn
) == UNKNOWN_LOCATION
)
1622 this_block
= DECL_INITIAL (cfun
->decl
);
1625 if (this_block
!= cur_block
)
1627 change_scope (insn
, cur_block
, this_block
);
1628 cur_block
= this_block
;
1632 /* change_scope emits before the insn, not after. */
1633 note
= emit_note (NOTE_INSN_DELETED
);
1634 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
1640 /* Output assembler code for the start of a function,
1641 and initialize some of the variables in this file
1642 for the new function. The label for the function and associated
1643 assembler pseudo-ops have already been output in `assemble_start_function'.
1645 FIRST is the first insn of the rtl for the function being compiled.
1646 FILE is the file to write assembler code to.
1647 OPTIMIZE_P is nonzero if we should eliminate redundant
1648 test and compare insns. */
1651 final_start_function (rtx first ATTRIBUTE_UNUSED
, FILE *file
,
1652 int optimize_p ATTRIBUTE_UNUSED
)
1656 this_is_asm_operands
= 0;
1658 last_filename
= LOCATION_FILE (prologue_location
);
1659 last_linenum
= LOCATION_LINE (prologue_location
);
1660 last_discriminator
= discriminator
= 0;
1662 high_block_linenum
= high_function_linenum
= last_linenum
;
1664 if (!DECL_IGNORED_P (current_function_decl
))
1665 debug_hooks
->begin_prologue (last_linenum
, last_filename
);
1667 if (!dwarf2_debug_info_emitted_p (current_function_decl
))
1668 dwarf2out_begin_prologue (0, NULL
);
1670 #ifdef LEAF_REG_REMAP
1671 if (crtl
->uses_only_leaf_regs
)
1672 leaf_renumber_regs (first
);
1675 /* The Sun386i and perhaps other machines don't work right
1676 if the profiling code comes after the prologue. */
1677 if (targetm
.profile_before_prologue () && crtl
->profile
)
1678 profile_function (file
);
1680 /* If debugging, assign block numbers to all of the blocks in this
1684 reemit_insn_block_notes ();
1685 number_blocks (current_function_decl
);
1686 /* We never actually put out begin/end notes for the top-level
1687 block in the function. But, conceptually, that block is
1689 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl
)) = 1;
1692 if (warn_frame_larger_than
1693 && get_frame_size () > frame_larger_than_size
)
1695 /* Issue a warning */
1696 warning (OPT_Wframe_larger_than_
,
1697 "the frame size of %wd bytes is larger than %wd bytes",
1698 get_frame_size (), frame_larger_than_size
);
1701 /* First output the function prologue: code to set up the stack frame. */
1702 targetm
.asm_out
.function_prologue (file
, get_frame_size ());
1704 /* If the machine represents the prologue as RTL, the profiling code must
1705 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1706 #ifdef HAVE_prologue
1707 if (! HAVE_prologue
)
1709 profile_after_prologue (file
);
1713 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED
)
1715 if (!targetm
.profile_before_prologue () && crtl
->profile
)
1716 profile_function (file
);
1720 profile_function (FILE *file ATTRIBUTE_UNUSED
)
1722 #ifndef NO_PROFILE_COUNTERS
1723 # define NO_PROFILE_COUNTERS 0
1725 #ifdef ASM_OUTPUT_REG_PUSH
1726 rtx sval
= NULL
, chain
= NULL
;
1728 if (cfun
->returns_struct
)
1729 sval
= targetm
.calls
.struct_value_rtx (TREE_TYPE (current_function_decl
),
1731 if (cfun
->static_chain_decl
)
1732 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
1733 #endif /* ASM_OUTPUT_REG_PUSH */
1735 if (! NO_PROFILE_COUNTERS
)
1737 int align
= MIN (BIGGEST_ALIGNMENT
, LONG_TYPE_SIZE
);
1738 switch_to_section (data_section
);
1739 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
1740 targetm
.asm_out
.internal_label (file
, "LP", current_function_funcdef_no
);
1741 assemble_integer (const0_rtx
, LONG_TYPE_SIZE
/ BITS_PER_UNIT
, align
, 1);
1744 switch_to_section (current_function_section ());
1746 #ifdef ASM_OUTPUT_REG_PUSH
1747 if (sval
&& REG_P (sval
))
1748 ASM_OUTPUT_REG_PUSH (file
, REGNO (sval
));
1749 if (chain
&& REG_P (chain
))
1750 ASM_OUTPUT_REG_PUSH (file
, REGNO (chain
));
1753 FUNCTION_PROFILER (file
, current_function_funcdef_no
);
1755 #ifdef ASM_OUTPUT_REG_PUSH
1756 if (chain
&& REG_P (chain
))
1757 ASM_OUTPUT_REG_POP (file
, REGNO (chain
));
1758 if (sval
&& REG_P (sval
))
1759 ASM_OUTPUT_REG_POP (file
, REGNO (sval
));
1763 /* Output assembler code for the end of a function.
1764 For clarity, args are same as those of `final_start_function'
1765 even though not all of them are needed. */
1768 final_end_function (void)
1772 if (!DECL_IGNORED_P (current_function_decl
))
1773 debug_hooks
->end_function (high_function_linenum
);
1775 /* Finally, output the function epilogue:
1776 code to restore the stack frame and return to the caller. */
1777 targetm
.asm_out
.function_epilogue (asm_out_file
, get_frame_size ());
1779 /* And debug output. */
1780 if (!DECL_IGNORED_P (current_function_decl
))
1781 debug_hooks
->end_epilogue (last_linenum
, last_filename
);
1783 if (!dwarf2_debug_info_emitted_p (current_function_decl
)
1784 && dwarf2out_do_frame ())
1785 dwarf2out_end_epilogue (last_linenum
, last_filename
);
1789 /* Dumper helper for basic block information. FILE is the assembly
1790 output file, and INSN is the instruction being emitted. */
1793 dump_basic_block_info (FILE *file
, rtx insn
, basic_block
*start_to_bb
,
1794 basic_block
*end_to_bb
, int bb_map_size
, int *bb_seqn
)
1798 if (!flag_debug_asm
)
1801 if (INSN_UID (insn
) < bb_map_size
1802 && (bb
= start_to_bb
[INSN_UID (insn
)]) != NULL
)
1807 fprintf (file
, "%s BLOCK %d", ASM_COMMENT_START
, bb
->index
);
1809 fprintf (file
, " freq:%d", bb
->frequency
);
1811 fprintf (file
, " count:" HOST_WIDEST_INT_PRINT_DEC
,
1813 fprintf (file
, " seq:%d", (*bb_seqn
)++);
1814 fprintf (file
, "\n%s PRED:", ASM_COMMENT_START
);
1815 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1817 dump_edge_info (file
, e
, TDF_DETAILS
, 0);
1819 fprintf (file
, "\n");
1821 if (INSN_UID (insn
) < bb_map_size
1822 && (bb
= end_to_bb
[INSN_UID (insn
)]) != NULL
)
1827 fprintf (asm_out_file
, "%s SUCC:", ASM_COMMENT_START
);
1828 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1830 dump_edge_info (asm_out_file
, e
, TDF_DETAILS
, 1);
1832 fprintf (file
, "\n");
1836 /* Output assembler code for some insns: all or part of a function.
1837 For description of args, see `final_start_function', above. */
1840 final (rtx first
, FILE *file
, int optimize_p
)
1845 /* Used for -dA dump. */
1846 basic_block
*start_to_bb
= NULL
;
1847 basic_block
*end_to_bb
= NULL
;
1848 int bb_map_size
= 0;
1851 last_ignored_compare
= 0;
1854 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1856 /* If CC tracking across branches is enabled, record the insn which
1857 jumps to each branch only reached from one place. */
1858 if (optimize_p
&& JUMP_P (insn
))
1860 rtx lab
= JUMP_LABEL (insn
);
1861 if (lab
&& LABEL_P (lab
) && LABEL_NUSES (lab
) == 1)
1863 LABEL_REFS (lab
) = insn
;
1877 bb_map_size
= get_max_uid () + 1;
1878 start_to_bb
= XCNEWVEC (basic_block
, bb_map_size
);
1879 end_to_bb
= XCNEWVEC (basic_block
, bb_map_size
);
1881 /* There is no cfg for a thunk. */
1882 if (!cfun
->is_thunk
)
1883 FOR_EACH_BB_REVERSE (bb
)
1885 start_to_bb
[INSN_UID (BB_HEAD (bb
))] = bb
;
1886 end_to_bb
[INSN_UID (BB_END (bb
))] = bb
;
1890 /* Output the insns. */
1891 for (insn
= first
; insn
;)
1893 if (HAVE_ATTR_length
)
1895 if ((unsigned) INSN_UID (insn
) >= INSN_ADDRESSES_SIZE ())
1897 /* This can be triggered by bugs elsewhere in the compiler if
1898 new insns are created after init_insn_lengths is called. */
1899 gcc_assert (NOTE_P (insn
));
1900 insn_current_address
= -1;
1903 insn_current_address
= INSN_ADDRESSES (INSN_UID (insn
));
1906 dump_basic_block_info (file
, insn
, start_to_bb
, end_to_bb
,
1907 bb_map_size
, &bb_seqn
);
1908 insn
= final_scan_insn (insn
, file
, optimize_p
, 0, &seen
);
1917 /* Remove CFI notes, to avoid compare-debug failures. */
1918 for (insn
= first
; insn
; insn
= next
)
1920 next
= NEXT_INSN (insn
);
1922 && (NOTE_KIND (insn
) == NOTE_INSN_CFI
1923 || NOTE_KIND (insn
) == NOTE_INSN_CFI_LABEL
))
1929 get_insn_template (int code
, rtx insn
)
1931 switch (insn_data
[code
].output_format
)
1933 case INSN_OUTPUT_FORMAT_SINGLE
:
1934 return insn_data
[code
].output
.single
;
1935 case INSN_OUTPUT_FORMAT_MULTI
:
1936 return insn_data
[code
].output
.multi
[which_alternative
];
1937 case INSN_OUTPUT_FORMAT_FUNCTION
:
1939 return (*insn_data
[code
].output
.function
) (recog_data
.operand
, insn
);
1946 /* Emit the appropriate declaration for an alternate-entry-point
1947 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1948 LABEL_KIND != LABEL_NORMAL.
1950 The case fall-through in this function is intentional. */
1952 output_alternate_entry_point (FILE *file
, rtx insn
)
1954 const char *name
= LABEL_NAME (insn
);
1956 switch (LABEL_KIND (insn
))
1958 case LABEL_WEAK_ENTRY
:
1959 #ifdef ASM_WEAKEN_LABEL
1960 ASM_WEAKEN_LABEL (file
, name
);
1962 case LABEL_GLOBAL_ENTRY
:
1963 targetm
.asm_out
.globalize_label (file
, name
);
1964 case LABEL_STATIC_ENTRY
:
1965 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1966 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
1968 ASM_OUTPUT_LABEL (file
, name
);
1977 /* Given a CALL_INSN, find and return the nested CALL. */
1979 call_from_call_insn (rtx insn
)
1982 gcc_assert (CALL_P (insn
));
1985 while (GET_CODE (x
) != CALL
)
1987 switch (GET_CODE (x
))
1992 x
= COND_EXEC_CODE (x
);
1995 x
= XVECEXP (x
, 0, 0);
2005 /* The final scan for one insn, INSN.
2006 Args are same as in `final', except that INSN
2007 is the insn being scanned.
2008 Value returned is the next insn to be scanned.
2010 NOPEEPHOLES is the flag to disallow peephole processing (currently
2011 used for within delayed branch sequence output).
2013 SEEN is used to track the end of the prologue, for emitting
2014 debug information. We force the emission of a line note after
2015 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2016 at the beginning of the second basic block, whichever comes
2020 final_scan_insn (rtx insn
, FILE *file
, int optimize_p ATTRIBUTE_UNUSED
,
2021 int nopeepholes ATTRIBUTE_UNUSED
, int *seen
)
2030 /* Ignore deleted insns. These can occur when we split insns (due to a
2031 template of "#") while not optimizing. */
2032 if (INSN_DELETED_P (insn
))
2033 return NEXT_INSN (insn
);
2035 switch (GET_CODE (insn
))
2038 switch (NOTE_KIND (insn
))
2040 case NOTE_INSN_DELETED
:
2043 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
2044 in_cold_section_p
= !in_cold_section_p
;
2046 if (dwarf2out_do_frame ())
2047 dwarf2out_switch_text_section ();
2048 else if (!DECL_IGNORED_P (current_function_decl
))
2049 debug_hooks
->switch_text_section ();
2051 switch_to_section (current_function_section ());
2052 targetm
.asm_out
.function_switched_text_sections (asm_out_file
,
2053 current_function_decl
,
2057 case NOTE_INSN_BASIC_BLOCK
:
2058 if (targetm
.asm_out
.unwind_emit
)
2059 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
2061 if ((*seen
& (SEEN_EMITTED
| SEEN_BB
)) == SEEN_BB
)
2063 *seen
|= SEEN_EMITTED
;
2064 force_source_line
= true;
2069 discriminator
= NOTE_BASIC_BLOCK (insn
)->discriminator
;
2073 case NOTE_INSN_EH_REGION_BEG
:
2074 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHB",
2075 NOTE_EH_HANDLER (insn
));
2078 case NOTE_INSN_EH_REGION_END
:
2079 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHE",
2080 NOTE_EH_HANDLER (insn
));
2083 case NOTE_INSN_PROLOGUE_END
:
2084 targetm
.asm_out
.function_end_prologue (file
);
2085 profile_after_prologue (file
);
2087 if ((*seen
& (SEEN_EMITTED
| SEEN_NOTE
)) == SEEN_NOTE
)
2089 *seen
|= SEEN_EMITTED
;
2090 force_source_line
= true;
2097 case NOTE_INSN_EPILOGUE_BEG
:
2098 if (!DECL_IGNORED_P (current_function_decl
))
2099 (*debug_hooks
->begin_epilogue
) (last_linenum
, last_filename
);
2100 targetm
.asm_out
.function_begin_epilogue (file
);
2104 dwarf2out_emit_cfi (NOTE_CFI (insn
));
2107 case NOTE_INSN_CFI_LABEL
:
2108 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LCFI",
2109 NOTE_LABEL_NUMBER (insn
));
2112 case NOTE_INSN_FUNCTION_BEG
:
2114 if (!DECL_IGNORED_P (current_function_decl
))
2115 debug_hooks
->end_prologue (last_linenum
, last_filename
);
2117 if ((*seen
& (SEEN_EMITTED
| SEEN_NOTE
)) == SEEN_NOTE
)
2119 *seen
|= SEEN_EMITTED
;
2120 force_source_line
= true;
2127 case NOTE_INSN_BLOCK_BEG
:
2128 if (debug_info_level
== DINFO_LEVEL_NORMAL
2129 || debug_info_level
== DINFO_LEVEL_VERBOSE
2130 || write_symbols
== DWARF2_DEBUG
2131 || write_symbols
== VMS_AND_DWARF2_DEBUG
2132 || write_symbols
== VMS_DEBUG
)
2134 int n
= BLOCK_NUMBER (NOTE_BLOCK (insn
));
2138 high_block_linenum
= last_linenum
;
2140 /* Output debugging info about the symbol-block beginning. */
2141 if (!DECL_IGNORED_P (current_function_decl
))
2142 debug_hooks
->begin_block (last_linenum
, n
);
2144 /* Mark this block as output. */
2145 TREE_ASM_WRITTEN (NOTE_BLOCK (insn
)) = 1;
2147 if (write_symbols
== DBX_DEBUG
2148 || write_symbols
== SDB_DEBUG
)
2150 location_t
*locus_ptr
2151 = block_nonartificial_location (NOTE_BLOCK (insn
));
2153 if (locus_ptr
!= NULL
)
2155 override_filename
= LOCATION_FILE (*locus_ptr
);
2156 override_linenum
= LOCATION_LINE (*locus_ptr
);
2161 case NOTE_INSN_BLOCK_END
:
2162 if (debug_info_level
== DINFO_LEVEL_NORMAL
2163 || debug_info_level
== DINFO_LEVEL_VERBOSE
2164 || write_symbols
== DWARF2_DEBUG
2165 || write_symbols
== VMS_AND_DWARF2_DEBUG
2166 || write_symbols
== VMS_DEBUG
)
2168 int n
= BLOCK_NUMBER (NOTE_BLOCK (insn
));
2172 /* End of a symbol-block. */
2174 gcc_assert (block_depth
>= 0);
2176 if (!DECL_IGNORED_P (current_function_decl
))
2177 debug_hooks
->end_block (high_block_linenum
, n
);
2179 if (write_symbols
== DBX_DEBUG
2180 || write_symbols
== SDB_DEBUG
)
2182 tree outer_block
= BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn
));
2183 location_t
*locus_ptr
2184 = block_nonartificial_location (outer_block
);
2186 if (locus_ptr
!= NULL
)
2188 override_filename
= LOCATION_FILE (*locus_ptr
);
2189 override_linenum
= LOCATION_LINE (*locus_ptr
);
2193 override_filename
= NULL
;
2194 override_linenum
= 0;
2199 case NOTE_INSN_DELETED_LABEL
:
2200 /* Emit the label. We may have deleted the CODE_LABEL because
2201 the label could be proved to be unreachable, though still
2202 referenced (in the form of having its address taken. */
2203 ASM_OUTPUT_DEBUG_LABEL (file
, "L", CODE_LABEL_NUMBER (insn
));
2206 case NOTE_INSN_DELETED_DEBUG_LABEL
:
2207 /* Similarly, but need to use different namespace for it. */
2208 if (CODE_LABEL_NUMBER (insn
) != -1)
2209 ASM_OUTPUT_DEBUG_LABEL (file
, "LDL", CODE_LABEL_NUMBER (insn
));
2212 case NOTE_INSN_VAR_LOCATION
:
2213 case NOTE_INSN_CALL_ARG_LOCATION
:
2214 if (!DECL_IGNORED_P (current_function_decl
))
2215 debug_hooks
->var_location (insn
);
2228 /* The target port might emit labels in the output function for
2229 some insn, e.g. sh.c output_branchy_insn. */
2230 if (CODE_LABEL_NUMBER (insn
) <= max_labelno
)
2232 int align
= LABEL_TO_ALIGNMENT (insn
);
2233 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2234 int max_skip
= LABEL_TO_MAX_SKIP (insn
);
2237 if (align
&& NEXT_INSN (insn
))
2239 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2240 ASM_OUTPUT_MAX_SKIP_ALIGN (file
, align
, max_skip
);
2242 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2243 ASM_OUTPUT_ALIGN_WITH_NOP (file
, align
);
2245 ASM_OUTPUT_ALIGN (file
, align
);
2252 if (!DECL_IGNORED_P (current_function_decl
) && LABEL_NAME (insn
))
2253 debug_hooks
->label (insn
);
2257 next
= next_nonnote_insn (insn
);
2258 /* If this label is followed by a jump-table, make sure we put
2259 the label in the read-only section. Also possibly write the
2260 label and jump table together. */
2261 if (next
!= 0 && JUMP_TABLE_DATA_P (next
))
2263 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2264 /* In this case, the case vector is being moved by the
2265 target, so don't output the label at all. Leave that
2266 to the back end macros. */
2268 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2272 switch_to_section (targetm
.asm_out
.function_rodata_section
2273 (current_function_decl
));
2275 #ifdef ADDR_VEC_ALIGN
2276 log_align
= ADDR_VEC_ALIGN (next
);
2278 log_align
= exact_log2 (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
2280 ASM_OUTPUT_ALIGN (file
, log_align
);
2283 switch_to_section (current_function_section ());
2285 #ifdef ASM_OUTPUT_CASE_LABEL
2286 ASM_OUTPUT_CASE_LABEL (file
, "L", CODE_LABEL_NUMBER (insn
),
2289 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2294 if (LABEL_ALT_ENTRY_P (insn
))
2295 output_alternate_entry_point (file
, insn
);
2297 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2302 rtx body
= PATTERN (insn
);
2303 int insn_code_number
;
2307 /* Reset this early so it is correct for ASM statements. */
2308 current_insn_predicate
= NULL_RTX
;
2310 /* An INSN, JUMP_INSN or CALL_INSN.
2311 First check for special kinds that recog doesn't recognize. */
2313 if (GET_CODE (body
) == USE
/* These are just declarations. */
2314 || GET_CODE (body
) == CLOBBER
)
2319 /* If there is a REG_CC_SETTER note on this insn, it means that
2320 the setting of the condition code was done in the delay slot
2321 of the insn that branched here. So recover the cc status
2322 from the insn that set it. */
2324 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
2327 NOTICE_UPDATE_CC (PATTERN (XEXP (note
, 0)), XEXP (note
, 0));
2328 cc_prev_status
= cc_status
;
2333 /* Detect insns that are really jump-tables
2334 and output them as such. */
2336 if (GET_CODE (body
) == ADDR_VEC
|| GET_CODE (body
) == ADDR_DIFF_VEC
)
2338 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2342 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2343 switch_to_section (targetm
.asm_out
.function_rodata_section
2344 (current_function_decl
));
2346 switch_to_section (current_function_section ());
2350 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2351 if (GET_CODE (body
) == ADDR_VEC
)
2353 #ifdef ASM_OUTPUT_ADDR_VEC
2354 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn
), body
);
2361 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2362 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn
), body
);
2368 vlen
= XVECLEN (body
, GET_CODE (body
) == ADDR_DIFF_VEC
);
2369 for (idx
= 0; idx
< vlen
; idx
++)
2371 if (GET_CODE (body
) == ADDR_VEC
)
2373 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2374 ASM_OUTPUT_ADDR_VEC_ELT
2375 (file
, CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 0, idx
), 0)));
2382 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2383 ASM_OUTPUT_ADDR_DIFF_ELT
2386 CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 1, idx
), 0)),
2387 CODE_LABEL_NUMBER (XEXP (XEXP (body
, 0), 0)));
2393 #ifdef ASM_OUTPUT_CASE_END
2394 ASM_OUTPUT_CASE_END (file
,
2395 CODE_LABEL_NUMBER (PREV_INSN (insn
)),
2400 switch_to_section (current_function_section ());
2404 /* Output this line note if it is the first or the last line
2406 if (!DECL_IGNORED_P (current_function_decl
)
2407 && notice_source_line (insn
, &is_stmt
))
2408 (*debug_hooks
->source_line
) (last_linenum
, last_filename
,
2409 last_discriminator
, is_stmt
);
2411 if (GET_CODE (body
) == ASM_INPUT
)
2413 const char *string
= XSTR (body
, 0);
2415 /* There's no telling what that did to the condition codes. */
2420 expanded_location loc
;
2423 loc
= expand_location (ASM_INPUT_SOURCE_LOCATION (body
));
2424 if (*loc
.file
&& loc
.line
)
2425 fprintf (asm_out_file
, "%s %i \"%s\" 1\n",
2426 ASM_COMMENT_START
, loc
.line
, loc
.file
);
2427 fprintf (asm_out_file
, "\t%s\n", string
);
2428 #if HAVE_AS_LINE_ZERO
2429 if (*loc
.file
&& loc
.line
)
2430 fprintf (asm_out_file
, "%s 0 \"\" 2\n", ASM_COMMENT_START
);
2436 /* Detect `asm' construct with operands. */
2437 if (asm_noperands (body
) >= 0)
2439 unsigned int noperands
= asm_noperands (body
);
2440 rtx
*ops
= XALLOCAVEC (rtx
, noperands
);
2443 expanded_location expanded
;
2445 /* There's no telling what that did to the condition codes. */
2448 /* Get out the operand values. */
2449 string
= decode_asm_operands (body
, ops
, NULL
, NULL
, NULL
, &loc
);
2450 /* Inhibit dying on what would otherwise be compiler bugs. */
2451 insn_noperands
= noperands
;
2452 this_is_asm_operands
= insn
;
2453 expanded
= expand_location (loc
);
2455 #ifdef FINAL_PRESCAN_INSN
2456 FINAL_PRESCAN_INSN (insn
, ops
, insn_noperands
);
2459 /* Output the insn using them. */
2463 if (expanded
.file
&& expanded
.line
)
2464 fprintf (asm_out_file
, "%s %i \"%s\" 1\n",
2465 ASM_COMMENT_START
, expanded
.line
, expanded
.file
);
2466 output_asm_insn (string
, ops
);
2467 #if HAVE_AS_LINE_ZERO
2468 if (expanded
.file
&& expanded
.line
)
2469 fprintf (asm_out_file
, "%s 0 \"\" 2\n", ASM_COMMENT_START
);
2473 if (targetm
.asm_out
.final_postscan_insn
)
2474 targetm
.asm_out
.final_postscan_insn (file
, insn
, ops
,
2477 this_is_asm_operands
= 0;
2483 if (GET_CODE (body
) == SEQUENCE
)
2485 /* A delayed-branch sequence */
2488 final_sequence
= body
;
2490 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2491 force the restoration of a comparison that was previously
2492 thought unnecessary. If that happens, cancel this sequence
2493 and cause that insn to be restored. */
2495 next
= final_scan_insn (XVECEXP (body
, 0, 0), file
, 0, 1, seen
);
2496 if (next
!= XVECEXP (body
, 0, 1))
2502 for (i
= 1; i
< XVECLEN (body
, 0); i
++)
2504 rtx insn
= XVECEXP (body
, 0, i
);
2505 rtx next
= NEXT_INSN (insn
);
2506 /* We loop in case any instruction in a delay slot gets
2509 insn
= final_scan_insn (insn
, file
, 0, 1, seen
);
2510 while (insn
!= next
);
2512 #ifdef DBR_OUTPUT_SEQEND
2513 DBR_OUTPUT_SEQEND (file
);
2517 /* If the insn requiring the delay slot was a CALL_INSN, the
2518 insns in the delay slot are actually executed before the
2519 called function. Hence we don't preserve any CC-setting
2520 actions in these insns and the CC must be marked as being
2521 clobbered by the function. */
2522 if (CALL_P (XVECEXP (body
, 0, 0)))
2529 /* We have a real machine instruction as rtl. */
2531 body
= PATTERN (insn
);
2534 set
= single_set (insn
);
2536 /* Check for redundant test and compare instructions
2537 (when the condition codes are already set up as desired).
2538 This is done only when optimizing; if not optimizing,
2539 it should be possible for the user to alter a variable
2540 with the debugger in between statements
2541 and the next statement should reexamine the variable
2542 to compute the condition codes. */
2547 && GET_CODE (SET_DEST (set
)) == CC0
2548 && insn
!= last_ignored_compare
)
2551 if (GET_CODE (SET_SRC (set
)) == SUBREG
)
2552 SET_SRC (set
) = alter_subreg (&SET_SRC (set
), true);
2554 src1
= SET_SRC (set
);
2556 if (GET_CODE (SET_SRC (set
)) == COMPARE
)
2558 if (GET_CODE (XEXP (SET_SRC (set
), 0)) == SUBREG
)
2559 XEXP (SET_SRC (set
), 0)
2560 = alter_subreg (&XEXP (SET_SRC (set
), 0), true);
2561 if (GET_CODE (XEXP (SET_SRC (set
), 1)) == SUBREG
)
2562 XEXP (SET_SRC (set
), 1)
2563 = alter_subreg (&XEXP (SET_SRC (set
), 1), true);
2564 if (XEXP (SET_SRC (set
), 1)
2565 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set
), 0))))
2566 src2
= XEXP (SET_SRC (set
), 0);
2568 if ((cc_status
.value1
!= 0
2569 && rtx_equal_p (src1
, cc_status
.value1
))
2570 || (cc_status
.value2
!= 0
2571 && rtx_equal_p (src1
, cc_status
.value2
))
2572 || (src2
!= 0 && cc_status
.value1
!= 0
2573 && rtx_equal_p (src2
, cc_status
.value1
))
2574 || (src2
!= 0 && cc_status
.value2
!= 0
2575 && rtx_equal_p (src2
, cc_status
.value2
)))
2577 /* Don't delete insn if it has an addressing side-effect. */
2578 if (! FIND_REG_INC_NOTE (insn
, NULL_RTX
)
2579 /* or if anything in it is volatile. */
2580 && ! volatile_refs_p (PATTERN (insn
)))
2582 /* We don't really delete the insn; just ignore it. */
2583 last_ignored_compare
= insn
;
2590 /* If this is a conditional branch, maybe modify it
2591 if the cc's are in a nonstandard state
2592 so that it accomplishes the same thing that it would
2593 do straightforwardly if the cc's were set up normally. */
2595 if (cc_status
.flags
!= 0
2597 && GET_CODE (body
) == SET
2598 && SET_DEST (body
) == pc_rtx
2599 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2600 && COMPARISON_P (XEXP (SET_SRC (body
), 0))
2601 && XEXP (XEXP (SET_SRC (body
), 0), 0) == cc0_rtx
)
2603 /* This function may alter the contents of its argument
2604 and clear some of the cc_status.flags bits.
2605 It may also return 1 meaning condition now always true
2606 or -1 meaning condition now always false
2607 or 2 meaning condition nontrivial but altered. */
2608 int result
= alter_cond (XEXP (SET_SRC (body
), 0));
2609 /* If condition now has fixed value, replace the IF_THEN_ELSE
2610 with its then-operand or its else-operand. */
2612 SET_SRC (body
) = XEXP (SET_SRC (body
), 1);
2614 SET_SRC (body
) = XEXP (SET_SRC (body
), 2);
2616 /* The jump is now either unconditional or a no-op.
2617 If it has become a no-op, don't try to output it.
2618 (It would not be recognized.) */
2619 if (SET_SRC (body
) == pc_rtx
)
2624 else if (ANY_RETURN_P (SET_SRC (body
)))
2625 /* Replace (set (pc) (return)) with (return). */
2626 PATTERN (insn
) = body
= SET_SRC (body
);
2628 /* Rerecognize the instruction if it has changed. */
2630 INSN_CODE (insn
) = -1;
2633 /* If this is a conditional trap, maybe modify it if the cc's
2634 are in a nonstandard state so that it accomplishes the same
2635 thing that it would do straightforwardly if the cc's were
2637 if (cc_status
.flags
!= 0
2638 && NONJUMP_INSN_P (insn
)
2639 && GET_CODE (body
) == TRAP_IF
2640 && COMPARISON_P (TRAP_CONDITION (body
))
2641 && XEXP (TRAP_CONDITION (body
), 0) == cc0_rtx
)
2643 /* This function may alter the contents of its argument
2644 and clear some of the cc_status.flags bits.
2645 It may also return 1 meaning condition now always true
2646 or -1 meaning condition now always false
2647 or 2 meaning condition nontrivial but altered. */
2648 int result
= alter_cond (TRAP_CONDITION (body
));
2650 /* If TRAP_CONDITION has become always false, delete the
2658 /* If TRAP_CONDITION has become always true, replace
2659 TRAP_CONDITION with const_true_rtx. */
2661 TRAP_CONDITION (body
) = const_true_rtx
;
2663 /* Rerecognize the instruction if it has changed. */
2665 INSN_CODE (insn
) = -1;
2668 /* Make same adjustments to instructions that examine the
2669 condition codes without jumping and instructions that
2670 handle conditional moves (if this machine has either one). */
2672 if (cc_status
.flags
!= 0
2675 rtx cond_rtx
, then_rtx
, else_rtx
;
2678 && GET_CODE (SET_SRC (set
)) == IF_THEN_ELSE
)
2680 cond_rtx
= XEXP (SET_SRC (set
), 0);
2681 then_rtx
= XEXP (SET_SRC (set
), 1);
2682 else_rtx
= XEXP (SET_SRC (set
), 2);
2686 cond_rtx
= SET_SRC (set
);
2687 then_rtx
= const_true_rtx
;
2688 else_rtx
= const0_rtx
;
2691 if (COMPARISON_P (cond_rtx
)
2692 && XEXP (cond_rtx
, 0) == cc0_rtx
)
2695 result
= alter_cond (cond_rtx
);
2697 validate_change (insn
, &SET_SRC (set
), then_rtx
, 0);
2698 else if (result
== -1)
2699 validate_change (insn
, &SET_SRC (set
), else_rtx
, 0);
2700 else if (result
== 2)
2701 INSN_CODE (insn
) = -1;
2702 if (SET_DEST (set
) == SET_SRC (set
))
2709 #ifdef HAVE_peephole
2710 /* Do machine-specific peephole optimizations if desired. */
2712 if (optimize_p
&& !flag_no_peephole
&& !nopeepholes
)
2714 rtx next
= peephole (insn
);
2715 /* When peepholing, if there were notes within the peephole,
2716 emit them before the peephole. */
2717 if (next
!= 0 && next
!= NEXT_INSN (insn
))
2719 rtx note
, prev
= PREV_INSN (insn
);
2721 for (note
= NEXT_INSN (insn
); note
!= next
;
2722 note
= NEXT_INSN (note
))
2723 final_scan_insn (note
, file
, optimize_p
, nopeepholes
, seen
);
2725 /* Put the notes in the proper position for a later
2726 rescan. For example, the SH target can do this
2727 when generating a far jump in a delayed branch
2729 note
= NEXT_INSN (insn
);
2730 PREV_INSN (note
) = prev
;
2731 NEXT_INSN (prev
) = note
;
2732 NEXT_INSN (PREV_INSN (next
)) = insn
;
2733 PREV_INSN (insn
) = PREV_INSN (next
);
2734 NEXT_INSN (insn
) = next
;
2735 PREV_INSN (next
) = insn
;
2738 /* PEEPHOLE might have changed this. */
2739 body
= PATTERN (insn
);
2743 /* Try to recognize the instruction.
2744 If successful, verify that the operands satisfy the
2745 constraints for the instruction. Crash if they don't,
2746 since `reload' should have changed them so that they do. */
2748 insn_code_number
= recog_memoized (insn
);
2749 cleanup_subreg_operands (insn
);
2751 /* Dump the insn in the assembly for debugging (-dAP).
2752 If the final dump is requested as slim RTL, dump slim
2753 RTL to the assembly file also. */
2754 if (flag_dump_rtl_in_asm
)
2756 print_rtx_head
= ASM_COMMENT_START
;
2757 if (! (dump_flags
& TDF_SLIM
))
2758 print_rtl_single (asm_out_file
, insn
);
2760 dump_insn_slim (asm_out_file
, insn
);
2761 print_rtx_head
= "";
2764 if (! constrain_operands_cached (1))
2765 fatal_insn_not_found (insn
);
2767 /* Some target machines need to prescan each insn before
2770 #ifdef FINAL_PRESCAN_INSN
2771 FINAL_PRESCAN_INSN (insn
, recog_data
.operand
, recog_data
.n_operands
);
2774 if (targetm
.have_conditional_execution ()
2775 && GET_CODE (PATTERN (insn
)) == COND_EXEC
)
2776 current_insn_predicate
= COND_EXEC_TEST (PATTERN (insn
));
2779 cc_prev_status
= cc_status
;
2781 /* Update `cc_status' for this instruction.
2782 The instruction's output routine may change it further.
2783 If the output routine for a jump insn needs to depend
2784 on the cc status, it should look at cc_prev_status. */
2786 NOTICE_UPDATE_CC (body
, insn
);
2789 current_output_insn
= debug_insn
= insn
;
2791 /* Find the proper template for this insn. */
2792 templ
= get_insn_template (insn_code_number
, insn
);
2794 /* If the C code returns 0, it means that it is a jump insn
2795 which follows a deleted test insn, and that test insn
2796 needs to be reinserted. */
2801 gcc_assert (prev_nonnote_insn (insn
) == last_ignored_compare
);
2803 /* We have already processed the notes between the setter and
2804 the user. Make sure we don't process them again, this is
2805 particularly important if one of the notes is a block
2806 scope note or an EH note. */
2808 prev
!= last_ignored_compare
;
2809 prev
= PREV_INSN (prev
))
2812 delete_insn (prev
); /* Use delete_note. */
2818 /* If the template is the string "#", it means that this insn must
2820 if (templ
[0] == '#' && templ
[1] == '\0')
2822 rtx new_rtx
= try_split (body
, insn
, 0);
2824 /* If we didn't split the insn, go away. */
2825 if (new_rtx
== insn
&& PATTERN (new_rtx
) == body
)
2826 fatal_insn ("could not split insn", insn
);
2828 /* If we have a length attribute, this instruction should have
2829 been split in shorten_branches, to ensure that we would have
2830 valid length info for the splitees. */
2831 gcc_assert (!HAVE_ATTR_length
);
2836 /* ??? This will put the directives in the wrong place if
2837 get_insn_template outputs assembly directly. However calling it
2838 before get_insn_template breaks if the insns is split. */
2839 if (targetm
.asm_out
.unwind_emit_before_insn
2840 && targetm
.asm_out
.unwind_emit
)
2841 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
2845 rtx x
= call_from_call_insn (insn
);
2847 if (x
&& MEM_P (x
) && GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
2851 t
= SYMBOL_REF_DECL (x
);
2853 assemble_external (t
);
2855 if (!DECL_IGNORED_P (current_function_decl
))
2856 debug_hooks
->var_location (insn
);
2859 /* Output assembler code from the template. */
2860 output_asm_insn (templ
, recog_data
.operand
);
2862 /* Some target machines need to postscan each insn after
2864 if (targetm
.asm_out
.final_postscan_insn
)
2865 targetm
.asm_out
.final_postscan_insn (file
, insn
, recog_data
.operand
,
2866 recog_data
.n_operands
);
2868 if (!targetm
.asm_out
.unwind_emit_before_insn
2869 && targetm
.asm_out
.unwind_emit
)
2870 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
2872 current_output_insn
= debug_insn
= 0;
2875 return NEXT_INSN (insn
);
2878 /* Return whether a source line note needs to be emitted before INSN.
2879 Sets IS_STMT to TRUE if the line should be marked as a possible
2880 breakpoint location. */
2883 notice_source_line (rtx insn
, bool *is_stmt
)
2885 const char *filename
;
2888 if (override_filename
)
2890 filename
= override_filename
;
2891 linenum
= override_linenum
;
2895 filename
= insn_file (insn
);
2896 linenum
= insn_line (insn
);
2899 if (filename
== NULL
)
2902 if (force_source_line
2903 || filename
!= last_filename
2904 || last_linenum
!= linenum
)
2906 force_source_line
= false;
2907 last_filename
= filename
;
2908 last_linenum
= linenum
;
2909 last_discriminator
= discriminator
;
2911 high_block_linenum
= MAX (last_linenum
, high_block_linenum
);
2912 high_function_linenum
= MAX (last_linenum
, high_function_linenum
);
2916 if (SUPPORTS_DISCRIMINATOR
&& last_discriminator
!= discriminator
)
2918 /* If the discriminator changed, but the line number did not,
2919 output the line table entry with is_stmt false so the
2920 debugger does not treat this as a breakpoint location. */
2921 last_discriminator
= discriminator
;
2929 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2930 directly to the desired hard register. */
2933 cleanup_subreg_operands (rtx insn
)
2936 bool changed
= false;
2937 extract_insn_cached (insn
);
2938 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2940 /* The following test cannot use recog_data.operand when testing
2941 for a SUBREG: the underlying object might have been changed
2942 already if we are inside a match_operator expression that
2943 matches the else clause. Instead we test the underlying
2944 expression directly. */
2945 if (GET_CODE (*recog_data
.operand_loc
[i
]) == SUBREG
)
2947 recog_data
.operand
[i
] = alter_subreg (recog_data
.operand_loc
[i
], true);
2950 else if (GET_CODE (recog_data
.operand
[i
]) == PLUS
2951 || GET_CODE (recog_data
.operand
[i
]) == MULT
2952 || MEM_P (recog_data
.operand
[i
]))
2953 recog_data
.operand
[i
] = walk_alter_subreg (recog_data
.operand_loc
[i
], &changed
);
2956 for (i
= 0; i
< recog_data
.n_dups
; i
++)
2958 if (GET_CODE (*recog_data
.dup_loc
[i
]) == SUBREG
)
2960 *recog_data
.dup_loc
[i
] = alter_subreg (recog_data
.dup_loc
[i
], true);
2963 else if (GET_CODE (*recog_data
.dup_loc
[i
]) == PLUS
2964 || GET_CODE (*recog_data
.dup_loc
[i
]) == MULT
2965 || MEM_P (*recog_data
.dup_loc
[i
]))
2966 *recog_data
.dup_loc
[i
] = walk_alter_subreg (recog_data
.dup_loc
[i
], &changed
);
2969 df_insn_rescan (insn
);
2972 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
2973 the thing it is a subreg of. Do it anyway if FINAL_P. */
2976 alter_subreg (rtx
*xp
, bool final_p
)
2979 rtx y
= SUBREG_REG (x
);
2981 /* simplify_subreg does not remove subreg from volatile references.
2982 We are required to. */
2985 int offset
= SUBREG_BYTE (x
);
2987 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2988 contains 0 instead of the proper offset. See simplify_subreg. */
2990 && GET_MODE_SIZE (GET_MODE (y
)) < GET_MODE_SIZE (GET_MODE (x
)))
2992 int difference
= GET_MODE_SIZE (GET_MODE (y
))
2993 - GET_MODE_SIZE (GET_MODE (x
));
2994 if (WORDS_BIG_ENDIAN
)
2995 offset
+= (difference
/ UNITS_PER_WORD
) * UNITS_PER_WORD
;
2996 if (BYTES_BIG_ENDIAN
)
2997 offset
+= difference
% UNITS_PER_WORD
;
3001 *xp
= adjust_address (y
, GET_MODE (x
), offset
);
3003 *xp
= adjust_address_nv (y
, GET_MODE (x
), offset
);
3007 rtx new_rtx
= simplify_subreg (GET_MODE (x
), y
, GET_MODE (y
),
3012 else if (final_p
&& REG_P (y
))
3014 /* Simplify_subreg can't handle some REG cases, but we have to. */
3016 HOST_WIDE_INT offset
;
3018 regno
= subreg_regno (x
);
3019 if (subreg_lowpart_p (x
))
3020 offset
= byte_lowpart_offset (GET_MODE (x
), GET_MODE (y
));
3022 offset
= SUBREG_BYTE (x
);
3023 *xp
= gen_rtx_REG_offset (y
, GET_MODE (x
), regno
, offset
);
3030 /* Do alter_subreg on all the SUBREGs contained in X. */
3033 walk_alter_subreg (rtx
*xp
, bool *changed
)
3036 switch (GET_CODE (x
))
3041 XEXP (x
, 0) = walk_alter_subreg (&XEXP (x
, 0), changed
);
3042 XEXP (x
, 1) = walk_alter_subreg (&XEXP (x
, 1), changed
);
3047 XEXP (x
, 0) = walk_alter_subreg (&XEXP (x
, 0), changed
);
3052 return alter_subreg (xp
, true);
3063 /* Given BODY, the body of a jump instruction, alter the jump condition
3064 as required by the bits that are set in cc_status.flags.
3065 Not all of the bits there can be handled at this level in all cases.
3067 The value is normally 0.
3068 1 means that the condition has become always true.
3069 -1 means that the condition has become always false.
3070 2 means that COND has been altered. */
3073 alter_cond (rtx cond
)
3077 if (cc_status
.flags
& CC_REVERSED
)
3080 PUT_CODE (cond
, swap_condition (GET_CODE (cond
)));
3083 if (cc_status
.flags
& CC_INVERTED
)
3086 PUT_CODE (cond
, reverse_condition (GET_CODE (cond
)));
3089 if (cc_status
.flags
& CC_NOT_POSITIVE
)
3090 switch (GET_CODE (cond
))
3095 /* Jump becomes unconditional. */
3101 /* Jump becomes no-op. */
3105 PUT_CODE (cond
, EQ
);
3110 PUT_CODE (cond
, NE
);
3118 if (cc_status
.flags
& CC_NOT_NEGATIVE
)
3119 switch (GET_CODE (cond
))
3123 /* Jump becomes unconditional. */
3128 /* Jump becomes no-op. */
3133 PUT_CODE (cond
, EQ
);
3139 PUT_CODE (cond
, NE
);
3147 if (cc_status
.flags
& CC_NO_OVERFLOW
)
3148 switch (GET_CODE (cond
))
3151 /* Jump becomes unconditional. */
3155 PUT_CODE (cond
, EQ
);
3160 PUT_CODE (cond
, NE
);
3165 /* Jump becomes no-op. */
3172 if (cc_status
.flags
& (CC_Z_IN_NOT_N
| CC_Z_IN_N
))
3173 switch (GET_CODE (cond
))
3179 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? GE
: LT
);
3184 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? LT
: GE
);
3189 if (cc_status
.flags
& CC_NOT_SIGNED
)
3190 /* The flags are valid if signed condition operators are converted
3192 switch (GET_CODE (cond
))
3195 PUT_CODE (cond
, LEU
);
3200 PUT_CODE (cond
, LTU
);
3205 PUT_CODE (cond
, GTU
);
3210 PUT_CODE (cond
, GEU
);
3222 /* Report inconsistency between the assembler template and the operands.
3223 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3226 output_operand_lossage (const char *cmsgid
, ...)
3230 const char *pfx_str
;
3233 va_start (ap
, cmsgid
);
3235 pfx_str
= this_is_asm_operands
? _("invalid 'asm': ") : "output_operand: ";
3236 asprintf (&fmt_string
, "%s%s", pfx_str
, _(cmsgid
));
3237 vasprintf (&new_message
, fmt_string
, ap
);
3239 if (this_is_asm_operands
)
3240 error_for_asm (this_is_asm_operands
, "%s", new_message
);
3242 internal_error ("%s", new_message
);
3249 /* Output of assembler code from a template, and its subroutines. */
3251 /* Annotate the assembly with a comment describing the pattern and
3252 alternative used. */
3255 output_asm_name (void)
3259 int num
= INSN_CODE (debug_insn
);
3260 fprintf (asm_out_file
, "\t%s %d\t%s",
3261 ASM_COMMENT_START
, INSN_UID (debug_insn
),
3262 insn_data
[num
].name
);
3263 if (insn_data
[num
].n_alternatives
> 1)
3264 fprintf (asm_out_file
, "/%d", which_alternative
+ 1);
3266 if (HAVE_ATTR_length
)
3267 fprintf (asm_out_file
, "\t[length = %d]",
3268 get_attr_length (debug_insn
));
3270 /* Clear this so only the first assembler insn
3271 of any rtl insn will get the special comment for -dp. */
3276 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3277 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3278 corresponds to the address of the object and 0 if to the object. */
3281 get_mem_expr_from_op (rtx op
, int *paddressp
)
3289 return REG_EXPR (op
);
3290 else if (!MEM_P (op
))
3293 if (MEM_EXPR (op
) != 0)
3294 return MEM_EXPR (op
);
3296 /* Otherwise we have an address, so indicate it and look at the address. */
3300 /* First check if we have a decl for the address, then look at the right side
3301 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3302 But don't allow the address to itself be indirect. */
3303 if ((expr
= get_mem_expr_from_op (op
, &inner_addressp
)) && ! inner_addressp
)
3305 else if (GET_CODE (op
) == PLUS
3306 && (expr
= get_mem_expr_from_op (XEXP (op
, 1), &inner_addressp
)))
3310 || GET_RTX_CLASS (GET_CODE (op
)) == RTX_BIN_ARITH
)
3313 expr
= get_mem_expr_from_op (op
, &inner_addressp
);
3314 return inner_addressp
? 0 : expr
;
3317 /* Output operand names for assembler instructions. OPERANDS is the
3318 operand vector, OPORDER is the order to write the operands, and NOPS
3319 is the number of operands to write. */
3322 output_asm_operand_names (rtx
*operands
, int *oporder
, int nops
)
3327 for (i
= 0; i
< nops
; i
++)
3330 rtx op
= operands
[oporder
[i
]];
3331 tree expr
= get_mem_expr_from_op (op
, &addressp
);
3333 fprintf (asm_out_file
, "%c%s",
3334 wrote
? ',' : '\t', wrote
? "" : ASM_COMMENT_START
);
3338 fprintf (asm_out_file
, "%s",
3339 addressp
? "*" : "");
3340 print_mem_expr (asm_out_file
, expr
);
3343 else if (REG_P (op
) && ORIGINAL_REGNO (op
)
3344 && ORIGINAL_REGNO (op
) != REGNO (op
))
3345 fprintf (asm_out_file
, " tmp%i", ORIGINAL_REGNO (op
));
3349 #ifdef ASSEMBLER_DIALECT
3350 /* Helper function to parse assembler dialects in the asm string.
3351 This is called from output_asm_insn and asm_fprintf. */
3353 do_assembler_dialects (const char *p
, int *dialect
)
3364 output_operand_lossage ("nested assembly dialect alternatives");
3368 /* If we want the first dialect, do nothing. Otherwise, skip
3369 DIALECT_NUMBER of strings ending with '|'. */
3370 for (i
= 0; i
< dialect_number
; i
++)
3372 while (*p
&& *p
!= '}' && *p
++ != '|')
3379 output_operand_lossage ("unterminated assembly dialect alternative");
3386 /* Skip to close brace. */
3391 output_operand_lossage ("unterminated assembly dialect alternative");
3395 while (*p
++ != '}');
3399 putc (c
, asm_out_file
);
3404 putc (c
, asm_out_file
);
3415 /* Output text from TEMPLATE to the assembler output file,
3416 obeying %-directions to substitute operands taken from
3417 the vector OPERANDS.
3419 %N (for N a digit) means print operand N in usual manner.
3420 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3421 and print the label name with no punctuation.
3422 %cN means require operand N to be a constant
3423 and print the constant expression with no punctuation.
3424 %aN means expect operand N to be a memory address
3425 (not a memory reference!) and print a reference
3427 %nN means expect operand N to be a constant
3428 and print a constant expression for minus the value
3429 of the operand, with no other punctuation. */
3432 output_asm_insn (const char *templ
, rtx
*operands
)
3436 #ifdef ASSEMBLER_DIALECT
3439 int oporder
[MAX_RECOG_OPERANDS
];
3440 char opoutput
[MAX_RECOG_OPERANDS
];
3443 /* An insn may return a null string template
3444 in a case where no assembler code is needed. */
3448 memset (opoutput
, 0, sizeof opoutput
);
3450 putc ('\t', asm_out_file
);
3452 #ifdef ASM_OUTPUT_OPCODE
3453 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3460 if (flag_verbose_asm
)
3461 output_asm_operand_names (operands
, oporder
, ops
);
3462 if (flag_print_asm_name
)
3466 memset (opoutput
, 0, sizeof opoutput
);
3468 putc (c
, asm_out_file
);
3469 #ifdef ASM_OUTPUT_OPCODE
3470 while ((c
= *p
) == '\t')
3472 putc (c
, asm_out_file
);
3475 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3479 #ifdef ASSEMBLER_DIALECT
3483 p
= do_assembler_dialects (p
, &dialect
);
3488 /* %% outputs a single %. */
3492 putc (c
, asm_out_file
);
3494 /* %= outputs a number which is unique to each insn in the entire
3495 compilation. This is useful for making local labels that are
3496 referred to more than once in a given insn. */
3500 fprintf (asm_out_file
, "%d", insn_counter
);
3502 /* % followed by a letter and some digits
3503 outputs an operand in a special way depending on the letter.
3504 Letters `acln' are implemented directly.
3505 Other letters are passed to `output_operand' so that
3506 the TARGET_PRINT_OPERAND hook can define them. */
3507 else if (ISALPHA (*p
))
3510 unsigned long opnum
;
3513 opnum
= strtoul (p
, &endptr
, 10);
3516 output_operand_lossage ("operand number missing "
3518 else if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3519 output_operand_lossage ("operand number out of range");
3520 else if (letter
== 'l')
3521 output_asm_label (operands
[opnum
]);
3522 else if (letter
== 'a')
3523 output_address (operands
[opnum
]);
3524 else if (letter
== 'c')
3526 if (CONSTANT_ADDRESS_P (operands
[opnum
]))
3527 output_addr_const (asm_out_file
, operands
[opnum
]);
3529 output_operand (operands
[opnum
], 'c');
3531 else if (letter
== 'n')
3533 if (CONST_INT_P (operands
[opnum
]))
3534 fprintf (asm_out_file
, HOST_WIDE_INT_PRINT_DEC
,
3535 - INTVAL (operands
[opnum
]));
3538 putc ('-', asm_out_file
);
3539 output_addr_const (asm_out_file
, operands
[opnum
]);
3543 output_operand (operands
[opnum
], letter
);
3545 if (!opoutput
[opnum
])
3546 oporder
[ops
++] = opnum
;
3547 opoutput
[opnum
] = 1;
3552 /* % followed by a digit outputs an operand the default way. */
3553 else if (ISDIGIT (*p
))
3555 unsigned long opnum
;
3558 opnum
= strtoul (p
, &endptr
, 10);
3559 if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3560 output_operand_lossage ("operand number out of range");
3562 output_operand (operands
[opnum
], 0);
3564 if (!opoutput
[opnum
])
3565 oporder
[ops
++] = opnum
;
3566 opoutput
[opnum
] = 1;
3571 /* % followed by punctuation: output something for that
3572 punctuation character alone, with no operand. The
3573 TARGET_PRINT_OPERAND hook decides what is actually done. */
3574 else if (targetm
.asm_out
.print_operand_punct_valid_p ((unsigned char) *p
))
3575 output_operand (NULL_RTX
, *p
++);
3577 output_operand_lossage ("invalid %%-code");
3581 putc (c
, asm_out_file
);
3584 /* Write out the variable names for operands, if we know them. */
3585 if (flag_verbose_asm
)
3586 output_asm_operand_names (operands
, oporder
, ops
);
3587 if (flag_print_asm_name
)
3590 putc ('\n', asm_out_file
);
3593 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3596 output_asm_label (rtx x
)
3600 if (GET_CODE (x
) == LABEL_REF
)
3604 && NOTE_KIND (x
) == NOTE_INSN_DELETED_LABEL
))
3605 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3607 output_operand_lossage ("'%%l' operand isn't a label");
3609 assemble_name (asm_out_file
, buf
);
3612 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3613 output_operand. Marks SYMBOL_REFs as referenced through use of
3614 assemble_external. */
3617 mark_symbol_ref_as_used (rtx
*xp
, void *dummy ATTRIBUTE_UNUSED
)
3621 /* If we have a used symbol, we may have to emit assembly
3622 annotations corresponding to whether the symbol is external, weak
3623 or has non-default visibility. */
3624 if (GET_CODE (x
) == SYMBOL_REF
)
3628 t
= SYMBOL_REF_DECL (x
);
3630 assemble_external (t
);
3638 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3641 mark_symbol_refs_as_used (rtx x
)
3643 for_each_rtx (&x
, mark_symbol_ref_as_used
, NULL
);
3646 /* Print operand X using machine-dependent assembler syntax.
3647 CODE is a non-digit that preceded the operand-number in the % spec,
3648 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3649 between the % and the digits.
3650 When CODE is a non-letter, X is 0.
3652 The meanings of the letters are machine-dependent and controlled
3653 by TARGET_PRINT_OPERAND. */
3656 output_operand (rtx x
, int code ATTRIBUTE_UNUSED
)
3658 if (x
&& GET_CODE (x
) == SUBREG
)
3659 x
= alter_subreg (&x
, true);
3661 /* X must not be a pseudo reg. */
3662 gcc_assert (!x
|| !REG_P (x
) || REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3664 targetm
.asm_out
.print_operand (asm_out_file
, x
, code
);
3669 for_each_rtx (&x
, mark_symbol_ref_as_used
, NULL
);
3672 /* Print a memory reference operand for address X using
3673 machine-dependent assembler syntax. */
3676 output_address (rtx x
)
3678 bool changed
= false;
3679 walk_alter_subreg (&x
, &changed
);
3680 targetm
.asm_out
.print_operand_address (asm_out_file
, x
);
3683 /* Print an integer constant expression in assembler syntax.
3684 Addition and subtraction are the only arithmetic
3685 that may appear in these expressions. */
3688 output_addr_const (FILE *file
, rtx x
)
3693 switch (GET_CODE (x
))
3700 if (SYMBOL_REF_DECL (x
))
3701 assemble_external (SYMBOL_REF_DECL (x
));
3702 #ifdef ASM_OUTPUT_SYMBOL_REF
3703 ASM_OUTPUT_SYMBOL_REF (file
, x
);
3705 assemble_name (file
, XSTR (x
, 0));
3713 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3714 #ifdef ASM_OUTPUT_LABEL_REF
3715 ASM_OUTPUT_LABEL_REF (file
, buf
);
3717 assemble_name (file
, buf
);
3722 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3726 /* This used to output parentheses around the expression,
3727 but that does not work on the 386 (either ATT or BSD assembler). */
3728 output_addr_const (file
, XEXP (x
, 0));
3732 if (GET_MODE (x
) == VOIDmode
)
3734 /* We can use %d if the number is one word and positive. */
3735 if (CONST_DOUBLE_HIGH (x
))
3736 fprintf (file
, HOST_WIDE_INT_PRINT_DOUBLE_HEX
,
3737 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (x
),
3738 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3739 else if (CONST_DOUBLE_LOW (x
) < 0)
3740 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
3741 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3743 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3746 /* We can't handle floating point constants;
3747 PRINT_OPERAND must handle them. */
3748 output_operand_lossage ("floating constant misused");
3752 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_FIXED_VALUE_LOW (x
));
3756 /* Some assemblers need integer constants to appear last (eg masm). */
3757 if (CONST_INT_P (XEXP (x
, 0)))
3759 output_addr_const (file
, XEXP (x
, 1));
3760 if (INTVAL (XEXP (x
, 0)) >= 0)
3761 fprintf (file
, "+");
3762 output_addr_const (file
, XEXP (x
, 0));
3766 output_addr_const (file
, XEXP (x
, 0));
3767 if (!CONST_INT_P (XEXP (x
, 1))
3768 || INTVAL (XEXP (x
, 1)) >= 0)
3769 fprintf (file
, "+");
3770 output_addr_const (file
, XEXP (x
, 1));
3775 /* Avoid outputting things like x-x or x+5-x,
3776 since some assemblers can't handle that. */
3777 x
= simplify_subtraction (x
);
3778 if (GET_CODE (x
) != MINUS
)
3781 output_addr_const (file
, XEXP (x
, 0));
3782 fprintf (file
, "-");
3783 if ((CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) >= 0)
3784 || GET_CODE (XEXP (x
, 1)) == PC
3785 || GET_CODE (XEXP (x
, 1)) == SYMBOL_REF
)
3786 output_addr_const (file
, XEXP (x
, 1));
3789 fputs (targetm
.asm_out
.open_paren
, file
);
3790 output_addr_const (file
, XEXP (x
, 1));
3791 fputs (targetm
.asm_out
.close_paren
, file
);
3799 output_addr_const (file
, XEXP (x
, 0));
3803 if (targetm
.asm_out
.output_addr_const_extra (file
, x
))
3806 output_operand_lossage ("invalid expression as operand");
3810 /* Output a quoted string. */
3813 output_quoted_string (FILE *asm_file
, const char *string
)
3815 #ifdef OUTPUT_QUOTED_STRING
3816 OUTPUT_QUOTED_STRING (asm_file
, string
);
3820 putc ('\"', asm_file
);
3821 while ((c
= *string
++) != 0)
3825 if (c
== '\"' || c
== '\\')
3826 putc ('\\', asm_file
);
3830 fprintf (asm_file
, "\\%03o", (unsigned char) c
);
3832 putc ('\"', asm_file
);
3836 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3839 fprint_whex (FILE *f
, unsigned HOST_WIDE_INT value
)
3841 char buf
[2 + CHAR_BIT
* sizeof (value
) / 4];
3846 char *p
= buf
+ sizeof (buf
);
3848 *--p
= "0123456789abcdef"[value
% 16];
3849 while ((value
/= 16) != 0);
3852 fwrite (p
, 1, buf
+ sizeof (buf
) - p
, f
);
3856 /* Internal function that prints an unsigned long in decimal in reverse.
3857 The output string IS NOT null-terminated. */
3860 sprint_ul_rev (char *s
, unsigned long value
)
3865 s
[i
] = "0123456789"[value
% 10];
3868 /* alternate version, without modulo */
3869 /* oldval = value; */
3871 /* s[i] = "0123456789" [oldval - 10*value]; */
3878 /* Write an unsigned long as decimal to a file, fast. */
3881 fprint_ul (FILE *f
, unsigned long value
)
3883 /* python says: len(str(2**64)) == 20 */
3887 i
= sprint_ul_rev (s
, value
);
3889 /* It's probably too small to bother with string reversal and fputs. */
3898 /* Write an unsigned long as decimal to a string, fast.
3899 s must be wide enough to not overflow, at least 21 chars.
3900 Returns the length of the string (without terminating '\0'). */
3903 sprint_ul (char *s
, unsigned long value
)
3910 len
= sprint_ul_rev (s
, value
);
3913 /* Reverse the string. */
3927 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3928 %R prints the value of REGISTER_PREFIX.
3929 %L prints the value of LOCAL_LABEL_PREFIX.
3930 %U prints the value of USER_LABEL_PREFIX.
3931 %I prints the value of IMMEDIATE_PREFIX.
3932 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3933 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3935 We handle alternate assembler dialects here, just like output_asm_insn. */
3938 asm_fprintf (FILE *file
, const char *p
, ...)
3942 #ifdef ASSEMBLER_DIALECT
3947 va_start (argptr
, p
);
3954 #ifdef ASSEMBLER_DIALECT
3958 p
= do_assembler_dialects (p
, &dialect
);
3965 while (strchr ("-+ #0", c
))
3970 while (ISDIGIT (c
) || c
== '.')
3981 case 'd': case 'i': case 'u':
3982 case 'x': case 'X': case 'o':
3986 fprintf (file
, buf
, va_arg (argptr
, int));
3990 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3991 'o' cases, but we do not check for those cases. It
3992 means that the value is a HOST_WIDE_INT, which may be
3993 either `long' or `long long'. */
3994 memcpy (q
, HOST_WIDE_INT_PRINT
, strlen (HOST_WIDE_INT_PRINT
));
3995 q
+= strlen (HOST_WIDE_INT_PRINT
);
3998 fprintf (file
, buf
, va_arg (argptr
, HOST_WIDE_INT
));
4003 #ifdef HAVE_LONG_LONG
4009 fprintf (file
, buf
, va_arg (argptr
, long long));
4016 fprintf (file
, buf
, va_arg (argptr
, long));
4024 fprintf (file
, buf
, va_arg (argptr
, char *));
4028 #ifdef ASM_OUTPUT_OPCODE
4029 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
4034 #ifdef REGISTER_PREFIX
4035 fprintf (file
, "%s", REGISTER_PREFIX
);
4040 #ifdef IMMEDIATE_PREFIX
4041 fprintf (file
, "%s", IMMEDIATE_PREFIX
);
4046 #ifdef LOCAL_LABEL_PREFIX
4047 fprintf (file
, "%s", LOCAL_LABEL_PREFIX
);
4052 fputs (user_label_prefix
, file
);
4055 #ifdef ASM_FPRINTF_EXTENSIONS
4056 /* Uppercase letters are reserved for general use by asm_fprintf
4057 and so are not available to target specific code. In order to
4058 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4059 they are defined here. As they get turned into real extensions
4060 to asm_fprintf they should be removed from this list. */
4061 case 'A': case 'B': case 'C': case 'D': case 'E':
4062 case 'F': case 'G': case 'H': case 'J': case 'K':
4063 case 'M': case 'N': case 'P': case 'Q': case 'S':
4064 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4067 ASM_FPRINTF_EXTENSIONS (file
, argptr
, p
)
4080 /* Return nonzero if this function has no function calls. */
4083 leaf_function_p (void)
4088 if (crtl
->profile
|| profile_arc_flag
)
4091 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4094 && ! SIBLING_CALL_P (insn
))
4096 if (NONJUMP_INSN_P (insn
)
4097 && GET_CODE (PATTERN (insn
)) == SEQUENCE
4098 && CALL_P (XVECEXP (PATTERN (insn
), 0, 0))
4099 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn
), 0, 0)))
4102 for (link
= crtl
->epilogue_delay_list
;
4104 link
= XEXP (link
, 1))
4106 insn
= XEXP (link
, 0);
4109 && ! SIBLING_CALL_P (insn
))
4111 if (NONJUMP_INSN_P (insn
)
4112 && GET_CODE (PATTERN (insn
)) == SEQUENCE
4113 && CALL_P (XVECEXP (PATTERN (insn
), 0, 0))
4114 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn
), 0, 0)))
4121 /* Return 1 if branch is a forward branch.
4122 Uses insn_shuid array, so it works only in the final pass.  May be used by
4123 output templates to customarily add branch prediction hints.  */
4126 final_forward_branch_p (rtx insn
)
4128 int insn_id
, label_id
;
4130 gcc_assert (uid_shuid
);
4131 insn_id
= INSN_SHUID (insn
);
4132 label_id
= INSN_SHUID (JUMP_LABEL (insn
));
4133 /* We've hit some insns that does not have id information available. */
4134 gcc_assert (insn_id
&& label_id
);
4135 return insn_id
< label_id
;
4138 /* On some machines, a function with no call insns
4139 can run faster if it doesn't create its own register window.
4140 When output, the leaf function should use only the "output"
4141 registers. Ordinarily, the function would be compiled to use
4142 the "input" registers to find its arguments; it is a candidate
4143 for leaf treatment if it uses only the "input" registers.
4144 Leaf function treatment means renumbering so the function
4145 uses the "output" registers instead. */
4147 #ifdef LEAF_REGISTERS
4149 /* Return 1 if this function uses only the registers that can be
4150 safely renumbered. */
4153 only_leaf_regs_used (void)
4156 const char *const permitted_reg_in_leaf_functions
= LEAF_REGISTERS
;
4158 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4159 if ((df_regs_ever_live_p (i
) || global_regs
[i
])
4160 && ! permitted_reg_in_leaf_functions
[i
])
4163 if (crtl
->uses_pic_offset_table
4164 && pic_offset_table_rtx
!= 0
4165 && REG_P (pic_offset_table_rtx
)
4166 && ! permitted_reg_in_leaf_functions
[REGNO (pic_offset_table_rtx
)])
4172 /* Scan all instructions and renumber all registers into those
4173 available in leaf functions. */
4176 leaf_renumber_regs (rtx first
)
4180 /* Renumber only the actual patterns.
4181 The reg-notes can contain frame pointer refs,
4182 and renumbering them could crash, and should not be needed. */
4183 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4185 leaf_renumber_regs_insn (PATTERN (insn
));
4186 for (insn
= crtl
->epilogue_delay_list
;
4188 insn
= XEXP (insn
, 1))
4189 if (INSN_P (XEXP (insn
, 0)))
4190 leaf_renumber_regs_insn (PATTERN (XEXP (insn
, 0)));
4193 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4194 available in leaf functions. */
/* Renumber every hard register inside IN_RTX and its subexpressions
   into the corresponding leaf ("output") register, recursing through
   the rtx format string.
   NOTE(review): this extract is fragmentary -- several original lines
   (the REG_P test, the rtx 'used'-flag bookkeeping, the switch case
   labels and breaks) are missing; consult the upstream file before
   editing.  */
4197 leaf_renumber_regs_insn (rtx in_rtx
)
4200 const char *format_ptr
;
4205 /* Renumber all input-registers into output-registers.
4206 renumbered_regs would be 1 for an output-register;
4213 /* Don't renumber the same reg twice. */
/* Fetch the current hard register number of IN_RTX.  */
4217 newreg
= REGNO (in_rtx
);
4218 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4219 to reach here as part of a REG_NOTE. */
4220 if (newreg
>= FIRST_PSEUDO_REGISTER
)
/* Map the hard register to its leaf-function replacement and record
   the liveness change in the dataflow (df) machinery.  */
4225 newreg
= LEAF_REG_REMAP (newreg
);
4226 gcc_assert (newreg
>= 0);
4227 df_set_regs_ever_live (REGNO (in_rtx
), false);
4228 df_set_regs_ever_live (newreg
, true);
4229 SET_REGNO (in_rtx
, newreg
);
4233 if (INSN_P (in_rtx
))
4235 /* Inside a SEQUENCE, we find insns.
4236 Renumber just the patterns of these insns,
4237 just as we do for the top-level insns. */
4238 leaf_renumber_regs_insn (PATTERN (in_rtx
));
/* Generic traversal: walk IN_RTX's operands as described by its
   format string, recursing into 'e' (rtx) and 'E' (rtx vector)
   operands.  */
4242 format_ptr
= GET_RTX_FORMAT (GET_CODE (in_rtx
));
4244 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (in_rtx
)); i
++)
4245 switch (*format_ptr
++)
4248 leaf_renumber_regs_insn (XEXP (in_rtx
, i
));
4252 if (NULL
!= XVEC (in_rtx
, i
))
4254 for (j
= 0; j
< XVECLEN (in_rtx
, i
); j
++)
4255 leaf_renumber_regs_insn (XVECEXP (in_rtx
, i
, j
));
4274 /* Turn the RTL into assembly. */
/* Turn the current function's RTL into assembly: emit entry code, the
   insn stream and exit code, then the exception table, debug output,
   and any static constructor/destructor registration.
   NOTE(review): this extract is fragmentary -- declarations, braces
   and some statements from the original are missing; consult the
   upstream file before editing.  */
4276 rest_of_handle_final (void)
4281 /* Get the function's name, as described by its RTL. This may be
4282 different from the DECL_NAME name used in the source file. */
4284 x
= DECL_RTL (current_function_decl
);
4285 gcc_assert (MEM_P (x
));
4287 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
4288 fnname
= XSTR (x
, 0);
/* Emit the assembly for the function: prologue, body, epilogue.  */
4290 assemble_start_function (current_function_decl
, fnname
);
4291 final_start_function (get_insns (), asm_out_file
, optimize
);
4292 final (get_insns (), asm_out_file
, optimize
);
4293 final_end_function ();
4295 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4296 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4297 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4298 output_function_exception_table (fnname
);
4300 assemble_end_function (current_function_decl
, fnname
);
4302 user_defined_section_attribute
= false;
4304 /* Free up reg info memory. */
/* Push the generated assembly out before debug output runs.  */
4308 fflush (asm_out_file
);
4310 /* Write DBX symbols if requested. */
4312 /* Note that for those inline functions where we don't initially
4313 know for certain that we will be generating an out-of-line copy,
4314 the first invocation of this routine (rest_of_compilation) will
4315 skip over this code by doing a `goto exit_rest_of_compilation;'.
4316 Later on, wrapup_global_declarations will (indirectly) call
4317 rest_of_compilation again for those inline functions that need
4318 to have out-of-line copies generated. During that call, we
4319 *will* be routed past here. */
4321 timevar_push (TV_SYMOUT
);
4322 if (!DECL_IGNORED_P (current_function_decl
))
4323 debug_hooks
->function_decl (current_function_decl
);
4324 timevar_pop (TV_SYMOUT
);
4326 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4327 DECL_INITIAL (current_function_decl
) = error_mark_node
;
/* If this function is a static constructor/destructor and the target
   supports ctor/dtor mechanisms, register it with its initialization
   (or finalization) priority.  */
4329 if (DECL_STATIC_CONSTRUCTOR (current_function_decl
)
4330 && targetm
.have_ctors_dtors
)
4331 targetm
.asm_out
.constructor (XEXP (DECL_RTL (current_function_decl
), 0),
4332 decl_init_priority_lookup
4333 (current_function_decl
));
4334 if (DECL_STATIC_DESTRUCTOR (current_function_decl
)
4335 && targetm
.have_ctors_dtors
)
4336 targetm
.asm_out
.destructor (XEXP (DECL_RTL (current_function_decl
), 0),
4337 decl_fini_priority_lookup
4338 (current_function_decl
));
/* Pass descriptor for the final (RTL-to-assembly) pass; executes
   rest_of_handle_final.
   NOTE(review): several initializer fields are missing from this
   fragment; compare against the upstream definition.  */
4342 struct rtl_opt_pass pass_final
=
4347 OPTGROUP_NONE
, /* optinfo_flags */
4349 rest_of_handle_final
, /* execute */
4352 0, /* static_pass_number */
4353 TV_FINAL
, /* tv_id */
4354 0, /* properties_required */
4355 0, /* properties_provided */
4356 0, /* properties_destroyed */
4357 0, /* todo_flags_start */
4358 TODO_ggc_collect
/* todo_flags_finish */
/* Entry point of the branch-shortening pass: compute insn addresses
   and lengths so branches can be output in their shortest form.  */

static unsigned int
rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
  return 0;
}
/* Pass descriptor for the branch-shortening pass; executes
   rest_of_handle_shorten_branches.
   NOTE(review): several initializer fields are missing from this
   fragment; compare against the upstream definition.  */
4371 struct rtl_opt_pass pass_shorten_branches
=
4375 "shorten", /* name */
4376 OPTGROUP_NONE
, /* optinfo_flags */
4378 rest_of_handle_shorten_branches
, /* execute */
4381 0, /* static_pass_number */
4382 TV_SHORTEN_BRANCH
, /* tv_id */
4383 0, /* properties_required */
4384 0, /* properties_provided */
4385 0, /* properties_destroyed */
4386 0, /* todo_flags_start */
4387 0 /* todo_flags_finish */
/* Tear down per-function state after final: optionally append the
   function's final RTL to the -fdump-final-insns file, decompose the
   insn chain, and reset global flags for the next function.
   NOTE(review): this extract is fragmentary -- braces and several
   conditions/statements from the original are missing; consult the
   upstream file before editing.  */
4393 rest_of_clean_state (void)
4396 FILE *final_output
= NULL
;
4397 int save_unnumbered
= flag_dump_unnumbered
;
4398 int save_noaddr
= flag_dump_noaddr
;
/* When -fdump-final-insns= was given, append this function's final RTL
   to that file; on open failure report it and disable further dumps.  */
4400 if (flag_dump_final_insns
)
4402 final_output
= fopen (flag_dump_final_insns
, "a");
4405 error ("could not open final insn dump file %qs: %m",
4406 flag_dump_final_insns
);
4407 flag_dump_final_insns
= NULL
;
/* Dump without addresses or UIDs so two compilations can be compared
   textually (used by -fcompare-debug).  */
4411 flag_dump_noaddr
= flag_dump_unnumbered
= 1;
4412 if (flag_compare_debug_opt
|| flag_compare_debug
)
4413 dump_flags
|= TDF_NOUID
;
4414 dump_function_header (final_output
, current_function_decl
,
4416 final_insns_dump_p
= true;
/* Normalize UIDs before dumping: labels keep their label number,
   other insns lose their UID and block pointer.  */
4418 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4420 INSN_UID (insn
) = CODE_LABEL_NUMBER (insn
);
4424 set_block_for_insn (insn
, NULL
);
4425 INSN_UID (insn
) = 0;
4430 /* It is very important to decompose the RTL instruction chain here:
4431 debug information keeps pointing into CODE_LABEL insns inside the function
4432 body. If these remain pointing to the other insns, we end up preserving
4433 whole RTL chain and attached detailed debug info in memory. */
4434 for (insn
= get_insns (); insn
; insn
= next
)
4436 next
= NEXT_INSN (insn
);
4437 NEXT_INSN (insn
) = NULL
;
4438 PREV_INSN (insn
) = NULL
;
4441 && (!NOTE_P (insn
) ||
4442 (NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
4443 && NOTE_KIND (insn
) != NOTE_INSN_CALL_ARG_LOCATION
4444 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_BEG
4445 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_END
4446 && NOTE_KIND (insn
) != NOTE_INSN_DELETED_DEBUG_LABEL
)))
4447 print_rtl_single (final_output
, insn
);
/* Restore the dump flags saved above and close the dump file.  */
4452 flag_dump_noaddr
= save_noaddr
;
4453 flag_dump_unnumbered
= save_unnumbered
;
4454 final_insns_dump_p
= false;
4456 if (fclose (final_output
))
4458 error ("could not close final insn dump file %qs: %m",
4459 flag_dump_final_insns
);
4460 flag_dump_final_insns
= NULL
;
4464 /* In case the function was not output,
4465 don't leave any temporary anonymous types
4466 queued up for sdb output. */
4467 #ifdef SDB_DEBUGGING_INFO
4468 if (write_symbols
== SDB_DEBUG
)
4469 sdbout_types (NULL_TREE
);
/* Reset per-function global compilation state for the next function.  */
4472 flag_rerun_cse_after_global_opts
= 0;
4473 reload_completed
= 0;
4474 epilogue_completed
= 0;
4476 regstack_completed
= 0;
4479 /* Clear out the insn_length contents now that they are no
4481 init_insn_lengths ();
4483 /* Show no temporary slots allocated. */
4486 free_bb_for_insn ();
4490 /* We can reduce stack alignment on call site only when we are sure that
4491 the function body just produced will be actually used in the final
4493 if (decl_binds_to_current_def_p (current_function_decl
))
4495 unsigned int pref
= crtl
->preferred_stack_boundary
;
4496 if (crtl
->stack_alignment_needed
> crtl
->preferred_stack_boundary
)
4497 pref
= crtl
->stack_alignment_needed
;
4498 cgraph_rtl_info (current_function_decl
)->preferred_incoming_stack_boundary
4502 /* Make sure volatile mem refs aren't considered valid operands for
4503 arithmetic insns. We must call this here if this is a nested inline
4504 function, since the above code leaves us in the init_recog state,
4505 and the function context push/pop code does not save/restore volatile_ok.
4507 ??? Maybe it isn't necessary for expand_start_function to call this
4508 anymore if we do it here? */
4510 init_recog_no_volatile ();
4512 /* We're done with this function. Free up memory if we can. */
4513 free_after_parsing (cfun
);
4514 free_after_compilation (cfun
);
/* Pass descriptor for the post-final cleanup pass; executes
   rest_of_clean_state and destroys the PROP_rtl property.
   NOTE(review): several initializer fields are missing from this
   fragment; compare against the upstream definition.  */
4518 struct rtl_opt_pass pass_clean_state
=
4522 "*clean_state", /* name */
4523 OPTGROUP_NONE
, /* optinfo_flags */
4525 rest_of_clean_state
, /* execute */
4528 0, /* static_pass_number */
4529 TV_FINAL
, /* tv_id */
4530 0, /* properties_required */
4531 0, /* properties_provided */
4532 PROP_rtl
, /* properties_destroyed */
4533 0, /* todo_flags_start */
4534 0 /* todo_flags_finish */