/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target functions function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
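
/* Illustrative sketch (not code from this file): a driver such as
   rest_of_handle_final typically invokes this pass roughly as

     final_start_function (first, asm_out_file, optimize);
     final (first, asm_out_file, optimize);
     final_end_function ();

   where FIRST is the first insn of the function; each separately
   compiled piece would get its own call to `final' between the start
   and end calls, as described above.  */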
#include "coretypes.h"
#include "insn-config.h"
#include "tree-pretty-print.h" /* for dump_function_header */
#include "insn-attr.h"
#include "conditions.h"
#include "rtl-error.h"
#include "toplev.h" /* exact_log2, floor_log2 */
#include "tree-pass.h"
#include "print-rtl.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data declarations.  */
#endif

#include "dwarf2out.h"

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif
/* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
   So define a null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif

/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif
/* Bitflags used by final_scan_insn.  */
#define SEEN_NOTE	1
#define SEEN_EMITTED	2

/* Last insn processed by final_scan_insn.  */
static rtx_insn *debug_insn;
rtx_insn *current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Last discriminator written to assembly.  */
static int last_discriminator;

/* Discriminator of current block.  */
static int discriminator;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Override filename and line number.  */
static const char *override_filename;
static int override_linenum;

/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
const rtx_insn *this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */
static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */
static int insn_counter = 0;
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */

CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */

CC_STATUS cc_prev_status;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */

static int block_depth;
/* Nonzero if we have enabled APP processing of our assembler output.  */

static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */

rtx_sequence *final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;

/* True if printing into -fdump-final-insns= dump.  */
bool final_insns_dump_p;

/* True if profile_function should be called, but hasn't been called yet.  */
static bool need_profile_function;
static int asm_insn_count (rtx);
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx_insn *, bool *);
static rtx walk_alter_subreg (rtx *, bool *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx_insn *);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx_insn *);
#endif
static int alter_cond (rtx);
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
static int align_fuzz (rtx, rtx, int, unsigned);
static void collect_fn_hard_reg_usage (void);
static tree get_call_fndecl (rtx_insn *);
/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;
  final_sequence = 0;

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}
/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */

void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

void
default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
					 tree decl ATTRIBUTE_UNUSED,
					 bool new_is_cold ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */

void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}
/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

      fputs (ASM_APP_ON, asm_out_file);

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

      fputs (ASM_APP_OFF, asm_out_file);
/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).  Zero if not in a delayed branch sequence.  */

int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
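
/* Example (numbers are illustrative): while outputting a SEQUENCE made of
   a branch and two filled delay-slot insns, XVECLEN (final_sequence, 0)
   is 3, so dbr_sequence_length returns 2; outside of any delayed branch
   sequence it returns 0.  */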
/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

vec<int> insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the following
   comments.  */
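
/* A minimal sketch (illustrative only; MAX_KNOWN is an assumed local) of
   walking the alignment chain of INSN:

     for (rtx a = uid_align[INSN_UID (insn)]; a; a = uid_align[INSN_UID (a)])
       max_known = MAX (max_known, LABEL_TO_ALIGNMENT (a));

   align_fuzz below performs this kind of walk to bound alignment padding.  */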
struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;
/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  insn_lengths_max_uid = 0;

  if (HAVE_ATTR_length)
    INSN_ADDRESSES_FREE ();
}
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */

static int
get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
372 if (!HAVE_ATTR_length
)
375 if (insn_lengths_max_uid
> INSN_UID (insn
))
376 return insn_lengths
[INSN_UID (insn
)];
378 switch (GET_CODE (insn
))
388 length
= fallback_fn (insn
);
392 body
= PATTERN (insn
);
393 if (GET_CODE (body
) == USE
|| GET_CODE (body
) == CLOBBER
)
396 else if (GET_CODE (body
) == ASM_INPUT
|| asm_noperands (body
) >= 0)
397 length
= asm_insn_count (body
) * fallback_fn (insn
);
398 else if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (body
))
399 for (i
= 0; i
< seq
->len (); i
++)
400 length
+= get_attr_length_1 (seq
->insn (i
), fallback_fn
);
402 length
= fallback_fn (insn
);
409 #ifdef ADJUST_INSN_LENGTH
410 ADJUST_INSN_LENGTH (insn
, length
);
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */

int
get_attr_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */

int
get_attr_min_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

           if (X >= Y)
                   OX = round_up(IX, Y)
           else
                   OX = round_up(IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
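
/* A worked example of the estimate above (numbers are illustrative):
   with X = 4, Y = 16 and est(IX) = 10, the X < Y case gives

     OX = round_up (10, 4) + 16 - 4 = 12 + 12 = 24,

   so up to OX - IX = 14 bytes of padding must be assumed; with X >= Y the
   same inner size would need at most round_up (10, Y) - 10 bytes.  */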
#ifndef LABEL_ALIGN
#define LABEL_ALIGN(LABEL) align_labels_log
#endif

#ifndef LOOP_ALIGN
#define LOOP_ALIGN(LABEL) align_loops_log
#endif

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#ifndef JUMP_ALIGN
#define JUMP_ALIGN(LABEL) align_jumps_log
#endif
int
default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return 0;
}

int
default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_loops_max_skip;
}

int
default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_labels_max_skip;
}

int
default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_jumps_max_skip;
}
#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);
}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif
#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
/* For the benefit of port-specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  if (CODE_LABEL_NUMBER (label) <= max_labelno)
    return LABEL_TO_ALIGNMENT (label);
  return 0;
}

int
label_to_max_skip (rtx label)
{
  if (CODE_LABEL_NUMBER (label) <= max_labelno)
    return LABEL_TO_MAX_SKIP (label);
  return 0;
}
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */
/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}
/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */
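
/* Worked example (illustrative numbers): for a forward branch whose
   containing insn ended at address 100 in the previous pass and whose
   align_fuzz to the target is 6, the reference address is 100 - 6 = 94;
   for a backward branch starting at address 200 in the current pass with
   the same fuzz, it is 200 + 6 = 206, matching the code below.  */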
627 insn_current_reference_address (rtx_insn
*branch
)
632 if (! INSN_ADDRESSES_SET_P ())
635 rtx_insn
*seq
= NEXT_INSN (PREV_INSN (branch
));
636 seq_uid
= INSN_UID (seq
);
637 if (!JUMP_P (branch
))
638 /* This can happen for example on the PA; the objective is to know the
639 offset to address something in front of the start of the function.
640 Thus, we can treat it like a backward branch.
641 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
642 any alignment we'd encounter, so we skip the call to align_fuzz. */
643 return insn_current_address
;
644 dest
= JUMP_LABEL (branch
);
646 /* BRANCH has no proper alignment chain set, so use SEQ.
647 BRANCH also has no INSN_SHUID. */
648 if (INSN_SHUID (seq
) < INSN_SHUID (dest
))
650 /* Forward branch. */
651 return (insn_last_address
+ insn_lengths
[seq_uid
]
652 - align_fuzz (seq
, dest
, length_unit_log
, ~0));
656 /* Backward branch. */
657 return (insn_current_address
658 + align_fuzz (dest
, seq
, length_unit_log
, ~0));
/* Compute branch alignments based on frequency information in the
   CFG.  */

static unsigned int
compute_alignments (void)
668 int log
, max_skip
, max_log
;
671 int freq_threshold
= 0;
679 max_labelno
= max_label_num ();
680 min_labelno
= get_first_label_num ();
681 label_align
= XCNEWVEC (struct label_alignment
, max_labelno
- min_labelno
+ 1);
683 /* If not optimizing or optimizing for size, don't assign any alignments. */
684 if (! optimize
|| optimize_function_for_size_p (cfun
))
689 dump_reg_info (dump_file
);
690 dump_flow_info (dump_file
, TDF_DETAILS
);
691 flow_loops_dump (dump_file
, NULL
, 1);
693 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
694 FOR_EACH_BB_FN (bb
, cfun
)
695 if (bb
->frequency
> freq_max
)
696 freq_max
= bb
->frequency
;
697 freq_threshold
= freq_max
/ PARAM_VALUE (PARAM_ALIGN_THRESHOLD
);
700 fprintf (dump_file
, "freq_max: %i\n",freq_max
);
701 FOR_EACH_BB_FN (bb
, cfun
)
703 rtx_insn
*label
= BB_HEAD (bb
);
704 int fallthru_frequency
= 0, branch_frequency
= 0, has_fallthru
= 0;
709 || optimize_bb_for_size_p (bb
))
713 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
714 bb
->index
, bb
->frequency
, bb
->loop_father
->num
,
718 max_log
= LABEL_ALIGN (label
);
719 max_skip
= targetm
.asm_out
.label_align_max_skip (label
);
721 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
723 if (e
->flags
& EDGE_FALLTHRU
)
724 has_fallthru
= 1, fallthru_frequency
+= EDGE_FREQUENCY (e
);
726 branch_frequency
+= EDGE_FREQUENCY (e
);
730 fprintf (dump_file
, "BB %4i freq %4i loop %2i loop_depth"
731 " %2i fall %4i branch %4i",
732 bb
->index
, bb
->frequency
, bb
->loop_father
->num
,
734 fallthru_frequency
, branch_frequency
);
735 if (!bb
->loop_father
->inner
&& bb
->loop_father
->num
)
736 fprintf (dump_file
, " inner_loop");
737 if (bb
->loop_father
->header
== bb
)
738 fprintf (dump_file
, " loop_header");
739 fprintf (dump_file
, "\n");
/* There are two purposes to aligning a block with no fallthru incoming edge:
   1) to avoid fetch stalls when branch destination is near cache boundary
   2) to improve cache efficiency in case the previous block is not executed
      (so it does not need to be in the cache).

   To catch the first case, we align frequently executed blocks.
   To catch the second, we align blocks that are executed more frequently
   than the predecessor and the predecessor is likely to not be executed
   when function is called.  */
753 && (branch_frequency
> freq_threshold
754 || (bb
->frequency
> bb
->prev_bb
->frequency
* 10
755 && (bb
->prev_bb
->frequency
756 <= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
/ 2))))
758 log
= JUMP_ALIGN (label
);
760 fprintf (dump_file
, " jump alignment added.\n");
764 max_skip
= targetm
.asm_out
.jump_align_max_skip (label
);
/* In case the block is frequent and reached mostly by non-fallthru edges,
   align it.  It is most likely the first block of a loop.  */
770 && !(single_succ_p (bb
)
771 && single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
))
772 && optimize_bb_for_speed_p (bb
)
773 && branch_frequency
+ fallthru_frequency
> freq_threshold
775 > fallthru_frequency
* PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS
)))
777 log
= LOOP_ALIGN (label
);
779 fprintf (dump_file
, " internal loop alignment added.\n");
783 max_skip
= targetm
.asm_out
.loop_align_max_skip (label
);
786 LABEL_TO_ALIGNMENT (label
) = max_log
;
787 LABEL_TO_MAX_SKIP (label
) = max_skip
;
790 loop_optimizer_finalize ();
791 free_dominance_info (CDI_DOMINATORS
);
795 /* Grow the LABEL_ALIGN array after new labels are created. */
798 grow_label_align (void)
800 int old
= max_labelno
;
804 max_labelno
= max_label_num ();
806 n_labels
= max_labelno
- min_labelno
+ 1;
807 n_old_labels
= old
- min_labelno
+ 1;
809 label_align
= XRESIZEVEC (struct label_alignment
, label_align
, n_labels
);
811 /* Range of labels grows monotonically in the function. Failing here
812 means that the initialization of array got lost. */
813 gcc_assert (n_old_labels
<= n_labels
);
815 memset (label_align
+ n_old_labels
, 0,
816 (n_labels
- n_old_labels
) * sizeof (struct label_alignment
));
819 /* Update the already computed alignment information. LABEL_PAIRS is a vector
820 made up of pairs of labels for which the alignment information of the first
821 element will be copied from that of the second element. */
824 update_alignments (vec
<rtx
> &label_pairs
)
827 rtx iter
, label
= NULL_RTX
;
829 if (max_labelno
!= max_label_num ())
832 FOR_EACH_VEC_ELT (label_pairs
, i
, iter
)
835 LABEL_TO_ALIGNMENT (label
) = LABEL_TO_ALIGNMENT (iter
);
836 LABEL_TO_MAX_SKIP (label
) = LABEL_TO_MAX_SKIP (iter
);
844 const pass_data pass_data_compute_alignments
=
847 "alignments", /* name */
848 OPTGROUP_NONE
, /* optinfo_flags */
850 0, /* properties_required */
851 0, /* properties_provided */
852 0, /* properties_destroyed */
853 0, /* todo_flags_start */
854 0, /* todo_flags_finish */
857 class pass_compute_alignments
: public rtl_opt_pass
860 pass_compute_alignments (gcc::context
*ctxt
)
861 : rtl_opt_pass (pass_data_compute_alignments
, ctxt
)
864 /* opt_pass methods: */
865 virtual unsigned int execute (function
*) { return compute_alignments (); }
867 }; // class pass_compute_alignments
872 make_pass_compute_alignments (gcc::context
*ctxt
)
874 return new pass_compute_alignments (ctxt
);
/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times:  for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */
890 shorten_branches (rtx_insn
*first
)
897 #define MAX_CODE_ALIGN 16
899 int something_changed
= 1;
900 char *varying_length
;
903 rtx align_tab
[MAX_CODE_ALIGN
];
905 /* Compute maximum UID and allocate label_align / uid_shuid. */
906 max_uid
= get_max_uid ();
908 /* Free uid_shuid before reallocating it. */
911 uid_shuid
= XNEWVEC (int, max_uid
);
913 if (max_labelno
!= max_label_num ())
916 /* Initialize label_align and set up uid_shuid to be strictly
917 monotonically rising with insn order. */
918 /* We use max_log here to keep track of the maximum alignment we want to
919 impose on the next CODE_LABEL (or the current one if we are processing
920 the CODE_LABEL itself). */
925 for (insn
= get_insns (), i
= 1; insn
; insn
= NEXT_INSN (insn
))
929 INSN_SHUID (insn
) = i
++;
936 bool next_is_jumptable
;
938 /* Merge in alignments computed by compute_alignments. */
939 log
= LABEL_TO_ALIGNMENT (insn
);
943 max_skip
= LABEL_TO_MAX_SKIP (insn
);
946 next
= next_nonnote_insn (insn
);
947 next_is_jumptable
= next
&& JUMP_TABLE_DATA_P (next
);
948 if (!next_is_jumptable
)
950 log
= LABEL_ALIGN (insn
);
954 max_skip
= targetm
.asm_out
.label_align_max_skip (insn
);
957 /* ADDR_VECs only take room if read-only data goes into the text
959 if ((JUMP_TABLES_IN_TEXT_SECTION
960 || readonly_data_section
== text_section
)
961 && next_is_jumptable
)
963 log
= ADDR_VEC_ALIGN (next
);
967 max_skip
= targetm
.asm_out
.label_align_max_skip (insn
);
970 LABEL_TO_ALIGNMENT (insn
) = max_log
;
971 LABEL_TO_MAX_SKIP (insn
) = max_skip
;
975 else if (BARRIER_P (insn
))
979 for (label
= insn
; label
&& ! INSN_P (label
);
980 label
= NEXT_INSN (label
))
983 log
= LABEL_ALIGN_AFTER_BARRIER (insn
);
987 max_skip
= targetm
.asm_out
.label_align_after_barrier_max_skip (label
);
993 if (!HAVE_ATTR_length
)
996 /* Allocate the rest of the arrays. */
997 insn_lengths
= XNEWVEC (int, max_uid
);
998 insn_lengths_max_uid
= max_uid
;
999 /* Syntax errors can lead to labels being outside of the main insn stream.
1000 Initialize insn_addresses, so that we get reproducible results. */
1001 INSN_ADDRESSES_ALLOC (max_uid
);
1003 varying_length
= XCNEWVEC (char, max_uid
);
1005 /* Initialize uid_align. We scan instructions
1006 from end to start, and keep in align_tab[n] the last seen insn
1007 that does an alignment of at least n+1, i.e. the successor
1008 in the alignment chain for an insn that does / has a known
1010 uid_align
= XCNEWVEC (rtx
, max_uid
);
1012 for (i
= MAX_CODE_ALIGN
; --i
>= 0;)
1013 align_tab
[i
] = NULL_RTX
;
1014 seq
= get_last_insn ();
1015 for (; seq
; seq
= PREV_INSN (seq
))
1017 int uid
= INSN_UID (seq
);
1019 log
= (LABEL_P (seq
) ? LABEL_TO_ALIGNMENT (seq
) : 0);
1020 uid_align
[uid
] = align_tab
[0];
1023 /* Found an alignment label. */
1024 uid_align
[uid
] = align_tab
[log
];
1025 for (i
= log
- 1; i
>= 0; i
--)
1030 /* When optimizing, we start assuming minimum length, and keep increasing
1031 lengths as we find the need for this, till nothing changes.
1032 When not optimizing, we start assuming maximum lengths, and
1033 do a single pass to update the lengths. */
1034 bool increasing
= optimize
!= 0;
1036 #ifdef CASE_VECTOR_SHORTEN_MODE
1039 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1042 int min_shuid
= INSN_SHUID (get_insns ()) - 1;
1043 int max_shuid
= INSN_SHUID (get_last_insn ()) + 1;
1046 for (insn
= first
; insn
!= 0; insn
= NEXT_INSN (insn
))
1048 rtx min_lab
= NULL_RTX
, max_lab
= NULL_RTX
, pat
;
1049 int len
, i
, min
, max
, insn_shuid
;
1051 addr_diff_vec_flags flags
;
1053 if (! JUMP_TABLE_DATA_P (insn
)
1054 || GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
)
1056 pat
= PATTERN (insn
);
1057 len
= XVECLEN (pat
, 1);
1058 gcc_assert (len
> 0);
1059 min_align
= MAX_CODE_ALIGN
;
1060 for (min
= max_shuid
, max
= min_shuid
, i
= len
- 1; i
>= 0; i
--)
1062 rtx lab
= XEXP (XVECEXP (pat
, 1, i
), 0);
1063 int shuid
= INSN_SHUID (lab
);
1074 if (min_align
> LABEL_TO_ALIGNMENT (lab
))
1075 min_align
= LABEL_TO_ALIGNMENT (lab
);
1077 XEXP (pat
, 2) = gen_rtx_LABEL_REF (Pmode
, min_lab
);
1078 XEXP (pat
, 3) = gen_rtx_LABEL_REF (Pmode
, max_lab
);
1079 insn_shuid
= INSN_SHUID (insn
);
1080 rel
= INSN_SHUID (XEXP (XEXP (pat
, 0), 0));
1081 memset (&flags
, 0, sizeof (flags
));
1082 flags
.min_align
= min_align
;
1083 flags
.base_after_vec
= rel
> insn_shuid
;
1084 flags
.min_after_vec
= min
> insn_shuid
;
1085 flags
.max_after_vec
= max
> insn_shuid
;
1086 flags
.min_after_base
= min
> rel
;
1087 flags
.max_after_base
= max
> rel
;
1088 ADDR_DIFF_VEC_FLAGS (pat
) = flags
;
1091 PUT_MODE (pat
, CASE_VECTOR_SHORTEN_MODE (0, 0, pat
));
1094 #endif /* CASE_VECTOR_SHORTEN_MODE */
1096 /* Compute initial lengths, addresses, and varying flags for each insn. */
1097 int (*length_fun
) (rtx_insn
*) = increasing
? insn_min_length
: insn_default_length
;
1099 for (insn_current_address
= 0, insn
= first
;
1101 insn_current_address
+= insn_lengths
[uid
], insn
= NEXT_INSN (insn
))
1103 uid
= INSN_UID (insn
);
1105 insn_lengths
[uid
] = 0;
1109 int log
= LABEL_TO_ALIGNMENT (insn
);
1112 int align
= 1 << log
;
1113 int new_address
= (insn_current_address
+ align
- 1) & -align
;
1114 insn_lengths
[uid
] = new_address
- insn_current_address
;
1118 INSN_ADDRESSES (uid
) = insn_current_address
+ insn_lengths
[uid
];
1120 if (NOTE_P (insn
) || BARRIER_P (insn
)
1121 || LABEL_P (insn
) || DEBUG_INSN_P (insn
))
1123 if (insn
->deleted ())
1126 body
= PATTERN (insn
);
1127 if (JUMP_TABLE_DATA_P (insn
))
1129 /* This only takes room if read-only data goes into the text
1131 if (JUMP_TABLES_IN_TEXT_SECTION
1132 || readonly_data_section
== text_section
)
1133 insn_lengths
[uid
] = (XVECLEN (body
,
1134 GET_CODE (body
) == ADDR_DIFF_VEC
)
1135 * GET_MODE_SIZE (GET_MODE (body
)));
1136 /* Alignment is handled by ADDR_VEC_ALIGN. */
1138 else if (GET_CODE (body
) == ASM_INPUT
|| asm_noperands (body
) >= 0)
1139 insn_lengths
[uid
] = asm_insn_count (body
) * insn_default_length (insn
);
1140 else if (rtx_sequence
*body_seq
= dyn_cast
<rtx_sequence
*> (body
))
1143 int const_delay_slots
;
1145 const_delay_slots
= const_num_delay_slots (body_seq
->insn (0));
1147 const_delay_slots
= 0;
1149 int (*inner_length_fun
) (rtx_insn
*)
1150 = const_delay_slots
? length_fun
: insn_default_length
;
1151 /* Inside a delay slot sequence, we do not do any branch shortening
1152 if the shortening could change the number of delay slots
1154 for (i
= 0; i
< body_seq
->len (); i
++)
1156 rtx_insn
*inner_insn
= body_seq
->insn (i
);
1157 int inner_uid
= INSN_UID (inner_insn
);
1160 if (GET_CODE (body
) == ASM_INPUT
1161 || asm_noperands (PATTERN (inner_insn
)) >= 0)
1162 inner_length
= (asm_insn_count (PATTERN (inner_insn
))
1163 * insn_default_length (inner_insn
));
1165 inner_length
= inner_length_fun (inner_insn
);
1167 insn_lengths
[inner_uid
] = inner_length
;
1168 if (const_delay_slots
)
1170 if ((varying_length
[inner_uid
]
1171 = insn_variable_length_p (inner_insn
)) != 0)
1172 varying_length
[uid
] = 1;
1173 INSN_ADDRESSES (inner_uid
) = (insn_current_address
1174 + insn_lengths
[uid
]);
1177 varying_length
[inner_uid
] = 0;
1178 insn_lengths
[uid
] += inner_length
;
1181 else if (GET_CODE (body
) != USE
&& GET_CODE (body
) != CLOBBER
)
1183 insn_lengths
[uid
] = length_fun (insn
);
1184 varying_length
[uid
] = insn_variable_length_p (insn
);
1187 /* If needed, do any adjustment. */
1188 #ifdef ADJUST_INSN_LENGTH
1189 ADJUST_INSN_LENGTH (insn
, insn_lengths
[uid
]);
1190 if (insn_lengths
[uid
] < 0)
1191 fatal_insn ("negative insn length", insn
);
1195 /* Now loop over all the insns finding varying length insns. For each,
1196 get the current insn length. If it has changed, reflect the change.
1197 When nothing changes for a full pass, we are done. */
1199 while (something_changed
)
1201 something_changed
= 0;
1202 insn_current_align
= MAX_CODE_ALIGN
- 1;
1203 for (insn_current_address
= 0, insn
= first
;
1205 insn
= NEXT_INSN (insn
))
1208 #ifdef ADJUST_INSN_LENGTH
1213 uid
= INSN_UID (insn
);
1217 int log
= LABEL_TO_ALIGNMENT (insn
);
1219 #ifdef CASE_VECTOR_SHORTEN_MODE
1220 /* If the mode of a following jump table was changed, we
1221 may need to update the alignment of this label. */
1223 bool next_is_jumptable
;
1225 next
= next_nonnote_insn (insn
);
1226 next_is_jumptable
= next
&& JUMP_TABLE_DATA_P (next
);
1227 if ((JUMP_TABLES_IN_TEXT_SECTION
1228 || readonly_data_section
== text_section
)
1229 && next_is_jumptable
)
1231 int newlog
= ADDR_VEC_ALIGN (next
);
1235 LABEL_TO_ALIGNMENT (insn
) = log
;
1236 something_changed
= 1;
1241 if (log
> insn_current_align
)
1243 int align
= 1 << log
;
1244 int new_address
= (insn_current_address
+ align
- 1) & -align
;
1245 insn_lengths
[uid
] = new_address
- insn_current_address
;
1246 insn_current_align
= log
;
1247 insn_current_address
= new_address
;
1250 insn_lengths
[uid
] = 0;
1251 INSN_ADDRESSES (uid
) = insn_current_address
;
1255 length_align
= INSN_LENGTH_ALIGNMENT (insn
);
1256 if (length_align
< insn_current_align
)
1257 insn_current_align
= length_align
;
1259 insn_last_address
= INSN_ADDRESSES (uid
);
1260 INSN_ADDRESSES (uid
) = insn_current_address
;
1262 #ifdef CASE_VECTOR_SHORTEN_MODE
1264 && JUMP_TABLE_DATA_P (insn
)
1265 && GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
1267 rtx body
= PATTERN (insn
);
1268 int old_length
= insn_lengths
[uid
];
1270 safe_as_a
<rtx_insn
*> (XEXP (XEXP (body
, 0), 0));
1271 rtx min_lab
= XEXP (XEXP (body
, 2), 0);
1272 rtx max_lab
= XEXP (XEXP (body
, 3), 0);
1273 int rel_addr
= INSN_ADDRESSES (INSN_UID (rel_lab
));
1274 int min_addr
= INSN_ADDRESSES (INSN_UID (min_lab
));
1275 int max_addr
= INSN_ADDRESSES (INSN_UID (max_lab
));
1278 addr_diff_vec_flags flags
;
1279 machine_mode vec_mode
;
1281 /* Avoid automatic aggregate initialization. */
1282 flags
= ADDR_DIFF_VEC_FLAGS (body
);
1284 /* Try to find a known alignment for rel_lab. */
1285 for (prev
= rel_lab
;
1287 && ! insn_lengths
[INSN_UID (prev
)]
1288 && ! (varying_length
[INSN_UID (prev
)] & 1);
1289 prev
= PREV_INSN (prev
))
1290 if (varying_length
[INSN_UID (prev
)] & 2)
1292 rel_align
= LABEL_TO_ALIGNMENT (prev
);
/* See the comment on addr_diff_vec_flags in rtl.h for the
   meaning of the flags values.  base: REL_LAB   vec: INSN  */
/* Anything after INSN still has addresses from the last
   pass; adjust these so that they reflect our current
   estimate for this pass.  */
1301 if (flags
.base_after_vec
)
1302 rel_addr
+= insn_current_address
- insn_last_address
;
1303 if (flags
.min_after_vec
)
1304 min_addr
+= insn_current_address
- insn_last_address
;
1305 if (flags
.max_after_vec
)
1306 max_addr
+= insn_current_address
- insn_last_address
;
/* We want to know the worst case, i.e. lowest possible value
   for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
   its offset is positive, and we have to be wary of code shrink;
   otherwise, it is negative, and we have to be wary of code
   size increase.  */
if (flags.min_after_base)
1314 /* If INSN is between REL_LAB and MIN_LAB, the size
1315 changes we are about to make can change the alignment
1316 within the observed offset, therefore we have to break
1317 it up into two parts that are independent. */
1318 if (! flags
.base_after_vec
&& flags
.min_after_vec
)
1320 min_addr
-= align_fuzz (rel_lab
, insn
, rel_align
, 0);
1321 min_addr
-= align_fuzz (insn
, min_lab
, 0, 0);
1324 min_addr
-= align_fuzz (rel_lab
, min_lab
, rel_align
, 0);
1328 if (flags
.base_after_vec
&& ! flags
.min_after_vec
)
1330 min_addr
-= align_fuzz (min_lab
, insn
, 0, ~0);
1331 min_addr
-= align_fuzz (insn
, rel_lab
, 0, ~0);
1334 min_addr
-= align_fuzz (min_lab
, rel_lab
, 0, ~0);
/* Likewise, determine the highest possible value
   for the offset of MAX_LAB.  */
1338 if (flags
.max_after_base
)
1340 if (! flags
.base_after_vec
&& flags
.max_after_vec
)
1342 max_addr
+= align_fuzz (rel_lab
, insn
, rel_align
, ~0);
1343 max_addr
+= align_fuzz (insn
, max_lab
, 0, ~0);
1346 max_addr
+= align_fuzz (rel_lab
, max_lab
, rel_align
, ~0);
1350 if (flags
.base_after_vec
&& ! flags
.max_after_vec
)
1352 max_addr
+= align_fuzz (max_lab
, insn
, 0, 0);
1353 max_addr
+= align_fuzz (insn
, rel_lab
, 0, 0);
1356 max_addr
+= align_fuzz (max_lab
, rel_lab
, 0, 0);
1358 vec_mode
= CASE_VECTOR_SHORTEN_MODE (min_addr
- rel_addr
,
1359 max_addr
- rel_addr
, body
);
1361 || (GET_MODE_SIZE (vec_mode
)
1362 >= GET_MODE_SIZE (GET_MODE (body
))))
1363 PUT_MODE (body
, vec_mode
);
1364 if (JUMP_TABLES_IN_TEXT_SECTION
1365 || readonly_data_section
== text_section
)
1368 = (XVECLEN (body
, 1) * GET_MODE_SIZE (GET_MODE (body
)));
1369 insn_current_address
+= insn_lengths
[uid
];
1370 if (insn_lengths
[uid
] != old_length
)
1371 something_changed
= 1;
1376 #endif /* CASE_VECTOR_SHORTEN_MODE */
1378 if (! (varying_length
[uid
]))
1380 if (NONJUMP_INSN_P (insn
)
1381 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1385 body
= PATTERN (insn
);
1386 for (i
= 0; i
< XVECLEN (body
, 0); i
++)
1388 rtx inner_insn
= XVECEXP (body
, 0, i
);
1389 int inner_uid
= INSN_UID (inner_insn
);
1391 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1393 insn_current_address
+= insn_lengths
[inner_uid
];
1397 insn_current_address
+= insn_lengths
[uid
];
1402 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1404 rtx_sequence
*seqn
= as_a
<rtx_sequence
*> (PATTERN (insn
));
1407 body
= PATTERN (insn
);
1409 for (i
= 0; i
< seqn
->len (); i
++)
1411 rtx_insn
*inner_insn
= seqn
->insn (i
);
1412 int inner_uid
= INSN_UID (inner_insn
);
1415 INSN_ADDRESSES (inner_uid
) = insn_current_address
;
1417 /* insn_current_length returns 0 for insns with a
1418 non-varying length. */
1419 if (! varying_length
[inner_uid
])
1420 inner_length
= insn_lengths
[inner_uid
];
1422 inner_length
= insn_current_length (inner_insn
);
1424 if (inner_length
!= insn_lengths
[inner_uid
])
1426 if (!increasing
|| inner_length
> insn_lengths
[inner_uid
])
1428 insn_lengths
[inner_uid
] = inner_length
;
1429 something_changed
= 1;
1432 inner_length
= insn_lengths
[inner_uid
];
1434 insn_current_address
+= inner_length
;
1435 new_length
+= inner_length
;
1440 new_length
= insn_current_length (insn
);
1441 insn_current_address
+= new_length
;
1444 #ifdef ADJUST_INSN_LENGTH
1445 /* If needed, do any adjustment. */
1446 tmp_length
= new_length
;
1447 ADJUST_INSN_LENGTH (insn
, new_length
);
1448 insn_current_address
+= (new_length
- tmp_length
);
1451 if (new_length
!= insn_lengths
[uid
]
1452 && (!increasing
|| new_length
> insn_lengths
[uid
]))
1454 insn_lengths
[uid
] = new_length
;
1455 something_changed
= 1;
1458 insn_current_address
+= insn_lengths
[uid
] - new_length
;
1460 /* For a non-optimizing compile, do only a single pass. */
1465 free (varying_length
);
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */

static int
asm_insn_count (rtx body)
1477 if (GET_CODE (body
) == ASM_INPUT
)
1478 templ
= XSTR (body
, 0);
1480 templ
= decode_asm_operands (body
, NULL
, NULL
, NULL
, NULL
, NULL
);
1482 return asm_str_count (templ
);
/* Return the number of machine instructions likely to be generated for the
   inline-asm template.  */

int
asm_str_count (const char *templ)
{
  int count = 1;

  if (!*templ)
    return 0;

  for (; *templ; templ++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
	|| *templ == '\n')
      count++;

  return count;
}
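
/* Example (assuming the default IS_ASM_LOGICAL_LINE_SEPARATOR above, which
   treats ';' as a separator): for the template "nop; nop\n\tnop",
   asm_str_count returns 3: one for the string itself plus one for each
   ';' or newline encountered.  */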
/* ??? This is probably the wrong place for these.  */
/* Structure recording the mapping from source file and directory
   names at compile time to those to be embedded in debug
   information.  */
struct debug_prefix_map
{
  const char *old_prefix;
  const char *new_prefix;
  size_t old_len;
  size_t new_len;
  struct debug_prefix_map *next;
};

/* Linked list of such structures.  */
static debug_prefix_map *debug_prefix_maps;
/* Record a debug file prefix mapping.  ARG is the argument to
   -fdebug-prefix-map and must be of the form OLD=NEW.  */
void
add_debug_prefix_map (const char *arg)
{
  debug_prefix_map *map;
  const char *p;

  p = strchr (arg, '=');
  if (!p)
    {
      error ("invalid argument %qs to -fdebug-prefix-map", arg);
      return;
    }
  map = XNEW (debug_prefix_map);
  map->old_prefix = xstrndup (arg, p - arg);
  map->old_len = p - arg;
  p++;
  map->new_prefix = xstrdup (p);
  map->new_len = strlen (p);
  map->next = debug_prefix_maps;
  debug_prefix_maps = map;
}
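
/* Example (paths are hypothetical): given
   -fdebug-prefix-map=/home/user/src=/usr/src, add_debug_prefix_map records
   old_prefix "/home/user/src" and new_prefix "/usr/src", and
   remap_debug_filename below then maps "/home/user/src/foo.c" to
   "/usr/src/foo.c".  */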
1545 /* Perform user-specified mapping of debug filename prefixes. Return
1546 the new name corresponding to FILENAME. */
1549 remap_debug_filename (const char *filename
)
1551 debug_prefix_map
*map
;
1556 for (map
= debug_prefix_maps
; map
; map
= map
->next
)
1557 if (filename_ncmp (filename
, map
->old_prefix
, map
->old_len
) == 0)
1561 name
= filename
+ map
->old_len
;
1562 name_len
= strlen (name
) + 1;
1563 s
= (char *) alloca (name_len
+ map
->new_len
);
1564 memcpy (s
, map
->new_prefix
, map
->new_len
);
1565 memcpy (s
+ map
->new_len
, name
, name_len
);
1566 return ggc_strdup (s
);
1569 /* Return true if DWARF2 debug info can be emitted for DECL. */
1572 dwarf2_debug_info_emitted_p (tree decl
)
1574 if (write_symbols
!= DWARF2_DEBUG
&& write_symbols
!= VMS_AND_DWARF2_DEBUG
)
1577 if (DECL_IGNORED_P (decl
))
1583 /* Return scope resulting from combination of S1 and S2. */
1585 choose_inner_scope (tree s1
, tree s2
)
1591 if (BLOCK_NUMBER (s1
) > BLOCK_NUMBER (s2
))
1596 /* Emit lexical block notes needed to change scope from S1 to S2. */
1599 change_scope (rtx_insn
*orig_insn
, tree s1
, tree s2
)
1601 rtx_insn
*insn
= orig_insn
;
1602 tree com
= NULL_TREE
;
1603 tree ts1
= s1
, ts2
= s2
;
1608 gcc_assert (ts1
&& ts2
);
1609 if (BLOCK_NUMBER (ts1
) > BLOCK_NUMBER (ts2
))
1610 ts1
= BLOCK_SUPERCONTEXT (ts1
);
1611 else if (BLOCK_NUMBER (ts1
) < BLOCK_NUMBER (ts2
))
1612 ts2
= BLOCK_SUPERCONTEXT (ts2
);
1615 ts1
= BLOCK_SUPERCONTEXT (ts1
);
1616 ts2
= BLOCK_SUPERCONTEXT (ts2
);
1625 rtx_note
*note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
1626 NOTE_BLOCK (note
) = s
;
1627 s
= BLOCK_SUPERCONTEXT (s
);
1634 insn
= emit_note_before (NOTE_INSN_BLOCK_BEG
, insn
);
1635 NOTE_BLOCK (insn
) = s
;
1636 s
= BLOCK_SUPERCONTEXT (s
);
1640 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1641 on the scope tree and the newly reordered instructions. */
1644 reemit_insn_block_notes (void)
1646 tree cur_block
= DECL_INITIAL (cfun
->decl
);
1650 insn
= get_insns ();
1651 for (; insn
; insn
= NEXT_INSN (insn
))
1655 /* Prevent lexical blocks from straddling section boundaries. */
1656 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_SWITCH_TEXT_SECTIONS
)
1658 for (tree s
= cur_block
; s
!= DECL_INITIAL (cfun
->decl
);
1659 s
= BLOCK_SUPERCONTEXT (s
))
1661 rtx_note
*note
= emit_note_before (NOTE_INSN_BLOCK_END
, insn
);
1662 NOTE_BLOCK (note
) = s
;
1663 note
= emit_note_after (NOTE_INSN_BLOCK_BEG
, insn
);
1664 NOTE_BLOCK (note
) = s
;
1668 if (!active_insn_p (insn
))
1671 /* Avoid putting scope notes between jump table and its label. */
1672 if (JUMP_TABLE_DATA_P (insn
))
1675 this_block
= insn_scope (insn
);
1676 /* For sequences compute scope resulting from merging all scopes
1677 of instructions nested inside. */
1678 if (rtx_sequence
*body
= dyn_cast
<rtx_sequence
*> (PATTERN (insn
)))
1683 for (i
= 0; i
< body
->len (); i
++)
1684 this_block
= choose_inner_scope (this_block
,
1685 insn_scope (body
->insn (i
)));
1689 if (INSN_LOCATION (insn
) == UNKNOWN_LOCATION
)
1692 this_block
= DECL_INITIAL (cfun
->decl
);
1695 if (this_block
!= cur_block
)
1697 change_scope (insn
, cur_block
, this_block
);
1698 cur_block
= this_block
;
1702 /* change_scope emits before the insn, not after. */
1703 note
= emit_note (NOTE_INSN_DELETED
);
1704 change_scope (note
, cur_block
, DECL_INITIAL (cfun
->decl
));
1710 static const char *some_local_dynamic_name
;
1712 /* Locate some local-dynamic symbol still in use by this function
1713 so that we can print its name in local-dynamic base patterns.
1714 Return null if there are no local-dynamic references. */
1717 get_some_local_dynamic_name ()
1719 subrtx_iterator::array_type array
;
1722 if (some_local_dynamic_name
)
1723 return some_local_dynamic_name
;
1725 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1726 if (NONDEBUG_INSN_P (insn
))
1727 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), ALL
)
1729 const_rtx x
= *iter
;
1730 if (GET_CODE (x
) == SYMBOL_REF
)
1732 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
1733 return some_local_dynamic_name
= XSTR (x
, 0);
1734 if (CONSTANT_POOL_ADDRESS_P (x
))
1735 iter
.substitute (get_pool_constant (x
));
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE_P is nonzero if we should eliminate redundant
   test and compare insns.  */
1753 final_start_function (rtx_insn
*first
, FILE *file
,
1754 int optimize_p ATTRIBUTE_UNUSED
)
1758 this_is_asm_operands
= 0;
1760 need_profile_function
= false;
1762 last_filename
= LOCATION_FILE (prologue_location
);
1763 last_linenum
= LOCATION_LINE (prologue_location
);
1764 last_discriminator
= discriminator
= 0;
1766 high_block_linenum
= high_function_linenum
= last_linenum
;
1768 if (flag_sanitize
& SANITIZE_ADDRESS
)
1769 asan_function_start ();
1771 if (!DECL_IGNORED_P (current_function_decl
))
1772 debug_hooks
->begin_prologue (last_linenum
, last_filename
);
1774 if (!dwarf2_debug_info_emitted_p (current_function_decl
))
1775 dwarf2out_begin_prologue (0, NULL
);
1777 #ifdef LEAF_REG_REMAP
1778 if (crtl
->uses_only_leaf_regs
)
1779 leaf_renumber_regs (first
);
1782 /* The Sun386i and perhaps other machines don't work right
1783 if the profiling code comes after the prologue. */
1784 if (targetm
.profile_before_prologue () && crtl
->profile
)
1786 if (targetm
.asm_out
.function_prologue
== default_function_pro_epilogue
1787 && targetm
.have_prologue ())
1790 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1796 else if (NOTE_KIND (insn
) == NOTE_INSN_BASIC_BLOCK
1797 || NOTE_KIND (insn
) == NOTE_INSN_FUNCTION_BEG
)
1799 else if (NOTE_KIND (insn
) == NOTE_INSN_DELETED
1800 || NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
1809 need_profile_function
= true;
1811 profile_function (file
);
1814 profile_function (file
);
1817 /* If debugging, assign block numbers to all of the blocks in this
1821 reemit_insn_block_notes ();
1822 number_blocks (current_function_decl
);
1823 /* We never actually put out begin/end notes for the top-level
1824 block in the function. But, conceptually, that block is
1826 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl
)) = 1;
1829 if (warn_frame_larger_than
1830 && get_frame_size () > frame_larger_than_size
)
1832 /* Issue a warning */
1833 warning (OPT_Wframe_larger_than_
,
1834 "the frame size of %wd bytes is larger than %wd bytes",
1835 get_frame_size (), frame_larger_than_size
);
1838 /* First output the function prologue: code to set up the stack frame. */
1839 targetm
.asm_out
.function_prologue (file
, get_frame_size ());
1841 /* If the machine represents the prologue as RTL, the profiling code must
1842 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1843 if (! targetm
.have_prologue ())
1844 profile_after_prologue (file
);
1848 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED
)
1850 if (!targetm
.profile_before_prologue () && crtl
->profile
)
1851 profile_function (file
);
1855 profile_function (FILE *file ATTRIBUTE_UNUSED
)
1857 #ifndef NO_PROFILE_COUNTERS
1858 # define NO_PROFILE_COUNTERS 0
1860 #ifdef ASM_OUTPUT_REG_PUSH
1861 rtx sval
= NULL
, chain
= NULL
;
1863 if (cfun
->returns_struct
)
1864 sval
= targetm
.calls
.struct_value_rtx (TREE_TYPE (current_function_decl
),
1866 if (cfun
->static_chain_decl
)
1867 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
1868 #endif /* ASM_OUTPUT_REG_PUSH */
1870 if (! NO_PROFILE_COUNTERS
)
1872 int align
= MIN (BIGGEST_ALIGNMENT
, LONG_TYPE_SIZE
);
1873 switch_to_section (data_section
);
1874 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
1875 targetm
.asm_out
.internal_label (file
, "LP", current_function_funcdef_no
);
1876 assemble_integer (const0_rtx
, LONG_TYPE_SIZE
/ BITS_PER_UNIT
, align
, 1);
1879 switch_to_section (current_function_section ());
1881 #ifdef ASM_OUTPUT_REG_PUSH
1882 if (sval
&& REG_P (sval
))
1883 ASM_OUTPUT_REG_PUSH (file
, REGNO (sval
));
1884 if (chain
&& REG_P (chain
))
1885 ASM_OUTPUT_REG_PUSH (file
, REGNO (chain
));
1888 FUNCTION_PROFILER (file
, current_function_funcdef_no
);
1890 #ifdef ASM_OUTPUT_REG_PUSH
1891 if (chain
&& REG_P (chain
))
1892 ASM_OUTPUT_REG_POP (file
, REGNO (chain
));
1893 if (sval
&& REG_P (sval
))
1894 ASM_OUTPUT_REG_POP (file
, REGNO (sval
));
1898 /* Output assembler code for the end of a function.
1899 For clarity, args are same as those of `final_start_function'
1900 even though not all of them are needed. */
1903 final_end_function (void)
1907 if (!DECL_IGNORED_P (current_function_decl
))
1908 debug_hooks
->end_function (high_function_linenum
);
1910 /* Finally, output the function epilogue:
1911 code to restore the stack frame and return to the caller. */
1912 targetm
.asm_out
.function_epilogue (asm_out_file
, get_frame_size ());
1914 /* And debug output. */
1915 if (!DECL_IGNORED_P (current_function_decl
))
1916 debug_hooks
->end_epilogue (last_linenum
, last_filename
);
1918 if (!dwarf2_debug_info_emitted_p (current_function_decl
)
1919 && dwarf2out_do_frame ())
1920 dwarf2out_end_epilogue (last_linenum
, last_filename
);
1922 some_local_dynamic_name
= 0;
1926 /* Dumper helper for basic block information. FILE is the assembly
1927 output file, and INSN is the instruction being emitted. */
1930 dump_basic_block_info (FILE *file
, rtx_insn
*insn
, basic_block
*start_to_bb
,
1931 basic_block
*end_to_bb
, int bb_map_size
, int *bb_seqn
)
1935 if (!flag_debug_asm
)
1938 if (INSN_UID (insn
) < bb_map_size
1939 && (bb
= start_to_bb
[INSN_UID (insn
)]) != NULL
)
1944 fprintf (file
, "%s BLOCK %d", ASM_COMMENT_START
, bb
->index
);
1946 fprintf (file
, " freq:%d", bb
->frequency
);
1948 fprintf (file
, " count:%" PRId64
,
1950 fprintf (file
, " seq:%d", (*bb_seqn
)++);
1951 fprintf (file
, "\n%s PRED:", ASM_COMMENT_START
);
1952 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1954 dump_edge_info (file
, e
, TDF_DETAILS
, 0);
1956 fprintf (file
, "\n");
1958 if (INSN_UID (insn
) < bb_map_size
1959 && (bb
= end_to_bb
[INSN_UID (insn
)]) != NULL
)
1964 fprintf (asm_out_file
, "%s SUCC:", ASM_COMMENT_START
);
1965 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1967 dump_edge_info (asm_out_file
, e
, TDF_DETAILS
, 1);
1969 fprintf (file
, "\n");
1973 /* Output assembler code for some insns: all or part of a function.
1974 For description of args, see `final_start_function', above. */
1977 final (rtx_insn
*first
, FILE *file
, int optimize_p
)
1979 rtx_insn
*insn
, *next
;
1982 /* Used for -dA dump. */
1983 basic_block
*start_to_bb
= NULL
;
1984 basic_block
*end_to_bb
= NULL
;
1985 int bb_map_size
= 0;
1988 last_ignored_compare
= 0;
1991 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1993 /* If CC tracking across branches is enabled, record the insn which
1994 jumps to each branch only reached from one place. */
1995 if (optimize_p
&& JUMP_P (insn
))
1997 rtx lab
= JUMP_LABEL (insn
);
1998 if (lab
&& LABEL_P (lab
) && LABEL_NUSES (lab
) == 1)
2000 LABEL_REFS (lab
) = insn
;
2013 bb_map_size
= get_max_uid () + 1;
2014 start_to_bb
= XCNEWVEC (basic_block
, bb_map_size
);
2015 end_to_bb
= XCNEWVEC (basic_block
, bb_map_size
);
2017 /* There is no cfg for a thunk. */
2018 if (!cfun
->is_thunk
)
2019 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2021 start_to_bb
[INSN_UID (BB_HEAD (bb
))] = bb
;
2022 end_to_bb
[INSN_UID (BB_END (bb
))] = bb
;
2026 /* Output the insns. */
2027 for (insn
= first
; insn
;)
2029 if (HAVE_ATTR_length
)
2031 if ((unsigned) INSN_UID (insn
) >= INSN_ADDRESSES_SIZE ())
2033 /* This can be triggered by bugs elsewhere in the compiler if
2034 new insns are created after init_insn_lengths is called. */
2035 gcc_assert (NOTE_P (insn
));
2036 insn_current_address
= -1;
2039 insn_current_address
= INSN_ADDRESSES (INSN_UID (insn
));
2042 dump_basic_block_info (file
, insn
, start_to_bb
, end_to_bb
,
2043 bb_map_size
, &bb_seqn
);
2044 insn
= final_scan_insn (insn
, file
, optimize_p
, 0, &seen
);
2053 /* Remove CFI notes, to avoid compare-debug failures. */
2054 for (insn
= first
; insn
; insn
= next
)
2056 next
= NEXT_INSN (insn
);
2058 && (NOTE_KIND (insn
) == NOTE_INSN_CFI
2059 || NOTE_KIND (insn
) == NOTE_INSN_CFI_LABEL
))
2065 get_insn_template (int code
, rtx insn
)
2067 switch (insn_data
[code
].output_format
)
2069 case INSN_OUTPUT_FORMAT_SINGLE
:
2070 return insn_data
[code
].output
.single
;
2071 case INSN_OUTPUT_FORMAT_MULTI
:
2072 return insn_data
[code
].output
.multi
[which_alternative
];
2073 case INSN_OUTPUT_FORMAT_FUNCTION
:
2075 return (*insn_data
[code
].output
.function
) (recog_data
.operand
,
2076 as_a
<rtx_insn
*> (insn
));
2083 /* Emit the appropriate declaration for an alternate-entry-point
2084 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2085 LABEL_KIND != LABEL_NORMAL.
2087 The case fall-through in this function is intentional. */
2089 output_alternate_entry_point (FILE *file
, rtx_insn
*insn
)
2091 const char *name
= LABEL_NAME (insn
);
2093 switch (LABEL_KIND (insn
))
2095 case LABEL_WEAK_ENTRY
:
2096 #ifdef ASM_WEAKEN_LABEL
2097 ASM_WEAKEN_LABEL (file
, name
);
2099 case LABEL_GLOBAL_ENTRY
:
2100 targetm
.asm_out
.globalize_label (file
, name
);
2101 case LABEL_STATIC_ENTRY
:
2102 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2103 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
2105 ASM_OUTPUT_LABEL (file
, name
);
2114 /* Given a CALL_INSN, find and return the nested CALL. */
2116 call_from_call_insn (rtx_call_insn
*insn
)
2119 gcc_assert (CALL_P (insn
));
2122 while (GET_CODE (x
) != CALL
)
2124 switch (GET_CODE (x
))
2129 x
= COND_EXEC_CODE (x
);
2132 x
= XVECEXP (x
, 0, 0);
/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used only within delayed branch sequence output).

   SEEN is used to track the end of the prologue, for emitting
   debug information.  We force the emission of a line note after
   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */
2155 final_scan_insn (rtx_insn
*insn
, FILE *file
, int optimize_p ATTRIBUTE_UNUSED
,
2156 int nopeepholes ATTRIBUTE_UNUSED
, int *seen
)
2165 /* Ignore deleted insns. These can occur when we split insns (due to a
2166 template of "#") while not optimizing. */
2167 if (insn
->deleted ())
2168 return NEXT_INSN (insn
);
2170 switch (GET_CODE (insn
))
2173 switch (NOTE_KIND (insn
))
2175 case NOTE_INSN_DELETED
:
2176 case NOTE_INSN_UPDATE_SJLJ_CONTEXT
:
2179 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
2180 in_cold_section_p
= !in_cold_section_p
;
2182 if (dwarf2out_do_frame ())
2183 dwarf2out_switch_text_section ();
2184 else if (!DECL_IGNORED_P (current_function_decl
))
2185 debug_hooks
->switch_text_section ();
2187 switch_to_section (current_function_section ());
2188 targetm
.asm_out
.function_switched_text_sections (asm_out_file
,
2189 current_function_decl
,
2191 /* Emit a label for the split cold section. Form label name by
2192 suffixing "cold" to the original function's name. */
2193 if (in_cold_section_p
)
2196 = clone_function_name (current_function_decl
, "cold");
2197 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2198 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file
,
2200 (cold_function_name
),
2201 current_function_decl
);
2203 ASM_OUTPUT_LABEL (asm_out_file
,
2204 IDENTIFIER_POINTER (cold_function_name
));
2209 case NOTE_INSN_BASIC_BLOCK
:
2210 if (need_profile_function
)
2212 profile_function (asm_out_file
);
2213 need_profile_function
= false;
2216 if (targetm
.asm_out
.unwind_emit
)
2217 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
2219 discriminator
= NOTE_BASIC_BLOCK (insn
)->discriminator
;
2223 case NOTE_INSN_EH_REGION_BEG
:
2224 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHB",
2225 NOTE_EH_HANDLER (insn
));
2228 case NOTE_INSN_EH_REGION_END
:
2229 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHE",
2230 NOTE_EH_HANDLER (insn
));
2233 case NOTE_INSN_PROLOGUE_END
:
2234 targetm
.asm_out
.function_end_prologue (file
);
2235 profile_after_prologue (file
);
2237 if ((*seen
& (SEEN_EMITTED
| SEEN_NOTE
)) == SEEN_NOTE
)
2239 *seen
|= SEEN_EMITTED
;
2240 force_source_line
= true;
2247 case NOTE_INSN_EPILOGUE_BEG
:
2248 if (!DECL_IGNORED_P (current_function_decl
))
2249 (*debug_hooks
->begin_epilogue
) (last_linenum
, last_filename
);
2250 targetm
.asm_out
.function_begin_epilogue (file
);
2254 dwarf2out_emit_cfi (NOTE_CFI (insn
));
2257 case NOTE_INSN_CFI_LABEL
:
2258 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LCFI",
2259 NOTE_LABEL_NUMBER (insn
));
2262         case NOTE_INSN_FUNCTION_BEG:
2263           if (need_profile_function)
2265               profile_function (asm_out_file);
2266               need_profile_function = false;
2270           if (!DECL_IGNORED_P (current_function_decl))
2271             debug_hooks->end_prologue (last_linenum, last_filename);
2273           if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2275               *seen |= SEEN_EMITTED;
2276               force_source_line = true;
2283         case NOTE_INSN_BLOCK_BEG:
2284           if (debug_info_level == DINFO_LEVEL_NORMAL
2285               || debug_info_level == DINFO_LEVEL_VERBOSE
2286               || write_symbols == DWARF2_DEBUG
2287               || write_symbols == VMS_AND_DWARF2_DEBUG
2288               || write_symbols == VMS_DEBUG)
2290               int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2294               high_block_linenum = last_linenum;
2296               /* Output debugging info about the symbol-block beginning.  */
2297               if (!DECL_IGNORED_P (current_function_decl))
2298                 debug_hooks->begin_block (last_linenum, n);
2300               /* Mark this block as output.  */
2301               TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2303           if (write_symbols == DBX_DEBUG
2304               || write_symbols == SDB_DEBUG)
2306               location_t *locus_ptr
2307                 = block_nonartificial_location (NOTE_BLOCK (insn));
2309               if (locus_ptr != NULL)
2311                   override_filename = LOCATION_FILE (*locus_ptr);
2312                   override_linenum = LOCATION_LINE (*locus_ptr);
2317         case NOTE_INSN_BLOCK_END:
2318           if (debug_info_level == DINFO_LEVEL_NORMAL
2319               || debug_info_level == DINFO_LEVEL_VERBOSE
2320               || write_symbols == DWARF2_DEBUG
2321               || write_symbols == VMS_AND_DWARF2_DEBUG
2322               || write_symbols == VMS_DEBUG)
2324               int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2328               /* End of a symbol-block.  */
2330               gcc_assert (block_depth >= 0);
2332               if (!DECL_IGNORED_P (current_function_decl))
2333                 debug_hooks->end_block (high_block_linenum, n);
2335           if (write_symbols == DBX_DEBUG
2336               || write_symbols == SDB_DEBUG)
2338               tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2339               location_t *locus_ptr
2340                 = block_nonartificial_location (outer_block);
2342               if (locus_ptr != NULL)
2344                   override_filename = LOCATION_FILE (*locus_ptr);
2345                   override_linenum = LOCATION_LINE (*locus_ptr);
2349                   override_filename = NULL;
2350                   override_linenum = 0;
2355         case NOTE_INSN_DELETED_LABEL:
2356           /* Emit the label.  We may have deleted the CODE_LABEL because
2357              the label could be proved to be unreachable, though still
2358              referenced (in the form of having its address taken).  */
2359           ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2362         case NOTE_INSN_DELETED_DEBUG_LABEL:
2363           /* Similarly, but need to use a different namespace for it.  */
2364           if (CODE_LABEL_NUMBER (insn) != -1)
2365             ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2368         case NOTE_INSN_VAR_LOCATION:
2369         case NOTE_INSN_CALL_ARG_LOCATION:
2370           if (!DECL_IGNORED_P (current_function_decl))
2371             debug_hooks->var_location (insn);
2384       /* The target port might emit labels in the output function for
2385          some insn, e.g. sh.c output_branchy_insn.  */
2386       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2388           int align = LABEL_TO_ALIGNMENT (insn);
2389 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2390           int max_skip = LABEL_TO_MAX_SKIP (insn);
2393           if (align && NEXT_INSN (insn))
2395 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396               ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2398 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2399               ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2401               ASM_OUTPUT_ALIGN (file, align);
2408       if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2409         debug_hooks->label (as_a <rtx_code_label *> (insn));
2413       next = next_nonnote_insn (insn);
2414       /* If this label is followed by a jump-table, make sure we put
2415          the label in the read-only section.  Also possibly write the
2416          label and jump table together.  */
2417       if (next != 0 && JUMP_TABLE_DATA_P (next))
2419 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2420           /* In this case, the case vector is being moved by the
2421              target, so don't output the label at all.  Leave that
2422              to the back end macros.  */
2424           if (! JUMP_TABLES_IN_TEXT_SECTION)
2428               switch_to_section (targetm.asm_out.function_rodata_section
2429                                  (current_function_decl));
2431 #ifdef ADDR_VEC_ALIGN
2432               log_align = ADDR_VEC_ALIGN (next);
2434               log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2436               ASM_OUTPUT_ALIGN (file, log_align);
2439             switch_to_section (current_function_section ());
2441 #ifdef ASM_OUTPUT_CASE_LABEL
2442           ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2445           targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2450       if (LABEL_ALT_ENTRY_P (insn))
2451         output_alternate_entry_point (file, insn);
2453         targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2458       rtx body = PATTERN (insn);
2459       int insn_code_number;
2463       /* Reset this early so it is correct for ASM statements.  */
2464       current_insn_predicate = NULL_RTX;
2466       /* An INSN, JUMP_INSN or CALL_INSN.
2467          First check for special kinds that recog doesn't recognize.  */
2469       if (GET_CODE (body) == USE /* These are just declarations.  */
2470           || GET_CODE (body) == CLOBBER)
2475       /* If there is a REG_CC_SETTER note on this insn, it means that
2476          the setting of the condition code was done in the delay slot
2477          of the insn that branched here.  So recover the cc status
2478          from the insn that set it.  */
2480         rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2483             rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2484             NOTICE_UPDATE_CC (PATTERN (other), other);
2485             cc_prev_status = cc_status;
2490       /* Detect insns that are really jump-tables
2491          and output them as such.  */
2493       if (JUMP_TABLE_DATA_P (insn))
2495 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2499           if (! JUMP_TABLES_IN_TEXT_SECTION)
2500             switch_to_section (targetm.asm_out.function_rodata_section
2501                                (current_function_decl));
2503             switch_to_section (current_function_section ());
2507 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2508           if (GET_CODE (body) == ADDR_VEC)
2510 #ifdef ASM_OUTPUT_ADDR_VEC
2511               ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2518 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2519               ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2525           vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2526           for (idx = 0; idx < vlen; idx++)
2528               if (GET_CODE (body) == ADDR_VEC)
2530 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2531                   ASM_OUTPUT_ADDR_VEC_ELT
2532                     (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2539 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2540                   ASM_OUTPUT_ADDR_DIFF_ELT
2543                      CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2544                      CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2550 #ifdef ASM_OUTPUT_CASE_END
2551           ASM_OUTPUT_CASE_END (file,
2552                                CODE_LABEL_NUMBER (PREV_INSN (insn)),
2557           switch_to_section (current_function_section ());
2561       /* Output this line note if it is the first or the last line
2562          of a function.  */
2563       if (!DECL_IGNORED_P (current_function_decl)
2564           && notice_source_line (insn, &is_stmt))
2565         (*debug_hooks->source_line) (last_linenum, last_filename,
2566                                      last_discriminator, is_stmt);
2568       if (GET_CODE (body) == ASM_INPUT)
2570           const char *string = XSTR (body, 0);
2572           /* There's no telling what that did to the condition codes.  */
2577               expanded_location loc;
2580               loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2581               if (*loc.file && loc.line)
2582                 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2583                          ASM_COMMENT_START, loc.line, loc.file);
2584               fprintf (asm_out_file, "\t%s\n", string);
2585 #if HAVE_AS_LINE_ZERO
2586               if (*loc.file && loc.line)
2587                 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2593       /* Detect `asm' construct with operands.  */
2594       if (asm_noperands (body) >= 0)
2596           unsigned int noperands = asm_noperands (body);
2597           rtx *ops = XALLOCAVEC (rtx, noperands);
2600           expanded_location expanded;
2602           /* There's no telling what that did to the condition codes.  */
2605           /* Get out the operand values.  */
2606           string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2607           /* Inhibit dying on what would otherwise be compiler bugs.  */
2608           insn_noperands = noperands;
2609           this_is_asm_operands = insn;
2610           expanded = expand_location (loc);
2612 #ifdef FINAL_PRESCAN_INSN
2613           FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2616           /* Output the insn using them.  */
2620           if (expanded.file && expanded.line)
2621             fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2622                      ASM_COMMENT_START, expanded.line, expanded.file);
2623           output_asm_insn (string, ops);
2624 #if HAVE_AS_LINE_ZERO
2625           if (expanded.file && expanded.line)
2626             fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2630           if (targetm.asm_out.final_postscan_insn)
2631             targetm.asm_out.final_postscan_insn (file, insn, ops,
2634           this_is_asm_operands = 0;
2640       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2642           /* A delayed-branch sequence.  */
2645           final_sequence = seq;
2647           /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2648              force the restoration of a comparison that was previously
2649              thought unnecessary.  If that happens, cancel this sequence
2650              and cause that insn to be restored.  */
2652           next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2653           if (next != seq->insn (1))
2659           for (i = 1; i < seq->len (); i++)
2661               rtx_insn *insn = seq->insn (i);
2662               rtx_insn *next = NEXT_INSN (insn);
2663               /* We loop in case any instruction in a delay slot gets
                 split.  */
2666                 insn = final_scan_insn (insn, file, 0, 1, seen);
2667               while (insn != next);
2669 #ifdef DBR_OUTPUT_SEQEND
2670           DBR_OUTPUT_SEQEND (file);
2674           /* If the insn requiring the delay slot was a CALL_INSN, the
2675              insns in the delay slot are actually executed before the
2676              called function.  Hence we don't preserve any CC-setting
2677              actions in these insns and the CC must be marked as being
2678              clobbered by the function.  */
2679           if (CALL_P (seq->insn (0)))
2686       /* We have a real machine instruction as rtl.  */
2688       body = PATTERN (insn);
2691       set = single_set (insn);
2693       /* Check for redundant test and compare instructions
2694          (when the condition codes are already set up as desired).
2695          This is done only when optimizing; if not optimizing,
2696          it should be possible for the user to alter a variable
2697          with the debugger in between statements
2698          and the next statement should reexamine the variable
2699          to compute the condition codes.  */
2704           && GET_CODE (SET_DEST (set)) == CC0
2705           && insn != last_ignored_compare)
2708           if (GET_CODE (SET_SRC (set)) == SUBREG)
2709             SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2711           src1 = SET_SRC (set);
2713           if (GET_CODE (SET_SRC (set)) == COMPARE)
2715               if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2716                 XEXP (SET_SRC (set), 0)
2717                   = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2718               if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2719                 XEXP (SET_SRC (set), 1)
2720                   = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2721               if (XEXP (SET_SRC (set), 1)
2722                   == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2723                 src2 = XEXP (SET_SRC (set), 0);
2725           if ((cc_status.value1 != 0
2726                && rtx_equal_p (src1, cc_status.value1))
2727               || (cc_status.value2 != 0
2728                   && rtx_equal_p (src1, cc_status.value2))
2729               || (src2 != 0 && cc_status.value1 != 0
2730                   && rtx_equal_p (src2, cc_status.value1))
2731               || (src2 != 0 && cc_status.value2 != 0
2732                   && rtx_equal_p (src2, cc_status.value2)))
2734               /* Don't delete insn if it has an addressing side-effect.  */
2735               if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2736                   /* or if anything in it is volatile.  */
2737                   && ! volatile_refs_p (PATTERN (insn)))
2739                   /* We don't really delete the insn; just ignore it.  */
2740                   last_ignored_compare = insn;
2747       /* If this is a conditional branch, maybe modify it
2748          if the cc's are in a nonstandard state
2749          so that it accomplishes the same thing that it would
2750          do straightforwardly if the cc's were set up normally.  */
2752       if (cc_status.flags != 0
2754           && GET_CODE (body) == SET
2755           && SET_DEST (body) == pc_rtx
2756           && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2757           && COMPARISON_P (XEXP (SET_SRC (body), 0))
2758           && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2760           /* This function may alter the contents of its argument
2761              and clear some of the cc_status.flags bits.
2762              It may also return 1 meaning condition now always true
2763              or -1 meaning condition now always false
2764              or 2 meaning condition nontrivial but altered.  */
2765           int result = alter_cond (XEXP (SET_SRC (body), 0));
2766           /* If condition now has fixed value, replace the IF_THEN_ELSE
2767              with its then-operand or its else-operand.  */
2769             SET_SRC (body) = XEXP (SET_SRC (body), 1);
2771             SET_SRC (body) = XEXP (SET_SRC (body), 2);
2773           /* The jump is now either unconditional or a no-op.
2774              If it has become a no-op, don't try to output it.
2775              (It would not be recognized.)  */
2776           if (SET_SRC (body) == pc_rtx)
2781           else if (ANY_RETURN_P (SET_SRC (body)))
2782             /* Replace (set (pc) (return)) with (return).  */
2783             PATTERN (insn) = body = SET_SRC (body);
2785           /* Rerecognize the instruction if it has changed.  */
2787             INSN_CODE (insn) = -1;
2790       /* If this is a conditional trap, maybe modify it if the cc's
2791          are in a nonstandard state so that it accomplishes the same
2792          thing that it would do straightforwardly if the cc's were
2793          set up normally.  */
2794       if (cc_status.flags != 0
2795           && NONJUMP_INSN_P (insn)
2796           && GET_CODE (body) == TRAP_IF
2797           && COMPARISON_P (TRAP_CONDITION (body))
2798           && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2800           /* This function may alter the contents of its argument
2801              and clear some of the cc_status.flags bits.
2802              It may also return 1 meaning condition now always true
2803              or -1 meaning condition now always false
2804              or 2 meaning condition nontrivial but altered.  */
2805           int result = alter_cond (TRAP_CONDITION (body));
2807           /* If TRAP_CONDITION has become always false, delete the
2808              instruction.  */
2815           /* If TRAP_CONDITION has become always true, replace
2816              TRAP_CONDITION with const_true_rtx.  */
2818             TRAP_CONDITION (body) = const_true_rtx;
2820           /* Rerecognize the instruction if it has changed.  */
2822             INSN_CODE (insn) = -1;
2825       /* Make same adjustments to instructions that examine the
2826          condition codes without jumping and instructions that
2827          handle conditional moves (if this machine has either one).  */
2829       if (cc_status.flags != 0
2832           rtx cond_rtx, then_rtx, else_rtx;
2835               && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2837               cond_rtx = XEXP (SET_SRC (set), 0);
2838               then_rtx = XEXP (SET_SRC (set), 1);
2839               else_rtx = XEXP (SET_SRC (set), 2);
2843               cond_rtx = SET_SRC (set);
2844               then_rtx = const_true_rtx;
2845               else_rtx = const0_rtx;
2848           if (COMPARISON_P (cond_rtx)
2849               && XEXP (cond_rtx, 0) == cc0_rtx)
2852               result = alter_cond (cond_rtx);
2854                 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2855               else if (result == -1)
2856                 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2857               else if (result == 2)
2858                 INSN_CODE (insn) = -1;
2859               if (SET_DEST (set) == SET_SRC (set))
2866       /* Do machine-specific peephole optimizations if desired.  */
2868       if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2870           rtx_insn *next = peephole (insn);
2871           /* When peepholing, if there were notes within the peephole,
2872              emit them before the peephole.  */
2873           if (next != 0 && next != NEXT_INSN (insn))
2875               rtx_insn *note, *prev = PREV_INSN (insn);
2877               for (note = NEXT_INSN (insn); note != next;
2878                    note = NEXT_INSN (note))
2879                 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2881               /* Put the notes in the proper position for a later
2882                  rescan.  For example, the SH target can do this
2883                  when generating a far jump in a delayed branch
2884                  sequence.  */
2885               note = NEXT_INSN (insn);
2886               SET_PREV_INSN (note) = prev;
2887               SET_NEXT_INSN (prev) = note;
2888               SET_NEXT_INSN (PREV_INSN (next)) = insn;
2889               SET_PREV_INSN (insn) = PREV_INSN (next);
2890               SET_NEXT_INSN (insn) = next;
2891               SET_PREV_INSN (next) = insn;
2894           /* PEEPHOLE might have changed this.  */
2895           body = PATTERN (insn);
2898       /* Try to recognize the instruction.
2899          If successful, verify that the operands satisfy the
2900          constraints for the instruction.  Crash if they don't,
2901          since `reload' should have changed them so that they do.  */
2903       insn_code_number = recog_memoized (insn);
2904       cleanup_subreg_operands (insn);
2906       /* Dump the insn in the assembly for debugging (-dAP).
2907          If the final dump is requested as slim RTL, dump slim
2908          RTL to the assembly file also.  */
2909       if (flag_dump_rtl_in_asm)
2911           print_rtx_head = ASM_COMMENT_START;
2912           if (! (dump_flags & TDF_SLIM))
2913             print_rtl_single (asm_out_file, insn);
2915             dump_insn_slim (asm_out_file, insn);
2916           print_rtx_head = "";
2919       if (! constrain_operands_cached (insn, 1))
2920         fatal_insn_not_found (insn);
2922       /* Some target machines need to prescan each insn before
2923          it is output.  */
2925 #ifdef FINAL_PRESCAN_INSN
2926       FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2929       if (targetm.have_conditional_execution ()
2930           && GET_CODE (PATTERN (insn)) == COND_EXEC)
2931         current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2934       cc_prev_status = cc_status;
2936       /* Update `cc_status' for this instruction.
2937          The instruction's output routine may change it further.
2938          If the output routine for a jump insn needs to depend
2939          on the cc status, it should look at cc_prev_status.  */
2941       NOTICE_UPDATE_CC (body, insn);
2944       current_output_insn = debug_insn = insn;
2946       /* Find the proper template for this insn.  */
2947       templ = get_insn_template (insn_code_number, insn);
2949       /* If the C code returns 0, it means that it is a jump insn
2950          which follows a deleted test insn, and that test insn
2951          needs to be reinserted.  */
2956           gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2958           /* We have already processed the notes between the setter and
2959              the user.  Make sure we don't process them again, this is
2960              particularly important if one of the notes is a block
2961              scope note or an EH note.  */
2963                prev != last_ignored_compare;
2964                prev = PREV_INSN (prev))
2967               delete_insn (prev);  /* Use delete_note.  */
2973       /* If the template is the string "#", it means that this insn must
2974          be split.  */
2975       if (templ[0] == '#' && templ[1] == '\0')
2977           rtx_insn *new_rtx = try_split (body, insn, 0);
2979           /* If we didn't split the insn, go away.  */
2980           if (new_rtx == insn && PATTERN (new_rtx) == body)
2981             fatal_insn ("could not split insn", insn);
2983           /* If we have a length attribute, this instruction should have
2984              been split in shorten_branches, to ensure that we would have
2985              valid length info for the splitees.  */
2986           gcc_assert (!HAVE_ATTR_length);
2991       /* ??? This will put the directives in the wrong place if
2992          get_insn_template outputs assembly directly.  However calling it
2993          before get_insn_template breaks if the insn is split.  */
2994       if (targetm.asm_out.unwind_emit_before_insn
2995           && targetm.asm_out.unwind_emit)
2996         targetm.asm_out.unwind_emit (asm_out_file, insn);
2998       if (rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn))
3000           rtx x = call_from_call_insn (call_insn);
3002           if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3006               t = SYMBOL_REF_DECL (x);
3008                 assemble_external (t);
3010       if (!DECL_IGNORED_P (current_function_decl))
3011         debug_hooks->var_location (insn);
3014       /* Output assembler code from the template.  */
3015       output_asm_insn (templ, recog_data.operand);
3017       /* Some target machines need to postscan each insn after
3018          it is output.  */
3019       if (targetm.asm_out.final_postscan_insn)
3020         targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3021                                              recog_data.n_operands);
3023       if (!targetm.asm_out.unwind_emit_before_insn
3024           && targetm.asm_out.unwind_emit)
3025         targetm.asm_out.unwind_emit (asm_out_file, insn);
3027       current_output_insn = debug_insn = 0;
3030   return NEXT_INSN (insn);
3033 /* Return whether a source line note needs to be emitted before INSN.
3034    Sets IS_STMT to TRUE if the line should be marked as a possible
3035    breakpoint location.  */
3038 notice_source_line (rtx_insn *insn, bool *is_stmt)
3040   const char *filename;
3043   if (override_filename)
3045       filename = override_filename;
3046       linenum = override_linenum;
3048   else if (INSN_HAS_LOCATION (insn))
3050       expanded_location xloc = insn_location (insn);
3051       filename = xloc.file;
3052       linenum = xloc.line;
3060   if (filename == NULL)
3063   if (force_source_line
3064       || filename != last_filename
3065       || last_linenum != linenum)
3067       force_source_line = false;
3068       last_filename = filename;
3069       last_linenum = linenum;
3070       last_discriminator = discriminator;
3072       high_block_linenum = MAX (last_linenum, high_block_linenum);
3073       high_function_linenum = MAX (last_linenum, high_function_linenum);
3077   if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3079       /* If the discriminator changed, but the line number did not,
3080          output the line table entry with is_stmt false so the
3081          debugger does not treat this as a breakpoint location.  */
3082       last_discriminator = discriminator;
3090 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3091    directly to the desired hard register.  */
3094 cleanup_subreg_operands (rtx_insn *insn)
3097   bool changed = false;
3098   extract_insn_cached (insn);
3099   for (i = 0; i < recog_data.n_operands; i++)
3101       /* The following test cannot use recog_data.operand when testing
3102          for a SUBREG: the underlying object might have been changed
3103          already if we are inside a match_operator expression that
3104          matches the else clause.  Instead we test the underlying
3105          expression directly.  */
3106       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3108           recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3111       else if (GET_CODE (recog_data.operand[i]) == PLUS
3112                || GET_CODE (recog_data.operand[i]) == MULT
3113                || MEM_P (recog_data.operand[i]))
3114         recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3117   for (i = 0; i < recog_data.n_dups; i++)
3119       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3121           *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3124       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3125                || GET_CODE (*recog_data.dup_loc[i]) == MULT
3126                || MEM_P (*recog_data.dup_loc[i]))
3127         *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3130     df_insn_rescan (insn);
3133 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3134 the thing it is a subreg of. Do it anyway if FINAL_P. */
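/* Illustrative sketch (not part of the original source): on a
   hypothetical 32-bit little-endian target, a subreg of a hard register
   such as (subreg:SI (reg:DI 0) 4) is typically rewritten here to the
   hard register that holds the selected word, e.g. (reg:SI 1), while a
   subreg of a MEM becomes a narrower MEM at the adjusted address via
   adjust_address.  The register numbers are made up for the example.  */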
3137 alter_subreg (rtx *xp, bool final_p)
3140   rtx y = SUBREG_REG (x);
3142   /* simplify_subreg does not remove subreg from volatile references.
3143      We are required to.  */
3146       int offset = SUBREG_BYTE (x);
3148       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3149          contains 0 instead of the proper offset.  See simplify_subreg.  */
3151           && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3153           int difference = GET_MODE_SIZE (GET_MODE (y))
3154                            - GET_MODE_SIZE (GET_MODE (x));
3155           if (WORDS_BIG_ENDIAN)
3156             offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3157           if (BYTES_BIG_ENDIAN)
3158             offset += difference % UNITS_PER_WORD;
3162         *xp = adjust_address (y, GET_MODE (x), offset);
3164         *xp = adjust_address_nv (y, GET_MODE (x), offset);
3166   else if (REG_P (y) && HARD_REGISTER_P (y))
3168       rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3173   else if (final_p && REG_P (y))
3175       /* Simplify_subreg can't handle some REG cases, but we have to.  */
3177       HOST_WIDE_INT offset;
3179       regno = subreg_regno (x);
3180       if (subreg_lowpart_p (x))
3181         offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3183         offset = SUBREG_BYTE (x);
3184       *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3191 /* Do alter_subreg on all the SUBREGs contained in X.  */
3194 walk_alter_subreg (rtx *xp, bool *changed)
3197   switch (GET_CODE (x))
3202       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3203       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3208       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3213       return alter_subreg (xp, true);
3224 /* Given BODY, the body of a jump instruction, alter the jump condition
3225 as required by the bits that are set in cc_status.flags.
3226 Not all of the bits there can be handled at this level in all cases.
3228 The value is normally 0.
3229 1 means that the condition has become always true.
3230 -1 means that the condition has become always false.
3231 2 means that COND has been altered. */
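/* Illustrative sketch (not part of the original source): with
   CC_REVERSED set in cc_status.flags, a condition such as
   (gt (cc0) (const_int 0)) is rewritten by swap_condition to
   (lt (cc0) (const_int 0)) and the function returns 2 ("altered"),
   so callers like final_scan_insn re-recognize the jump; a return of
   1 or -1 instead lets them replace the IF_THEN_ELSE outright.  */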
3234 alter_cond (rtx cond)
3238 if (cc_status
.flags
& CC_REVERSED
)
3241 PUT_CODE (cond
, swap_condition (GET_CODE (cond
)));
3244 if (cc_status
.flags
& CC_INVERTED
)
3247 PUT_CODE (cond
, reverse_condition (GET_CODE (cond
)));
3250 if (cc_status
.flags
& CC_NOT_POSITIVE
)
3251 switch (GET_CODE (cond
))
3256 /* Jump becomes unconditional. */
3262 /* Jump becomes no-op. */
3266 PUT_CODE (cond
, EQ
);
3271 PUT_CODE (cond
, NE
);
3279 if (cc_status
.flags
& CC_NOT_NEGATIVE
)
3280 switch (GET_CODE (cond
))
3284 /* Jump becomes unconditional. */
3289 /* Jump becomes no-op. */
3294 PUT_CODE (cond
, EQ
);
3300 PUT_CODE (cond
, NE
);
3308 if (cc_status
.flags
& CC_NO_OVERFLOW
)
3309 switch (GET_CODE (cond
))
3312 /* Jump becomes unconditional. */
3316 PUT_CODE (cond
, EQ
);
3321 PUT_CODE (cond
, NE
);
3326 /* Jump becomes no-op. */
3333 if (cc_status
.flags
& (CC_Z_IN_NOT_N
| CC_Z_IN_N
))
3334 switch (GET_CODE (cond
))
3340 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? GE
: LT
);
3345 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? LT
: GE
);
3350 if (cc_status
.flags
& CC_NOT_SIGNED
)
3351 /* The flags are valid if signed condition operators are converted
3353 switch (GET_CODE (cond
))
3356 PUT_CODE (cond
, LEU
);
3361 PUT_CODE (cond
, LTU
);
3366 PUT_CODE (cond
, GTU
);
3371 PUT_CODE (cond
, GEU
);
3383 /* Report inconsistency between the assembler template and the operands.
3384 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
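/* A typical call (an illustrative sketch, not part of the original
   source) from an operand-printing routine:

     if (!REG_P (x))
       output_operand_lossage ("invalid operand for '%%%c'", code);

   Depending on this_is_asm_operands this becomes either an
   error_for_asm diagnostic pointing at the user's asm statement or an
   internal_error reporting a compiler bug.  */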
3387 output_operand_lossage (const char *cmsgid, ...)
3391   const char *pfx_str;
3394   va_start (ap, cmsgid);
3396   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3397   fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3398   new_message = xvasprintf (fmt_string, ap);
3400   if (this_is_asm_operands)
3401     error_for_asm (this_is_asm_operands, "%s", new_message);
3403     internal_error ("%s", new_message);
3410 /* Output of assembler code from a template, and its subroutines. */
3412 /* Annotate the assembly with a comment describing the pattern and
3413 alternative used. */
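/* For example (an illustrative sketch, not from the original source),
   with "#" as ASM_COMMENT_START this appends comments of the form

	# 42	*movsi_internal/2	[length = 4]

   giving the insn UID, the matched pattern name, the 1-based alternative
   when there is more than one, and the length attribute if defined.
   The pattern name shown here is made up.  */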
3416 output_asm_name (void)
3420       int num = INSN_CODE (debug_insn);
3421       fprintf (asm_out_file, "\t%s %d\t%s",
3422                ASM_COMMENT_START, INSN_UID (debug_insn),
3423                insn_data[num].name);
3424       if (insn_data[num].n_alternatives > 1)
3425         fprintf (asm_out_file, "/%d", which_alternative + 1);
3427       if (HAVE_ATTR_length)
3428         fprintf (asm_out_file, "\t[length = %d]",
3429                  get_attr_length (debug_insn));
3431 /* Clear this so only the first assembler insn
3432 of any rtl insn will get the special comment for -dp. */
3437 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3438 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3439 corresponds to the address of the object and 0 if to the object. */
3442 get_mem_expr_from_op (rtx op
, int *paddressp
)
3450 return REG_EXPR (op
);
3451 else if (!MEM_P (op
))
3454 if (MEM_EXPR (op
) != 0)
3455 return MEM_EXPR (op
);
3457 /* Otherwise we have an address, so indicate it and look at the address. */
3461 /* First check if we have a decl for the address, then look at the right side
3462 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3463 But don't allow the address to itself be indirect. */
3464 if ((expr
= get_mem_expr_from_op (op
, &inner_addressp
)) && ! inner_addressp
)
3466 else if (GET_CODE (op
) == PLUS
3467 && (expr
= get_mem_expr_from_op (XEXP (op
, 1), &inner_addressp
)))
3471 || GET_RTX_CLASS (GET_CODE (op
)) == RTX_BIN_ARITH
)
3474 expr
= get_mem_expr_from_op (op
, &inner_addressp
);
3475 return inner_addressp
? 0 : expr
;
3478 /* Output operand names for assembler instructions. OPERANDS is the
3479 operand vector, OPORDER is the order to write the operands, and NOPS
3480 is the number of operands to write. */
3483 output_asm_operand_names (rtx
*operands
, int *oporder
, int nops
)
3488 for (i
= 0; i
< nops
; i
++)
3491 rtx op
= operands
[oporder
[i
]];
3492 tree expr
= get_mem_expr_from_op (op
, &addressp
);
3494 fprintf (asm_out_file
, "%c%s",
3495 wrote
? ',' : '\t', wrote
? "" : ASM_COMMENT_START
);
3499 fprintf (asm_out_file
, "%s",
3500 addressp
? "*" : "");
3501 print_mem_expr (asm_out_file
, expr
);
3504 else if (REG_P (op
) && ORIGINAL_REGNO (op
)
3505 && ORIGINAL_REGNO (op
) != REGNO (op
))
3506 fprintf (asm_out_file
, " tmp%i", ORIGINAL_REGNO (op
));
3510 #ifdef ASSEMBLER_DIALECT
3511 /* Helper function to parse assembler dialects in the asm string.
3512 This is called from output_asm_insn and asm_fprintf. */
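/* Illustrative sketch (not part of the original source): a port that
   defines ASSEMBLER_DIALECT may write templates such as

     "fadd {%1,%0|%0,%1}"

   where '{', '|' and '}' separate one alternative per dialect;
   dialect_number picks which alternative is kept and the code below
   skips over the others.  The mnemonic is made up for the example.  */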
3514 do_assembler_dialects (const char *p, int *dialect)
3525 output_operand_lossage ("nested assembly dialect alternatives");
3529 /* If we want the first dialect, do nothing. Otherwise, skip
3530 DIALECT_NUMBER of strings ending with '|'. */
3531 for (i
= 0; i
< dialect_number
; i
++)
3533 while (*p
&& *p
!= '}')
3541 /* Skip over any character after a percent sign. */
3553 output_operand_lossage ("unterminated assembly dialect alternative");
3560 /* Skip to close brace. */
3565 output_operand_lossage ("unterminated assembly dialect alternative");
3569 /* Skip over any character after a percent sign. */
3570 if (*p
== '%' && p
[1])
3584 putc (c
, asm_out_file
);
3589 putc (c
, asm_out_file
);
3600 /* Output text from TEMPLATE to the assembler output file,
3601    obeying %-directions to substitute operands taken from
3602    the vector OPERANDS.
3604    %N (for N a digit) means print operand N in usual manner.
3605    %lN means require operand N to be a CODE_LABEL or LABEL_REF
3606       and print the label name with no punctuation.
3607    %cN means require operand N to be a constant
3608       and print the constant expression with no punctuation.
3609    %aN means expect operand N to be a memory address
3610       (not a memory reference!) and print a reference
3611       to it.
3612    %nN means expect operand N to be a constant
3613       and print a constant expression for minus the value
3614       of the operand, with no other punctuation.  */
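/* A usage sketch (not part of the original source): assuming a target
   add pattern whose template is "add %2,%0", with operands[0] a register
   and operands[2] a constant, the call

     output_asm_insn ("add %2,%0", operands);

   writes one tab-indented instruction to asm_out_file, printing each
   operand through the target's TARGET_PRINT_OPERAND hook, so the exact
   register and immediate syntax is target-defined.  */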
3617 output_asm_insn (const char *templ, rtx *operands)
3621 #ifdef ASSEMBLER_DIALECT
3624   int oporder[MAX_RECOG_OPERANDS];
3625   char opoutput[MAX_RECOG_OPERANDS];
3628 /* An insn may return a null string template
3629 in a case where no assembler code is needed. */
3633 memset (opoutput
, 0, sizeof opoutput
);
3635 putc ('\t', asm_out_file
);
3637 #ifdef ASM_OUTPUT_OPCODE
3638 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3645 if (flag_verbose_asm
)
3646 output_asm_operand_names (operands
, oporder
, ops
);
3647 if (flag_print_asm_name
)
3651 memset (opoutput
, 0, sizeof opoutput
);
3653 putc (c
, asm_out_file
);
3654 #ifdef ASM_OUTPUT_OPCODE
3655 while ((c
= *p
) == '\t')
3657 putc (c
, asm_out_file
);
3660 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3664 #ifdef ASSEMBLER_DIALECT
3668 p
= do_assembler_dialects (p
, &dialect
);
3673 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3674 if ASSEMBLER_DIALECT defined and these characters have a special
3675 meaning as dialect delimiters.*/
3677 #ifdef ASSEMBLER_DIALECT
3678 || *p
== '{' || *p
== '}' || *p
== '|'
3682 putc (*p
, asm_out_file
);
3685 /* %= outputs a number which is unique to each insn in the entire
3686 compilation. This is useful for making local labels that are
3687 referred to more than once in a given insn. */
3691 fprintf (asm_out_file
, "%d", insn_counter
);
3693 /* % followed by a letter and some digits
3694 outputs an operand in a special way depending on the letter.
3695 Letters `acln' are implemented directly.
3696 Other letters are passed to `output_operand' so that
3697 the TARGET_PRINT_OPERAND hook can define them. */
3698 else if (ISALPHA (*p
))
3701 unsigned long opnum
;
3704 opnum
= strtoul (p
, &endptr
, 10);
3707 output_operand_lossage ("operand number missing "
3709 else if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3710 output_operand_lossage ("operand number out of range");
3711 else if (letter
== 'l')
3712 output_asm_label (operands
[opnum
]);
3713 else if (letter
== 'a')
3714 output_address (VOIDmode
, operands
[opnum
]);
3715 else if (letter
== 'c')
3717 if (CONSTANT_ADDRESS_P (operands
[opnum
]))
3718 output_addr_const (asm_out_file
, operands
[opnum
]);
3720 output_operand (operands
[opnum
], 'c');
3722 else if (letter
== 'n')
3724 if (CONST_INT_P (operands
[opnum
]))
3725 fprintf (asm_out_file
, HOST_WIDE_INT_PRINT_DEC
,
3726 - INTVAL (operands
[opnum
]));
3729 putc ('-', asm_out_file
);
3730 output_addr_const (asm_out_file
, operands
[opnum
]);
3734 output_operand (operands
[opnum
], letter
);
3736 if (!opoutput
[opnum
])
3737 oporder
[ops
++] = opnum
;
3738 opoutput
[opnum
] = 1;
3743 /* % followed by a digit outputs an operand the default way. */
3744 else if (ISDIGIT (*p
))
3746 unsigned long opnum
;
3749 opnum
= strtoul (p
, &endptr
, 10);
3750 if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3751 output_operand_lossage ("operand number out of range");
3753 output_operand (operands
[opnum
], 0);
3755 if (!opoutput
[opnum
])
3756 oporder
[ops
++] = opnum
;
3757 opoutput
[opnum
] = 1;
3762 /* % followed by punctuation: output something for that
3763 punctuation character alone, with no operand. The
3764 TARGET_PRINT_OPERAND hook decides what is actually done. */
3765 else if (targetm
.asm_out
.print_operand_punct_valid_p ((unsigned char) *p
))
3766 output_operand (NULL_RTX
, *p
++);
3768 output_operand_lossage ("invalid %%-code");
3772 putc (c
, asm_out_file
);
3775 /* Write out the variable names for operands, if we know them. */
3776 if (flag_verbose_asm
)
3777 output_asm_operand_names (operands
, oporder
, ops
);
3778 if (flag_print_asm_name
)
3781 putc ('\n', asm_out_file
);
3784 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3787 output_asm_label (rtx x
)
3791 if (GET_CODE (x
) == LABEL_REF
)
3792 x
= LABEL_REF_LABEL (x
);
3795 && NOTE_KIND (x
) == NOTE_INSN_DELETED_LABEL
))
3796 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3798 output_operand_lossage ("'%%l' operand isn't a label");
3800 assemble_name (asm_out_file
, buf
);
3803 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3806 mark_symbol_refs_as_used (rtx x
)
3808 subrtx_iterator::array_type array
;
3809 FOR_EACH_SUBRTX (iter
, array
, x
, ALL
)
3811 const_rtx x
= *iter
;
3812 if (GET_CODE (x
) == SYMBOL_REF
)
3813 if (tree t
= SYMBOL_REF_DECL (x
))
3814 assemble_external (t
);
3818 /* Print operand X using machine-dependent assembler syntax.
3819 CODE is a non-digit that preceded the operand-number in the % spec,
3820 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3821 between the % and the digits.
3822 When CODE is a non-letter, X is 0.
3824 The meanings of the letters are machine-dependent and controlled
3825 by TARGET_PRINT_OPERAND. */
3828 output_operand (rtx x
, int code ATTRIBUTE_UNUSED
)
3830 if (x
&& GET_CODE (x
) == SUBREG
)
3831 x
= alter_subreg (&x
, true);
3833 /* X must not be a pseudo reg. */
3834 if (!targetm
.no_register_allocation
)
3835 gcc_assert (!x
|| !REG_P (x
) || REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3837 targetm
.asm_out
.print_operand (asm_out_file
, x
, code
);
3842 mark_symbol_refs_as_used (x
);
3845 /* Print a memory reference operand for address X using
3846 machine-dependent assembler syntax. */
3849 output_address (machine_mode mode
, rtx x
)
3851 bool changed
= false;
3852 walk_alter_subreg (&x
, &changed
);
3853 targetm
.asm_out
.print_operand_address (asm_out_file
, mode
, x
);
3856 /* Print an integer constant expression in assembler syntax.
3857 Addition and subtraction are the only arithmetic
3858 that may appear in these expressions. */
3861 output_addr_const (FILE *file
, rtx x
)
3866 switch (GET_CODE (x
))
3873 if (SYMBOL_REF_DECL (x
))
3874 assemble_external (SYMBOL_REF_DECL (x
));
3875 #ifdef ASM_OUTPUT_SYMBOL_REF
3876 ASM_OUTPUT_SYMBOL_REF (file
, x
);
3878 assemble_name (file
, XSTR (x
, 0));
3883 x
= LABEL_REF_LABEL (x
);
3886 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3887 #ifdef ASM_OUTPUT_LABEL_REF
3888 ASM_OUTPUT_LABEL_REF (file
, buf
);
3890 assemble_name (file
, buf
);
3895 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3899 /* This used to output parentheses around the expression,
3900 but that does not work on the 386 (either ATT or BSD assembler). */
3901 output_addr_const (file
, XEXP (x
, 0));
3904 case CONST_WIDE_INT
:
3905 /* We do not know the mode here so we have to use a round about
3906 way to build a wide-int to get it printed properly. */
3908 wide_int w
= wide_int::from_array (&CONST_WIDE_INT_ELT (x
, 0),
3909 CONST_WIDE_INT_NUNITS (x
),
3910 CONST_WIDE_INT_NUNITS (x
)
3911 * HOST_BITS_PER_WIDE_INT
,
3913 print_decs (w
, file
);
3918 if (CONST_DOUBLE_AS_INT_P (x
))
3920 /* We can use %d if the number is one word and positive. */
3921 if (CONST_DOUBLE_HIGH (x
))
3922 fprintf (file
, HOST_WIDE_INT_PRINT_DOUBLE_HEX
,
3923 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (x
),
3924 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3925 else if (CONST_DOUBLE_LOW (x
) < 0)
3926 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
3927 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3929 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3932 /* We can't handle floating point constants;
3933 PRINT_OPERAND must handle them. */
3934 output_operand_lossage ("floating constant misused");
3938 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_FIXED_VALUE_LOW (x
));
3942 /* Some assemblers need integer constants to appear last (eg masm). */
3943 if (CONST_INT_P (XEXP (x
, 0)))
3945 output_addr_const (file
, XEXP (x
, 1));
3946 if (INTVAL (XEXP (x
, 0)) >= 0)
3947 fprintf (file
, "+");
3948 output_addr_const (file
, XEXP (x
, 0));
3952 output_addr_const (file
, XEXP (x
, 0));
3953 if (!CONST_INT_P (XEXP (x
, 1))
3954 || INTVAL (XEXP (x
, 1)) >= 0)
3955 fprintf (file
, "+");
3956 output_addr_const (file
, XEXP (x
, 1));
3961 /* Avoid outputting things like x-x or x+5-x,
3962 since some assemblers can't handle that. */
3963 x
= simplify_subtraction (x
);
3964 if (GET_CODE (x
) != MINUS
)
3967 output_addr_const (file
, XEXP (x
, 0));
3968 fprintf (file
, "-");
3969 if ((CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) >= 0)
3970 || GET_CODE (XEXP (x
, 1)) == PC
3971 || GET_CODE (XEXP (x
, 1)) == SYMBOL_REF
)
3972 output_addr_const (file
, XEXP (x
, 1));
3975 fputs (targetm
.asm_out
.open_paren
, file
);
3976 output_addr_const (file
, XEXP (x
, 1));
3977 fputs (targetm
.asm_out
.close_paren
, file
);
3985 output_addr_const (file
, XEXP (x
, 0));
3989 if (targetm
.asm_out
.output_addr_const_extra (file
, x
))
3992 output_operand_lossage ("invalid expression as operand");
3996 /* Output a quoted string. */
3999 output_quoted_string (FILE *asm_file
, const char *string
)
4001 #ifdef OUTPUT_QUOTED_STRING
4002 OUTPUT_QUOTED_STRING (asm_file
, string
);
4006 putc ('\"', asm_file
);
4007 while ((c
= *string
++) != 0)
4011 if (c
== '\"' || c
== '\\')
4012 putc ('\\', asm_file
);
4016 fprintf (asm_file
, "\\%03o", (unsigned char) c
);
4018 putc ('\"', asm_file
);
4022 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4025 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4027   char buf[2 + CHAR_BIT * sizeof (value) / 4];
4032   char *p = buf + sizeof (buf);
4034       *--p = "0123456789abcdef"[value % 16];
4035   while ((value /= 16) != 0);
4038   fwrite (p, 1, buf + sizeof (buf) - p, f);
4042 /* Internal function that prints an unsigned long in decimal in reverse.
4043    The output string IS NOT null-terminated. */
4046 sprint_ul_rev (char *s, unsigned long value)
4051       s[i] = "0123456789"[value % 10];
4054       /* alternate version, without modulo */
4055       /* oldval = value; */
4057       /* s[i] = "0123456789" [oldval - 10*value]; */
4064 /* Write an unsigned long as decimal to a file, fast. */
4067 fprint_ul (FILE *f, unsigned long value)
4069   /* python says: len(str(2**64)) == 20 */
4073   i = sprint_ul_rev (s, value);
4075   /* It's probably too small to bother with string reversal and fputs. */
4084 /* Write an unsigned long as decimal to a string, fast.
4085    s must be wide enough to not overflow, at least 21 chars.
4086    Returns the length of the string (without terminating '\0'). */
4089 sprint_ul (char *s, unsigned long value)
4091   int len = sprint_ul_rev (s, value);
4094   std::reverse (s, s + len);
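/* Usage sketch (illustrative, not part of the original source):

     char buf[21];
     int len = sprint_ul (buf, 12345UL);
       ... len == 5 and buf now holds "12345".

   The 21-character minimum quoted above leaves room for the 20 digits of
   the largest 64-bit value plus the terminating '\0'.  */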
4098 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4099 %R prints the value of REGISTER_PREFIX.
4100 %L prints the value of LOCAL_LABEL_PREFIX.
4101 %U prints the value of USER_LABEL_PREFIX.
4102 %I prints the value of IMMEDIATE_PREFIX.
4103 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4104 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4106 We handle alternate assembler dialects here, just like output_asm_insn. */
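/* A usage sketch (illustrative, not part of the original source): a
   back end might write

     asm_fprintf (asm_out_file, "\tpush\t%R%s\n", reg_names[regno]);

   and on a target whose REGISTER_PREFIX is "%" this emits, say,
   "\tpush\t%r3\n"; on targets with an empty prefix the %R expands to
   nothing.  The mnemonic is made up for the example.  */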
4109 asm_fprintf (FILE *file, const char *p, ...)
4113 #ifdef ASSEMBLER_DIALECT
4118   va_start (argptr, p);
4125 #ifdef ASSEMBLER_DIALECT
4129 p
= do_assembler_dialects (p
, &dialect
);
4136 while (strchr ("-+ #0", c
))
4141 while (ISDIGIT (c
) || c
== '.')
4152 case 'd': case 'i': case 'u':
4153 case 'x': case 'X': case 'o':
4157 fprintf (file
, buf
, va_arg (argptr
, int));
4161 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4162 'o' cases, but we do not check for those cases. It
4163 means that the value is a HOST_WIDE_INT, which may be
4164 either `long' or `long long'. */
4165 memcpy (q
, HOST_WIDE_INT_PRINT
, strlen (HOST_WIDE_INT_PRINT
));
4166 q
+= strlen (HOST_WIDE_INT_PRINT
);
4169 fprintf (file
, buf
, va_arg (argptr
, HOST_WIDE_INT
));
4174 #ifdef HAVE_LONG_LONG
4180 fprintf (file
, buf
, va_arg (argptr
, long long));
4187 fprintf (file
, buf
, va_arg (argptr
, long));
4195 fprintf (file
, buf
, va_arg (argptr
, char *));
4199 #ifdef ASM_OUTPUT_OPCODE
4200 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
4205 #ifdef REGISTER_PREFIX
4206 fprintf (file
, "%s", REGISTER_PREFIX
);
4211 #ifdef IMMEDIATE_PREFIX
4212 fprintf (file
, "%s", IMMEDIATE_PREFIX
);
4217 #ifdef LOCAL_LABEL_PREFIX
4218 fprintf (file
, "%s", LOCAL_LABEL_PREFIX
);
4223 fputs (user_label_prefix
, file
);
4226 #ifdef ASM_FPRINTF_EXTENSIONS
4227 /* Uppercase letters are reserved for general use by asm_fprintf
4228 and so are not available to target specific code. In order to
4229 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4230 they are defined here. As they get turned into real extensions
4231 to asm_fprintf they should be removed from this list. */
4232 case 'A': case 'B': case 'C': case 'D': case 'E':
4233 case 'F': case 'G': case 'H': case 'J': case 'K':
4234 case 'M': case 'N': case 'P': case 'Q': case 'S':
4235 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4238 ASM_FPRINTF_EXTENSIONS (file
, argptr
, p
)
4251 /* Return nonzero if this function has no function calls. */
4254 leaf_function_p (void)
4258 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4259 functions even if they call mcount. */
4260 if (crtl
->profile
&& !targetm
.keep_leaf_when_profiled ())
4263 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4266 && ! SIBLING_CALL_P (insn
))
4268 if (NONJUMP_INSN_P (insn
)
4269 && GET_CODE (PATTERN (insn
)) == SEQUENCE
4270 && CALL_P (XVECEXP (PATTERN (insn
), 0, 0))
4271 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn
), 0, 0)))
4278 /* Return 1 if branch is a forward branch.
4279    Uses insn_shuid array, so it works only in the final pass.  May be used by
4280    output templates to customarily add branch prediction hints.  */
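/* Illustrative sketch (not part of the original source): a target's
   jump output code could use this as

     return final_forward_branch_p (insn) ? "beq,pn\t%l0" : "beq,pt\t%l0";

   choosing a "predict not taken" hint for forward branches; the mnemonic
   and hint suffixes here are invented for the example.  */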
4283 final_forward_branch_p (rtx_insn *insn)
4285   int insn_id, label_id;
4287   gcc_assert (uid_shuid);
4288   insn_id = INSN_SHUID (insn);
4289   label_id = INSN_SHUID (JUMP_LABEL (insn));
4290   /* We've hit some insns that do not have id information available.  */
4291   gcc_assert (insn_id && label_id);
4292   return insn_id < label_id;
4295 /* On some machines, a function with no call insns
4296 can run faster if it doesn't create its own register window.
4297 When output, the leaf function should use only the "output"
4298 registers. Ordinarily, the function would be compiled to use
4299 the "input" registers to find its arguments; it is a candidate
4300 for leaf treatment if it uses only the "input" registers.
4301 Leaf function treatment means renumbering so the function
4302 uses the "output" registers instead. */
4304 #ifdef LEAF_REGISTERS
4306 /* Return 1 if this function uses only the registers that can be
4307 safely renumbered. */
4310 only_leaf_regs_used (void)
4313 const char *const permitted_reg_in_leaf_functions
= LEAF_REGISTERS
;
4315 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4316 if ((df_regs_ever_live_p (i
) || global_regs
[i
])
4317 && ! permitted_reg_in_leaf_functions
[i
])
4320 if (crtl
->uses_pic_offset_table
4321 && pic_offset_table_rtx
!= 0
4322 && REG_P (pic_offset_table_rtx
)
4323 && ! permitted_reg_in_leaf_functions
[REGNO (pic_offset_table_rtx
)])
4329 /* Scan all instructions and renumber all registers into those
4330 available in leaf functions. */
4333 leaf_renumber_regs (rtx_insn
*first
)
4337 /* Renumber only the actual patterns.
4338 The reg-notes can contain frame pointer refs,
4339 and renumbering them could crash, and should not be needed. */
4340 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4342 leaf_renumber_regs_insn (PATTERN (insn
));
4345 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4346 available in leaf functions. */
4349 leaf_renumber_regs_insn (rtx in_rtx
)
4352 const char *format_ptr
;
4357 /* Renumber all input-registers into output-registers.
4358 renumbered_regs would be 1 for an output-register;
4365 /* Don't renumber the same reg twice. */
4369 newreg
= REGNO (in_rtx
);
4370 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4371 to reach here as part of a REG_NOTE. */
4372 if (newreg
>= FIRST_PSEUDO_REGISTER
)
4377 newreg
= LEAF_REG_REMAP (newreg
);
4378 gcc_assert (newreg
>= 0);
4379 df_set_regs_ever_live (REGNO (in_rtx
), false);
4380 df_set_regs_ever_live (newreg
, true);
4381 SET_REGNO (in_rtx
, newreg
);
4386 if (INSN_P (in_rtx
))
4388 /* Inside a SEQUENCE, we find insns.
4389 Renumber just the patterns of these insns,
4390 just as we do for the top-level insns. */
4391 leaf_renumber_regs_insn (PATTERN (in_rtx
));
4395 format_ptr
= GET_RTX_FORMAT (GET_CODE (in_rtx
));
4397 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (in_rtx
)); i
++)
4398 switch (*format_ptr
++)
4401 leaf_renumber_regs_insn (XEXP (in_rtx
, i
));
4405 if (NULL
!= XVEC (in_rtx
, i
))
4407 for (j
= 0; j
< XVECLEN (in_rtx
, i
); j
++)
4408 leaf_renumber_regs_insn (XVECEXP (in_rtx
, i
, j
));
4427 /* Turn the RTL into assembly. */
4429 rest_of_handle_final (void)
4431 const char *fnname
= get_fnname_from_decl (current_function_decl
);
4433 assemble_start_function (current_function_decl
, fnname
);
4434 final_start_function (get_insns (), asm_out_file
, optimize
);
4435 final (get_insns (), asm_out_file
, optimize
);
4437 collect_fn_hard_reg_usage ();
4438 final_end_function ();
4440 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4441 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4442 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4443 output_function_exception_table (fnname
);
4445 assemble_end_function (current_function_decl
, fnname
);
4447 user_defined_section_attribute
= false;
4449 /* Free up reg info memory. */
4453 fflush (asm_out_file
);
4455 /* Write DBX symbols if requested. */
4457 /* Note that for those inline functions where we don't initially
4458 know for certain that we will be generating an out-of-line copy,
4459 the first invocation of this routine (rest_of_compilation) will
4460 skip over this code by doing a `goto exit_rest_of_compilation;'.
4461 Later on, wrapup_global_declarations will (indirectly) call
4462 rest_of_compilation again for those inline functions that need
4463 to have out-of-line copies generated. During that call, we
4464 *will* be routed past here. */
4466 timevar_push (TV_SYMOUT
);
4467 if (!DECL_IGNORED_P (current_function_decl
))
4468 debug_hooks
->function_decl (current_function_decl
);
4469 timevar_pop (TV_SYMOUT
);
4471 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4472 DECL_INITIAL (current_function_decl
) = error_mark_node
;
4474 if (DECL_STATIC_CONSTRUCTOR (current_function_decl
)
4475 && targetm
.have_ctors_dtors
)
4476 targetm
.asm_out
.constructor (XEXP (DECL_RTL (current_function_decl
), 0),
4477 decl_init_priority_lookup
4478 (current_function_decl
));
4479 if (DECL_STATIC_DESTRUCTOR (current_function_decl
)
4480 && targetm
.have_ctors_dtors
)
4481 targetm
.asm_out
.destructor (XEXP (DECL_RTL (current_function_decl
), 0),
4482 decl_fini_priority_lookup
4483 (current_function_decl
));
4489 const pass_data pass_data_final
=
4491 RTL_PASS
, /* type */
4493 OPTGROUP_NONE
, /* optinfo_flags */
4494 TV_FINAL
, /* tv_id */
4495 0, /* properties_required */
4496 0, /* properties_provided */
4497 0, /* properties_destroyed */
4498 0, /* todo_flags_start */
4499 0, /* todo_flags_finish */
4502 class pass_final
: public rtl_opt_pass
4505 pass_final (gcc::context
*ctxt
)
4506 : rtl_opt_pass (pass_data_final
, ctxt
)
4509 /* opt_pass methods: */
4510 virtual unsigned int execute (function
*) { return rest_of_handle_final (); }
4512 }; // class pass_final
4517 make_pass_final (gcc::context
*ctxt
)
4519 return new pass_final (ctxt
);
4524 rest_of_handle_shorten_branches (void)
4526 /* Shorten branches. */
4527 shorten_branches (get_insns ());
4533 const pass_data pass_data_shorten_branches
=
4535 RTL_PASS
, /* type */
4536 "shorten", /* name */
4537 OPTGROUP_NONE
, /* optinfo_flags */
4538 TV_SHORTEN_BRANCH
, /* tv_id */
4539 0, /* properties_required */
4540 0, /* properties_provided */
4541 0, /* properties_destroyed */
4542 0, /* todo_flags_start */
4543 0, /* todo_flags_finish */
4546 class pass_shorten_branches
: public rtl_opt_pass
4549 pass_shorten_branches (gcc::context
*ctxt
)
4550 : rtl_opt_pass (pass_data_shorten_branches
, ctxt
)
4553 /* opt_pass methods: */
4554 virtual unsigned int execute (function
*)
4556 return rest_of_handle_shorten_branches ();
4559 }; // class pass_shorten_branches
4564 make_pass_shorten_branches (gcc::context
*ctxt
)
4566 return new pass_shorten_branches (ctxt
);
4571 rest_of_clean_state (void)
4573 rtx_insn
*insn
, *next
;
4574 FILE *final_output
= NULL
;
4575 int save_unnumbered
= flag_dump_unnumbered
;
4576 int save_noaddr
= flag_dump_noaddr
;
4578 if (flag_dump_final_insns
)
4580 final_output
= fopen (flag_dump_final_insns
, "a");
4583 error ("could not open final insn dump file %qs: %m",
4584 flag_dump_final_insns
);
4585 flag_dump_final_insns
= NULL
;
4589 flag_dump_noaddr
= flag_dump_unnumbered
= 1;
4590 if (flag_compare_debug_opt
|| flag_compare_debug
)
4591 dump_flags
|= TDF_NOUID
;
4592 dump_function_header (final_output
, current_function_decl
,
4594 final_insns_dump_p
= true;
4596 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4598 INSN_UID (insn
) = CODE_LABEL_NUMBER (insn
);
4602 set_block_for_insn (insn
, NULL
);
4603 INSN_UID (insn
) = 0;
4608 /* It is very important to decompose the RTL instruction chain here:
4609 debug information keeps pointing into CODE_LABEL insns inside the function
4610 body. If these remain pointing to the other insns, we end up preserving
4611 whole RTL chain and attached detailed debug info in memory. */
4612 for (insn
= get_insns (); insn
; insn
= next
)
4614 next
= NEXT_INSN (insn
);
4615 SET_NEXT_INSN (insn
) = NULL
;
4616 SET_PREV_INSN (insn
) = NULL
;
4619 && (!NOTE_P (insn
) ||
4620 (NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
4621 && NOTE_KIND (insn
) != NOTE_INSN_CALL_ARG_LOCATION
4622 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_BEG
4623 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_END
4624 && NOTE_KIND (insn
) != NOTE_INSN_DELETED_DEBUG_LABEL
)))
4625 print_rtl_single (final_output
, insn
);
4630 flag_dump_noaddr
= save_noaddr
;
4631 flag_dump_unnumbered
= save_unnumbered
;
4632 final_insns_dump_p
= false;
4634 if (fclose (final_output
))
4636 error ("could not close final insn dump file %qs: %m",
4637 flag_dump_final_insns
);
4638 flag_dump_final_insns
= NULL
;
4642 /* In case the function was not output,
4643 don't leave any temporary anonymous types
4644 queued up for sdb output. */
4645 if (SDB_DEBUGGING_INFO
&& write_symbols
== SDB_DEBUG
)
4646 sdbout_types (NULL_TREE
);
4648 flag_rerun_cse_after_global_opts
= 0;
4649 reload_completed
= 0;
4650 epilogue_completed
= 0;
4652 regstack_completed
= 0;
4655 /* Clear out the insn_length contents now that they are no
4657 init_insn_lengths ();
4659 /* Show no temporary slots allocated. */
4662 free_bb_for_insn ();
4664 delete_tree_ssa (cfun
);
4666 /* We can reduce stack alignment on call site only when we are sure that
4667 the function body just produced will be actually used in the final
4669 if (decl_binds_to_current_def_p (current_function_decl
))
4671 unsigned int pref
= crtl
->preferred_stack_boundary
;
4672 if (crtl
->stack_alignment_needed
> crtl
->preferred_stack_boundary
)
4673 pref
= crtl
->stack_alignment_needed
;
4674 cgraph_node::rtl_info (current_function_decl
)
4675 ->preferred_incoming_stack_boundary
= pref
;
4678 /* Make sure volatile mem refs aren't considered valid operands for
4679 arithmetic insns. We must call this here if this is a nested inline
4680 function, since the above code leaves us in the init_recog state,
4681 and the function context push/pop code does not save/restore volatile_ok.
4683 ??? Maybe it isn't necessary for expand_start_function to call this
4684 anymore if we do it here? */
4686 init_recog_no_volatile ();
4688 /* We're done with this function. Free up memory if we can. */
4689 free_after_parsing (cfun
);
4690 free_after_compilation (cfun
);
4696 const pass_data pass_data_clean_state
=
4698 RTL_PASS
, /* type */
4699 "*clean_state", /* name */
4700 OPTGROUP_NONE
, /* optinfo_flags */
4701 TV_FINAL
, /* tv_id */
4702 0, /* properties_required */
4703 0, /* properties_provided */
4704 PROP_rtl
, /* properties_destroyed */
4705 0, /* todo_flags_start */
4706 0, /* todo_flags_finish */
4709 class pass_clean_state
: public rtl_opt_pass
4712 pass_clean_state (gcc::context
*ctxt
)
4713 : rtl_opt_pass (pass_data_clean_state
, ctxt
)
4716 /* opt_pass methods: */
4717 virtual unsigned int execute (function
*)
4719 return rest_of_clean_state ();
4722 }; // class pass_clean_state
4727 make_pass_clean_state (gcc::context
*ctxt
)
4729 return new pass_clean_state (ctxt
);
4732 /* Return true if INSN is a call to the current function. */
4735 self_recursive_call_p (rtx_insn
*insn
)
4737 tree fndecl
= get_call_fndecl (insn
);
4738 return (fndecl
== current_function_decl
4739 && decl_binds_to_current_def_p (fndecl
));
4742 /* Collect hard register usage for the current function. */
4745 collect_fn_hard_reg_usage (void)
4751 struct cgraph_rtl_info
*node
;
4752 HARD_REG_SET function_used_regs
;
4754 /* ??? To be removed when all the ports have been fixed. */
4755 if (!targetm
.call_fusage_contains_non_callee_clobbers
)
4758 CLEAR_HARD_REG_SET (function_used_regs
);
4760 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= next_insn (insn
))
4762 HARD_REG_SET insn_used_regs
;
4764 if (!NONDEBUG_INSN_P (insn
))
4768 && !self_recursive_call_p (insn
))
4770 if (!get_call_reg_set_usage (insn
, &insn_used_regs
,
4774 IOR_HARD_REG_SET (function_used_regs
, insn_used_regs
);
4777 find_all_hard_reg_sets (insn
, &insn_used_regs
, false);
4778 IOR_HARD_REG_SET (function_used_regs
, insn_used_regs
);
4781 /* Be conservative - mark fixed and global registers as used. */
4782 IOR_HARD_REG_SET (function_used_regs
, fixed_reg_set
);
4785 /* Handle STACK_REGS conservatively, since the df-framework does not
4786 provide accurate information for them. */
4788 for (i
= FIRST_STACK_REG
; i
<= LAST_STACK_REG
; i
++)
4789 SET_HARD_REG_BIT (function_used_regs
, i
);
4792 /* The information we have gathered is only interesting if it exposes a
4793 register from the call_used_regs that is not used in this function. */
4794 if (hard_reg_set_subset_p (call_used_reg_set
, function_used_regs
))
4797 node
= cgraph_node::rtl_info (current_function_decl
);
4798 gcc_assert (node
!= NULL
);
4800 COPY_HARD_REG_SET (node
->function_used_regs
, function_used_regs
);
4801 node
->function_used_regs_valid
= 1;
4804 /* Get the declaration of the function called by INSN. */
4807 get_call_fndecl (rtx_insn
*insn
)
4811 note
= find_reg_note (insn
, REG_CALL_DECL
, NULL_RTX
);
4812 if (note
== NULL_RTX
)
4815 datum
= XEXP (note
, 0);
4816 if (datum
!= NULL_RTX
)
4817 return SYMBOL_REF_DECL (datum
);
4822 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4823 call targets that can be overwritten. */
4825 static struct cgraph_rtl_info
*
4826 get_call_cgraph_rtl_info (rtx_insn
*insn
)
4830 if (insn
== NULL_RTX
)
4833 fndecl
= get_call_fndecl (insn
);
4834 if (fndecl
== NULL_TREE
4835 || !decl_binds_to_current_def_p (fndecl
))
4838 return cgraph_node::rtl_info (fndecl
);
4841 /* Find hard registers used by function call instruction INSN, and return them
4842 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4845 get_call_reg_set_usage (rtx_insn
*insn
, HARD_REG_SET
*reg_set
,
4846 HARD_REG_SET default_set
)
4850 struct cgraph_rtl_info
*node
= get_call_cgraph_rtl_info (insn
);
4852 && node
->function_used_regs_valid
)
4854 COPY_HARD_REG_SET (*reg_set
, node
->function_used_regs
);
4855 AND_HARD_REG_SET (*reg_set
, default_set
);
4860 COPY_HARD_REG_SET (*reg_set
, default_set
);