/* Convert RTL to assembler code and output it, for GNU compiler.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This is the final pass of the compiler.
   It looks at the rtl code for a function and outputs assembler code.

   Call `final_start_function' to output the assembler code for function entry,
   `final' to output assembler code for some RTL code,
   `final_end_function' to output assembler code for function exit.
   If a function is compiled in several pieces, each piece is
   output separately with `final'.

   Some optimizations are also done at this level.
   Move instructions that were made unnecessary by good register allocation
   are detected and omitted from the output.  (Though most of these
   are removed by the last jump pass.)

   Instructions to set the condition codes are omitted when it can be
   seen that the condition codes already had the desired values.

   In some cases it is sufficient if the inherited condition codes
   have related values, but this may require the following insn
   (the one that tests the condition codes) to be modified.

   The code for the function prologue and epilogue is generated
   directly in assembler by the target hooks function_prologue and
   function_epilogue.  Those instructions never exist as rtl.  */
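/* For illustration only (an editor's sketch, not part of the original
   source): a driver is expected to invoke this pass roughly as

       final_start_function (first, asm_out_file, optimize);
       final (first, asm_out_file, optimize);
       final_end_function ();

   where FIRST is the first insn of the function being compiled; in GCC
   proper this sequence lives in rest_of_handle_final later in this file.  */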
#define INCLUDE_ALGORITHM /* reverse  */
#include "coretypes.h"
#include "insn-config.h"
#include "tree-pretty-print.h" /* for dump_function_header */
#include "insn-attr.h"
#include "conditions.h"
#include "rtl-error.h"
#include "toplev.h" /* exact_log2, floor_log2 */
#include "tree-pass.h"
#include "print-rtl.h"

#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h"		/* Needed for external data declarations.  */
#endif

#include "dwarf2out.h"

#ifdef DBX_DEBUGGING_INFO
#include "dbxout.h"
#endif

/* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
   So define a null default for it to save conditionalization later.  */
#ifndef CC_STATUS_INIT
#define CC_STATUS_INIT
#endif
/* Is the given character a logical line separator for the assembler?  */
#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
#endif

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Bitflags used by final_scan_insn.  */
#define SEEN_NOTE	1
#define SEEN_EMITTED	2

/* Last insn processed by final_scan_insn.  */
static rtx_insn *debug_insn;
rtx_insn *current_output_insn;

/* Line number of last NOTE.  */
static int last_linenum;

/* Last discriminator written to assembly.  */
static int last_discriminator;

/* Discriminator of current block.  */
static int discriminator;

/* Highest line number in current block.  */
static int high_block_linenum;

/* Likewise for function.  */
static int high_function_linenum;

/* Filename of last NOTE.  */
static const char *last_filename;

/* Override filename and line number.  */
static const char *override_filename;
static int override_linenum;

/* Whether to force emission of a line note before the next insn.  */
static bool force_source_line = false;

extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */

/* Nonzero while outputting an `asm' with operands.
   This means that inconsistencies are the user's fault, so don't die.
   The precise value is the insn being output, to pass to error_for_asm.  */
const rtx_insn *this_is_asm_operands;

/* Number of operands of this insn, for an `asm' with operands.  */
static unsigned int insn_noperands;

/* Compare optimization flag.  */
static rtx last_ignored_compare = 0;

/* Assign a unique number to each insn that is output.
   This can be used to generate unique local labels.  */
static int insn_counter = 0;
/* This variable contains machine-dependent flags (defined in tm.h)
   set and examined by output routines
   that describe how to interpret the condition codes properly.  */
CC_STATUS cc_status;

/* During output of an insn, this contains a copy of cc_status
   from before the insn.  */
CC_STATUS cc_prev_status;

/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
static int block_depth;

/* Nonzero if have enabled APP processing of our assembler output.  */
static int app_on;

/* If we are outputting an insn sequence, this contains the sequence rtx.
   Zero otherwise.  */
rtx_sequence *final_sequence;

#ifdef ASSEMBLER_DIALECT

/* Number of the assembler dialect to use, starting at 0.  */
static int dialect_number;
#endif

/* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
rtx current_insn_predicate;

/* True if printing into -fdump-final-insns= dump.  */
bool final_insns_dump_p;

/* True if profile_function should be called, but hasn't been called yet.  */
static bool need_profile_function;

static int asm_insn_count (rtx);
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
static bool notice_source_line (rtx_insn *, bool *);
static rtx walk_alter_subreg (rtx *, bool *);
static void output_asm_name (void);
static void output_alternate_entry_point (FILE *, rtx_insn *);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
#ifdef LEAF_REGISTERS
static void leaf_renumber_regs (rtx_insn *);
#endif
static int alter_cond (rtx);
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx_insn *);
#endif
static int align_fuzz (rtx, rtx, int, unsigned);
static void collect_fn_hard_reg_usage (void);
static tree get_call_fndecl (rtx_insn *);
/* Initialize data in final at the beginning of a compilation.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}

/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */

void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
}

void
default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
					 tree decl ATTRIBUTE_UNUSED,
					 bool new_is_cold ATTRIBUTE_UNUSED)
{
}

/* Default target hook that outputs nothing to a stream.  */

void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}

/* Enable APP processing of subsequent output.
   Used before the output from an `asm' statement.  */

void
app_enable (void)
{
  if (! app_on)
    {
      fputs (ASM_APP_ON, asm_out_file);
      app_on = 1;
    }
}

/* Disable APP processing of subsequent output.
   Called from varasm.c before most kinds of output.  */

void
app_disable (void)
{
  if (app_on)
    {
      fputs (ASM_APP_OFF, asm_out_file);
      app_on = 0;
    }
}

/* Return the number of slots filled in the current
   delayed branch sequence (we don't count the insn needing the
   delay slot).  Zero if not in a delayed branch sequence.  */

int
dbr_sequence_length (void)
{
  if (final_sequence != 0)
    return XVECLEN (final_sequence, 0) - 1;
  else
    return 0;
}
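/* For illustration only (not part of the original source): while a filled
   delay-slot SEQUENCE such as (sequence [branch_insn slot_insn]) is being
   output, final_sequence points to it and dbr_sequence_length () returns 1;
   outside any such sequence it returns 0.  */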
/* The next two pages contain routines used to compute the length of an insn
   and to shorten branches.  */

/* Arrays for insn lengths, and addresses.  The latter is referenced by
   `insn_current_length'.  */

static int *insn_lengths;

vec<int> insn_addresses_;

/* Max uid for which the above arrays are valid.  */
static int insn_lengths_max_uid;

/* Address of insn being processed.  Used by `insn_current_length'.  */
int insn_current_address;

/* Address of insn being processed in previous iteration.  */
int insn_last_address;

/* Known invariant alignment of insn being processed.  */
int insn_current_align;

/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
   gives the next following alignment insn that increases the known
   alignment, or NULL_RTX if there is no such insn.
   For any alignment obtained this way, we can again index uid_align with
   its uid to obtain the next following align that in turn increases the
   alignment, till we reach NULL_RTX; the sequence obtained this way
   for each insn we'll call the alignment chain of this insn in the
   following comment.  */

struct label_alignment
{
  short alignment;
  short max_skip;
};

static rtx *uid_align;
static int *uid_shuid;
static struct label_alignment *label_align;

/* Indicate that branch shortening hasn't yet been done.  */

void
init_insn_lengths (void)
{
  insn_lengths_max_uid = 0;
  if (HAVE_ATTR_length)
    INSN_ADDRESSES_FREE ();
}
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */

static int
get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
{
  rtx body;
  int i;
  int length = 0;

  if (!HAVE_ATTR_length)
    return 0;

  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case CALL_INSN:
      case JUMP_INSN:
	length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;
	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
	  for (i = 0; i < seq->len (); i++)
	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length.  */
int
get_attr_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}

/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length.  */
int
get_attr_min_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
/* Code to handle alignment inside shorten_branches.  */

/* Here is an explanation how the algorithm in align_fuzz can give
   proper results:

   Call a sequence of instructions beginning with alignment point X
   and continuing until the next alignment point `block X'.  When `X'
   is used in an expression, it means the alignment value of the
   alignment point.

   Call the distance between the start of the first insn of block X, and
   the end of the last insn of block X `IX', for the `inner size of X'.
   This is clearly the sum of the instruction lengths.

   Likewise with the next alignment-delimited block following X, which we
   shall call block Y.

   Call the distance between the start of the first insn of block X, and
   the start of the first insn of block Y `OX', for the `outer size of X'.

   The estimated padding is then OX - IX.

   OX can be safely estimated as

       if (X >= Y)
	 OX = round_up (IX, Y)
       else
	 OX = round_up (IX, X) + Y - X

   Clearly est(IX) >= real(IX), because that only depends on the
   instruction lengths, and those being overestimated is a given.

   Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
   we needn't worry about that when thinking about OX.

   When X >= Y, the alignment provided by Y adds no uncertainty factor
   for branch ranges starting before X, so we can just round what we have.
   But when X < Y, we don't know anything about the, so to speak,
   `middle bits', so we have to assume the worst when aligning up from an
   address mod X to one mod Y, which is Y - X.  */
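/* Worked example, added for illustration (not part of the original source):
   with X = 4, Y = 8 and an inner size IX = 10 we have X < Y, so

       OX = round_up (10, 4) + 8 - 4 = 16

   and the padding OX - IX is estimated at 6 bytes; the real padding may be
   smaller once actual addresses are known.  */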
#define LABEL_ALIGN(LABEL) align_labels_log

#define LOOP_ALIGN(LABEL) align_loops_log

#ifndef LABEL_ALIGN_AFTER_BARRIER
#define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
#endif

#define JUMP_ALIGN(LABEL) align_jumps_log

int
default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return 0;
}

int
default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_loops_max_skip;
}

int
default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_labels_max_skip;
}

int
default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_jumps_max_skip;
}
#ifndef ADDR_VEC_ALIGN
static int
final_addr_vec_align (rtx_insn *addr_vec)
{
  int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));

  if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  return exact_log2 (align);
}

#define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
#endif

#ifndef INSN_LENGTH_ALIGNMENT
#define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
#endif

#define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])

static int min_labelno, max_labelno;

#define LABEL_TO_ALIGNMENT(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)

#define LABEL_TO_MAX_SKIP(LABEL) \
  (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)

/* For the benefit of port specific code do this also as a function.  */

int
label_to_alignment (rtx label)
{
  if (CODE_LABEL_NUMBER (label) <= max_labelno)
    return LABEL_TO_ALIGNMENT (label);
  return 0;
}

int
label_to_max_skip (rtx label)
{
  if (CODE_LABEL_NUMBER (label) <= max_labelno)
    return LABEL_TO_MAX_SKIP (label);
  return 0;
}
/* The differences in addresses
   between a branch and its target might grow or shrink depending on
   the alignment the start insn of the range (the branch for a forward
   branch or the label for a backward branch) starts out on; if these
   differences are used naively, they can even oscillate infinitely.
   We therefore want to compute a 'worst case' address difference that
   is independent of the alignment the start insn of the range ends
   up on, and that is at least as large as the actual difference.
   The function align_fuzz calculates the amount we have to add to the
   naively computed difference, by traversing the part of the alignment
   chain of the start insn of the range that is in front of the end insn
   of the range, and considering for each alignment the maximum amount
   that it might contribute to a size increase.

   For casesi tables, we also want to know worst case minimum amounts of
   address difference, in case a machine description wants to introduce
   some common offset that is added to all offsets in a table.
   For this purpose, align_fuzz with a growth argument of 0 computes the
   appropriate adjustment.  */

/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */
static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      if (new_align < known_align)
	continue;
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}

/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */
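/* Numeric illustration, added by the editor (not original source): if the
   previous pass placed the end of a forward branch at address 100 and the
   alignment chain up to its target can add at most 6 bytes of padding, the
   reference address used for range checks is 100 - 6 = 94; a backward
   branch starting at address 100 would use 100 + 6 = 106 instead.  */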
int
insn_current_reference_address (rtx_insn *branch)
{
  rtx dest;
  int seq_uid;

  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
/* Compute branch alignments based on frequency information in the
   CFG.  */

static int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;
  int freq_max = 0;
  int freq_threshold = 0;

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_function_for_size_p (cfun))
    return 0;

  if (dump_file)
    {
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
    }
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  FOR_EACH_BB_FN (bb, cfun)
    if (bb->frequency > freq_max)
      freq_max = bb->frequency;
  freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);

  if (dump_file)
    fprintf (dump_file, "freq_max: %i\n",freq_max);
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      if (!LABEL_P (label)
	  || optimize_bb_for_size_p (bb))
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
		     bb->index, bb->frequency, bb->loop_father->num,
		     bb_loop_depth (bb));
	  continue;
	}
      max_log = LABEL_ALIGN (label);
      max_skip = targetm.asm_out.label_align_max_skip (label);

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}
      if (dump_file)
	{
	  fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
		   " %2i fall %4i branch %4i",
		   bb->index, bb->frequency, bb->loop_father->num,
		   bb_loop_depth (bb),
		   fallthru_frequency, branch_frequency);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}

      /* There are two purposes for aligning a block with no fallthru
	 incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_frequency > freq_threshold
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " jump alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.jump_align_max_skip (label);
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && !(single_succ_p (bb)
	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  && optimize_bb_for_speed_p (bb)
	  && branch_frequency + fallthru_frequency > freq_threshold
	  && (branch_frequency
	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
	{
	  log = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " internal loop alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.loop_align_max_skip (label);
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }

  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);
  return 0;
}
/* Grow the LABEL_ALIGN array after new labels are created.  */

static void
grow_label_align (void)
{
  int old = max_labelno;
  int n_labels;
  int n_old_labels;

  max_labelno = max_label_num ();

  n_labels = max_labelno - min_labelno + 1;
  n_old_labels = old - min_labelno + 1;

  label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);

  /* Range of labels grows monotonically in the function.  Failing here
     means that the initialization of array got lost.  */
  gcc_assert (n_old_labels <= n_labels);

  memset (label_align + n_old_labels, 0,
	  (n_labels - n_old_labels) * sizeof (struct label_alignment));
}
/* Update the already computed alignment information.  LABEL_PAIRS is a vector
   made up of pairs of labels for which the alignment information of the first
   element will be copied from that of the second element.  */

void
update_alignments (vec<rtx> &label_pairs)
{
  unsigned int i = 0;
  rtx iter, label = NULL_RTX;

  if (max_labelno != max_label_num ())
    grow_label_align ();

  FOR_EACH_VEC_ELT (label_pairs, i, iter)
    if (i & 1)
      {
	LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
	LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
      }
    else
      label = iter;
}
const pass_data pass_data_compute_alignments =
{
  RTL_PASS, /* type */
  "alignments", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_compute_alignments : public rtl_opt_pass
{
public:
  pass_compute_alignments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compute_alignments, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return compute_alignments (); }

}; // class pass_compute_alignments

rtl_opt_pass *
make_pass_compute_alignments (gcc::context *ctxt)
{
  return new pass_compute_alignments (ctxt);
}
/* Make a pass over all insns and compute their actual lengths by shortening
   any branches of variable length if possible.  */

/* shorten_branches might be called multiple times: for example, the SH
   port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
   In order to do this, it needs proper length information, which it obtains
   by calling shorten_branches.  This cannot be collapsed with
   shorten_branches itself into a single pass unless we also want to integrate
   reorg.c, since the branch splitting exposes new instructions with delay
   slots.  */

void
shorten_branches (rtx_insn *first)
{
  rtx_insn *insn;
  int max_uid;
  int i;
  int max_log;
  int max_skip;
#define MAX_CODE_ALIGN 16
  rtx_insn *seq;
  int something_changed = 1;
  char *varying_length;
  rtx body;
  int uid;
  rtx align_tab[MAX_CODE_ALIGN];

  /* Compute maximum UID and allocate label_align / uid_shuid.  */
  max_uid = get_max_uid ();

  /* Free uid_shuid before reallocating it.  */
  free (uid_shuid);

  uid_shuid = XNEWVEC (int, max_uid);

  if (max_labelno != max_label_num ())
    grow_label_align ();

  /* Initialize label_align and set up uid_shuid to be strictly
     monotonically rising with insn order.  */
  /* We use max_log here to keep track of the maximum alignment we want to
     impose on the next CODE_LABEL (or the current one if we are processing
     the CODE_LABEL itself).  */

  max_log = 0;
  max_skip = 0;

  for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
    {
      int log;

      INSN_SHUID (insn) = i++;

      if (LABEL_P (insn))
	{
	  rtx_insn *next;
	  bool next_is_jumptable;

	  /* Merge in alignments computed by compute_alignments.  */
	  log = LABEL_TO_ALIGNMENT (insn);
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = LABEL_TO_MAX_SKIP (insn);
	    }

	  next = next_nonnote_insn (insn);
	  next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
	  if (!next_is_jumptable)
	    {
	      log = LABEL_ALIGN (insn);
	      if (max_log < log)
		{
		  max_log = log;
		  max_skip = targetm.asm_out.label_align_max_skip (insn);
		}
	    }
	  /* ADDR_VECs only take room if read-only data goes into the text
	     section.  */
	  if ((JUMP_TABLES_IN_TEXT_SECTION
	       || readonly_data_section == text_section)
	      && next_is_jumptable)
	    {
	      log = ADDR_VEC_ALIGN (next);
	      if (max_log < log)
		{
		  max_log = log;
		  max_skip = targetm.asm_out.label_align_max_skip (insn);
		}
	    }
	  LABEL_TO_ALIGNMENT (insn) = max_log;
	  LABEL_TO_MAX_SKIP (insn) = max_skip;
	  max_log = 0;
	  max_skip = 0;
	}
      else if (BARRIER_P (insn))
	{
	  rtx_insn *label;

	  for (label = insn; label && ! INSN_P (label);
	       label = NEXT_INSN (label))
	    if (LABEL_P (label))
	      {
		log = LABEL_ALIGN_AFTER_BARRIER (insn);
		if (max_log < log)
		  {
		    max_log = log;
		    max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
		  }
		break;
	      }
	}
    }
  if (!HAVE_ATTR_length)
    return;
  /* Allocate the rest of the arrays.  */
  insn_lengths = XNEWVEC (int, max_uid);
  insn_lengths_max_uid = max_uid;
  /* Syntax errors can lead to labels being outside of the main insn stream.
     Initialize insn_addresses, so that we get reproducible results.  */
  INSN_ADDRESSES_ALLOC (max_uid);

  varying_length = XCNEWVEC (char, max_uid);

  /* Initialize uid_align.  We scan instructions
     from end to start, and keep in align_tab[n] the last seen insn
     that does an alignment of at least n+1, i.e. the successor
     in the alignment chain for an insn that does / has a known
     alignment of n.  */
  uid_align = XCNEWVEC (rtx, max_uid);

  for (i = MAX_CODE_ALIGN; --i >= 0;)
    align_tab[i] = NULL_RTX;
  seq = get_last_insn ();
  for (; seq; seq = PREV_INSN (seq))
    {
      int uid = INSN_UID (seq);
      int log;

      log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
      uid_align[uid] = align_tab[0];
      if (log)
	{
	  /* Found an alignment label.  */
	  uid_align[uid] = align_tab[log];
	  for (i = log - 1; i >= 0; i--)
	    align_tab[i] = seq;
	}
    }

  /* When optimizing, we start assuming minimum length, and keep increasing
     lengths as we find the need for this, till nothing changes.
     When not optimizing, we start assuming maximum lengths, and
     do a single pass to update the lengths.  */
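  /* Illustration (editor's note, not original source): with optimization a
     conditional branch might start out at its short form; if a later pass
     computes a displacement outside the short range, its length grows to
     the long form, following addresses shift, and the loop below repeats
     until no length changes.  Without optimization every insn keeps its
     maximum length, so a single pass suffices.  */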
  bool increasing = optimize != 0;

#ifdef CASE_VECTOR_SHORTEN_MODE
  if (optimize)
    {
      /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
	 label fields.  */

      int min_shuid = INSN_SHUID (get_insns ()) - 1;
      int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
      int rel;

      for (insn = first; insn != 0; insn = NEXT_INSN (insn))
	{
	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
	  int len, i, min, max, insn_shuid;
	  int min_align;
	  addr_diff_vec_flags flags;

	  if (! JUMP_TABLE_DATA_P (insn)
	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
	    continue;
	  pat = PATTERN (insn);
	  len = XVECLEN (pat, 1);
	  gcc_assert (len > 0);
	  min_align = MAX_CODE_ALIGN;
	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
	    {
	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
	      int shuid = INSN_SHUID (lab);
	      if (shuid < min)
		{
		  min = shuid;
		  min_lab = lab;
		}
	      if (shuid > max)
		{
		  max = shuid;
		  max_lab = lab;
		}
	      if (min_align > LABEL_TO_ALIGNMENT (lab))
		min_align = LABEL_TO_ALIGNMENT (lab);
	    }
	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
	  insn_shuid = INSN_SHUID (insn);
	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
	  memset (&flags, 0, sizeof (flags));
	  flags.min_align = min_align;
	  flags.base_after_vec = rel > insn_shuid;
	  flags.min_after_vec  = min > insn_shuid;
	  flags.max_after_vec  = max > insn_shuid;
	  flags.min_after_base = min > rel;
	  flags.max_after_base = max > rel;
	  ADDR_DIFF_VEC_FLAGS (pat) = flags;

	  if (increasing)
	    PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
	}
    }
#endif /* CASE_VECTOR_SHORTEN_MODE */
  /* Compute initial lengths, addresses, and varying flags for each insn.  */
  int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;

  for (insn_current_address = 0, insn = first;
       insn != 0;
       insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
    {
      uid = INSN_UID (insn);

      insn_lengths[uid] = 0;

      if (LABEL_P (insn))
	{
	  int log = LABEL_TO_ALIGNMENT (insn);
	  if (log)
	    {
	      int align = 1 << log;
	      int new_address = (insn_current_address + align - 1) & -align;
	      insn_lengths[uid] = new_address - insn_current_address;
	    }
	}

      INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];

      if (NOTE_P (insn) || BARRIER_P (insn)
	  || LABEL_P (insn) || DEBUG_INSN_P (insn))
	continue;
      if (insn->deleted ())
	continue;

      body = PATTERN (insn);
      if (JUMP_TABLE_DATA_P (insn))
	{
	  /* This only takes room if read-only data goes into the text
	     section.  */
	  if (JUMP_TABLES_IN_TEXT_SECTION
	      || readonly_data_section == text_section)
	    insn_lengths[uid] = (XVECLEN (body,
					  GET_CODE (body) == ADDR_DIFF_VEC)
				 * GET_MODE_SIZE (GET_MODE (body)));
	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
	}
      else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
      else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
	{
	  int i;
	  int const_delay_slots;
	  if (DELAY_SLOTS)
	    const_delay_slots = const_num_delay_slots (body_seq->insn (0));
	  else
	    const_delay_slots = 0;

	  int (*inner_length_fun) (rtx_insn *)
	    = const_delay_slots ? length_fun : insn_default_length;
	  /* Inside a delay slot sequence, we do not do any branch shortening
	     if the shortening could change the number of delay slots
	     of the branch.  */
	  for (i = 0; i < body_seq->len (); i++)
	    {
	      rtx_insn *inner_insn = body_seq->insn (i);
	      int inner_uid = INSN_UID (inner_insn);
	      int inner_length;

	      if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
		  || asm_noperands (PATTERN (inner_insn)) >= 0)
		inner_length = (asm_insn_count (PATTERN (inner_insn))
				* insn_default_length (inner_insn));
	      else
		inner_length = inner_length_fun (inner_insn);

	      insn_lengths[inner_uid] = inner_length;
	      if (const_delay_slots)
		{
		  if ((varying_length[inner_uid]
		       = insn_variable_length_p (inner_insn)) != 0)
		    varying_length[uid] = 1;
		  INSN_ADDRESSES (inner_uid) = (insn_current_address
						+ insn_lengths[uid]);
		}
	      else
		varying_length[inner_uid] = 0;
	      insn_lengths[uid] += inner_length;
	    }
	}
      else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
	{
	  insn_lengths[uid] = length_fun (insn);
	  varying_length[uid] = insn_variable_length_p (insn);
	}

      /* If needed, do any adjustment.  */
#ifdef ADJUST_INSN_LENGTH
      ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
      if (insn_lengths[uid] < 0)
	fatal_insn ("negative insn length", insn);
#endif
    }
  /* Now loop over all the insns finding varying length insns.  For each,
     get the current insn length.  If it has changed, reflect the change.
     When nothing changes for a full pass, we are done.  */

  while (something_changed)
    {
      something_changed = 0;
      insn_current_align = MAX_CODE_ALIGN - 1;
      for (insn_current_address = 0, insn = first;
	   insn != 0;
	   insn = NEXT_INSN (insn))
	{
	  int new_length;
#ifdef ADJUST_INSN_LENGTH
	  int tmp_length;
#endif
	  int length_align;

	  uid = INSN_UID (insn);

	  if (LABEL_P (insn))
	    {
	      int log = LABEL_TO_ALIGNMENT (insn);

#ifdef CASE_VECTOR_SHORTEN_MODE
	      /* If the mode of a following jump table was changed, we
		 may need to update the alignment of this label.  */
	      rtx_insn *next;
	      bool next_is_jumptable;

	      next = next_nonnote_insn (insn);
	      next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
	      if ((JUMP_TABLES_IN_TEXT_SECTION
		   || readonly_data_section == text_section)
		  && next_is_jumptable)
		{
		  int newlog = ADDR_VEC_ALIGN (next);
		  if (newlog != log)
		    {
		      log = newlog;
		      LABEL_TO_ALIGNMENT (insn) = log;
		      something_changed = 1;
		    }
		}
#endif

	      if (log > insn_current_align)
		{
		  int align = 1 << log;
		  int new_address = (insn_current_address + align - 1) & -align;
		  insn_lengths[uid] = new_address - insn_current_address;
		  insn_current_align = log;
		  insn_current_address = new_address;
		}
	      else
		insn_lengths[uid] = 0;
	      INSN_ADDRESSES (uid) = insn_current_address;
	      continue;
	    }

	  length_align = INSN_LENGTH_ALIGNMENT (insn);
	  if (length_align < insn_current_align)
	    insn_current_align = length_align;

	  insn_last_address = INSN_ADDRESSES (uid);
	  INSN_ADDRESSES (uid) = insn_current_address;

#ifdef CASE_VECTOR_SHORTEN_MODE
	  if (optimize
	      && JUMP_TABLE_DATA_P (insn)
	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      rtx body = PATTERN (insn);
	      int old_length = insn_lengths[uid];
	      rtx_insn *rel_lab =
		safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
	      rtx min_lab = XEXP (XEXP (body, 2), 0);
	      rtx max_lab = XEXP (XEXP (body, 3), 0);
	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
	      rtx_insn *prev;
	      int rel_align = 0;
	      addr_diff_vec_flags flags;
	      machine_mode vec_mode;

	      /* Avoid automatic aggregate initialization.  */
	      flags = ADDR_DIFF_VEC_FLAGS (body);

	      /* Try to find a known alignment for rel_lab.  */
	      for (prev = rel_lab;
		   prev
		   && ! insn_lengths[INSN_UID (prev)]
		   && ! (varying_length[INSN_UID (prev)] & 1);
		   prev = PREV_INSN (prev))
		if (varying_length[INSN_UID (prev)] & 2)
		  {
		    rel_align = LABEL_TO_ALIGNMENT (prev);
		    break;
		  }

	      /* See the comment on addr_diff_vec_flags in rtl.h for the
		 meaning of the flags values.  base: REL_LAB vec: INSN.  */
	      /* Anything after INSN still has addresses from the last
		 pass; adjust these so that they reflect our current
		 estimate for this pass.  */
	      if (flags.base_after_vec)
		rel_addr += insn_current_address - insn_last_address;
	      if (flags.min_after_vec)
		min_addr += insn_current_address - insn_last_address;
	      if (flags.max_after_vec)
		max_addr += insn_current_address - insn_last_address;
	      /* We want to know the worst case, i.e. lowest possible value
		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
		 its offset is positive, and we have to be wary of code shrink;
		 otherwise, it is negative, and we have to be wary of code
		 size increase.  */
	      if (flags.min_after_base)
		{
		  /* If INSN is between REL_LAB and MIN_LAB, the size
		     changes we are about to make can change the alignment
		     within the observed offset, therefore we have to break
		     it up into two parts that are independent.  */
		  if (! flags.base_after_vec && flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
		    }
		  else
		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.min_after_vec)
		    {
		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
		    }
		  else
		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
		}
	      /* Likewise, determine the highest possible value
		 for the offset of MAX_LAB.  */
	      if (flags.max_after_base)
		{
		  if (! flags.base_after_vec && flags.max_after_vec)
		    {
		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
		    }
		  else
		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
		}
	      else
		{
		  if (flags.base_after_vec && ! flags.max_after_vec)
		    {
		      max_addr += align_fuzz (max_lab, insn, 0, 0);
		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
		    }
		  else
		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
		}
	      vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
						   max_addr - rel_addr, body);
	      if (!increasing
		  || (GET_MODE_SIZE (vec_mode)
		      >= GET_MODE_SIZE (GET_MODE (body))))
		PUT_MODE (body, vec_mode);
	      if (JUMP_TABLES_IN_TEXT_SECTION
		  || readonly_data_section == text_section)
		{
		  insn_lengths[uid]
		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
		  insn_current_address += insn_lengths[uid];
		  if (insn_lengths[uid] != old_length)
		    something_changed = 1;
		}

	      continue;
	    }
#endif /* CASE_VECTOR_SHORTEN_MODE */

	  if (! (varying_length[uid]))
	    {
	      if (NONJUMP_INSN_P (insn)
		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  int i;

		  body = PATTERN (insn);
		  for (i = 0; i < XVECLEN (body, 0); i++)
		    {
		      rtx inner_insn = XVECEXP (body, 0, i);
		      int inner_uid = INSN_UID (inner_insn);

		      INSN_ADDRESSES (inner_uid) = insn_current_address;

		      insn_current_address += insn_lengths[inner_uid];
		    }
		}
	      else
		insn_current_address += insn_lengths[uid];

	      continue;
	    }

	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
	      int i;

	      body = PATTERN (insn);
	      new_length = 0;
	      for (i = 0; i < seqn->len (); i++)
		{
		  rtx_insn *inner_insn = seqn->insn (i);
		  int inner_uid = INSN_UID (inner_insn);
		  int inner_length;

		  INSN_ADDRESSES (inner_uid) = insn_current_address;

		  /* insn_current_length returns 0 for insns with a
		     non-varying length.  */
		  if (! varying_length[inner_uid])
		    inner_length = insn_lengths[inner_uid];
		  else
		    inner_length = insn_current_length (inner_insn);

		  if (inner_length != insn_lengths[inner_uid])
		    {
		      if (!increasing || inner_length > insn_lengths[inner_uid])
			{
			  insn_lengths[inner_uid] = inner_length;
			  something_changed = 1;
			}
		      else
			inner_length = insn_lengths[inner_uid];
		    }
		  insn_current_address += inner_length;
		  new_length += inner_length;
		}
	    }
	  else
	    {
	      new_length = insn_current_length (insn);
	      insn_current_address += new_length;
	    }

#ifdef ADJUST_INSN_LENGTH
	  /* If needed, do any adjustment.  */
	  tmp_length = new_length;
	  ADJUST_INSN_LENGTH (insn, new_length);
	  insn_current_address += (new_length - tmp_length);
#endif

	  if (new_length != insn_lengths[uid]
	      && (!increasing || new_length > insn_lengths[uid]))
	    {
	      insn_lengths[uid] = new_length;
	      something_changed = 1;
	    }
	  else
	    insn_current_address += insn_lengths[uid] - new_length;
	}
      /* For a non-optimizing compile, do only a single pass.  */
      if (!optimize)
	break;
    }

  free (varying_length);
}
/* Given the body of an INSN known to be generated by an ASM statement, return
   the number of machine instructions likely to be generated for this insn.
   This is used to compute its length.  */

static int
asm_insn_count (rtx body)
{
  const char *templ;

  if (GET_CODE (body) == ASM_INPUT)
    templ = XSTR (body, 0);
  else
    templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);

  return asm_str_count (templ);
}

/* Return the number of machine instructions likely to be generated for the
   inline-asm template.  */
int
asm_str_count (const char *templ)
{
  int count = 1;

  if (!*templ)
    return 0;

  for (; *templ; templ++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
	|| *templ == '\n')
      count++;

  return count;
}
1506 /* Structure recording the mapping from source file and directory
1507 names at compile time to those to be embedded in debug
1509 struct debug_prefix_map
1511 const char *old_prefix
;
1512 const char *new_prefix
;
1515 struct debug_prefix_map
*next
;
1518 /* Linked list of such structures. */
1519 static debug_prefix_map
*debug_prefix_maps
;
1522 /* Record a debug file prefix mapping. ARG is the argument to
1523 -fdebug-prefix-map and must be of the form OLD=NEW. */
1526 add_debug_prefix_map (const char *arg
)
1528 debug_prefix_map
*map
;
1531 p
= strchr (arg
, '=');
1534 error ("invalid argument %qs to -fdebug-prefix-map", arg
);
1537 map
= XNEW (debug_prefix_map
);
1538 map
->old_prefix
= xstrndup (arg
, p
- arg
);
1539 map
->old_len
= p
- arg
;
1541 map
->new_prefix
= xstrdup (p
);
1542 map
->new_len
= strlen (p
);
1543 map
->next
= debug_prefix_maps
;
1544 debug_prefix_maps
= map
;
1547 /* Perform user-specified mapping of debug filename prefixes. Return
1548 the new name corresponding to FILENAME. */
1551 remap_debug_filename (const char *filename
)
1553 debug_prefix_map
*map
;
1558 for (map
= debug_prefix_maps
; map
; map
= map
->next
)
1559 if (filename_ncmp (filename
, map
->old_prefix
, map
->old_len
) == 0)
1563 name
= filename
+ map
->old_len
;
1564 name_len
= strlen (name
) + 1;
1565 s
= (char *) alloca (name_len
+ map
->new_len
);
1566 memcpy (s
, map
->new_prefix
, map
->new_len
);
1567 memcpy (s
+ map
->new_len
, name
, name_len
);
1568 return ggc_strdup (s
);
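/* Example, added for illustration (not original source): after
   -fdebug-prefix-map=/home/user/build=/src (a hypothetical mapping),
   remap_debug_filename ("/home/user/build/lib/foo.c") returns
   "/src/lib/foo.c"; filenames that do not start with a recorded old
   prefix are returned unchanged.  */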
/* Return true if DWARF2 debug info can be emitted for DECL.  */

static bool
dwarf2_debug_info_emitted_p (tree decl)
{
  if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
    return false;

  if (DECL_IGNORED_P (decl))
    return false;

  return true;
}
/* Return scope resulting from combination of S1 and S2.  */
static tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx_insn *orig_insn, tree s1, tree s2)
{
  rtx_insn *insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

static void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx_insn *insn;
  rtx_note *note;

  insn = get_insns ();
  for (; insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      /* Prevent lexical blocks from straddling section boundaries.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
	{
	  for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
	       s = BLOCK_SUPERCONTEXT (s))
	    {
	      rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
	      NOTE_BLOCK (note) = s;
	      note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
	      NOTE_BLOCK (note) = s;
	    }
	}

      if (!active_insn_p (insn))
	continue;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	{
	  int i;

	  this_block = NULL;
	  for (i = 0; i < body->len (); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (body->insn (i)));
	}
      if (! this_block)
	{
	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
	    continue;
	  else
	    this_block = DECL_INITIAL (cfun->decl);
	}

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
}
;
1714 /* Locate some local-dynamic symbol still in use by this function
1715 so that we can print its name in local-dynamic base patterns.
1716 Return null if there are no local-dynamic references. */
1719 get_some_local_dynamic_name ()
1721 subrtx_iterator::array_type array
;
1724 if (some_local_dynamic_name
)
1725 return some_local_dynamic_name
;
1727 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1728 if (NONDEBUG_INSN_P (insn
))
1729 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), ALL
)
1731 const_rtx x
= *iter
;
1732 if (GET_CODE (x
) == SYMBOL_REF
)
1734 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
1735 return some_local_dynamic_name
= XSTR (x
, 0);
1736 if (CONSTANT_POOL_ADDRESS_P (x
))
1737 iter
.substitute (get_pool_constant (x
));
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE_P is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx_insn *first, FILE *file,
		      int optimize_p ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  need_profile_function = false;

  last_filename = LOCATION_FILE (prologue_location);
  last_linenum = LOCATION_LINE (prologue_location);
  last_discriminator = discriminator = 0;

  high_block_linenum = high_function_linenum = last_linenum;

  if (flag_sanitize & SANITIZE_ADDRESS)
    asan_function_start ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->begin_prologue (last_linenum, last_filename);

  if (!dwarf2_debug_info_emitted_p (current_function_decl))
    dwarf2out_begin_prologue (0, NULL);

#ifdef LEAF_REG_REMAP
  if (crtl->uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
  if (targetm.profile_before_prologue () && crtl->profile)
    {
      if (targetm.asm_out.function_prologue == default_function_pro_epilogue
	  && targetm.have_prologue ())
	{
	  rtx_insn *insn;
	  for (insn = first; insn; insn = NEXT_INSN (insn))
	    if (!NOTE_P (insn))
	      {
		insn = NULL;
		break;
	      }
	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	      break;
	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	      continue;
	    else
	      {
		insn = NULL;
		break;
	      }

	  if (insn)
	    need_profile_function = true;
	  else
	    profile_function (file);
	}
      else
	profile_function (file);
    }

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning.  */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
  if (! targetm.have_prologue ())
    profile_after_prologue (file);
}
static void
profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
{
  if (!targetm.profile_before_prologue () && crtl->profile)
    profile_function (file);
}

static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
#ifdef ASM_OUTPUT_REG_PUSH
  rtx sval = NULL, chain = NULL;

  if (cfun->returns_struct)
    sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
					   true);
  if (cfun->static_chain_decl)
    chain = targetm.calls.static_chain (current_function_decl, true);
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#ifdef ASM_OUTPUT_REG_PUSH
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#ifdef ASM_OUTPUT_REG_PUSH
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_POP (file, REGNO (chain));
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_POP (file, REGNO (sval));
#endif
}
/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_function (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_epilogue (last_linenum, last_filename);

  if (!dwarf2_debug_info_emitted_p (current_function_decl)
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);

  some_local_dynamic_name = 0;
}
/* Dumper helper for basic block information.  FILE is the assembly
   output file, and INSN is the instruction being emitted.  */

static void
dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
		       basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
{
  basic_block bb;

  if (!flag_debug_asm)
    return;

  if (INSN_UID (insn) < bb_map_size
      && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
    {
      edge e;
      edge_iterator ei;

      fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
      if (bb->frequency)
	fprintf (file, " freq:%d", bb->frequency);
      if (bb->count)
	fprintf (file, " count:%" PRId64,
		 bb->count);
      fprintf (file, " seq:%d", (*bb_seqn)++);
      fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  dump_edge_info (file, e, TDF_DETAILS, 0);
	}
      fprintf (file, "\n");
    }
  if (INSN_UID (insn) < bb_map_size
      && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
    {
      edge e;
      edge_iterator ei;

      fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
	}
      fprintf (file, "\n");
    }
}
/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

void
final (rtx_insn *first, FILE *file, int optimize_p)
{
  rtx_insn *insn, *next;
  int seen = 0;

  /* Used for -dA dump.  */
  basic_block *start_to_bb = NULL;
  basic_block *end_to_bb = NULL;
  int bb_map_size = 0;
  int bb_seqn = 0;

  last_ignored_compare = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize_p && JUMP_P (insn))
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
    }

  if (flag_debug_asm)
    {
      basic_block bb;

      bb_map_size = get_max_uid () + 1;
      start_to_bb = XCNEWVEC (basic_block, bb_map_size);
      end_to_bb = XCNEWVEC (basic_block, bb_map_size);

      /* There is no cfg for a thunk.  */
      if (!cfun->is_thunk)
	FOR_EACH_BB_REVERSE_FN (bb, cfun)
	  {
	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
	  }
    }

  /* Output the insns.  */
  for (insn = first; insn;)
    {
      if (HAVE_ATTR_length)
	{
	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	    {
	      /* This can be triggered by bugs elsewhere in the compiler if
		 new insns are created after init_insn_lengths is called.  */
	      gcc_assert (NOTE_P (insn));
	      insn_current_address = -1;
	    }
	  else
	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
	}

      dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
			     bb_map_size, &bb_seqn);
      insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
    }

  if (flag_debug_asm)
    {
      free (start_to_bb);
      free (end_to_bb);
    }

  /* Remove CFI notes, to avoid compare-debug failures.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
	delete_insn (insn);
    }
}
const char *
get_insn_template (int code, rtx insn)
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
      return insn_data[code].output.single;
    case INSN_OUTPUT_FORMAT_MULTI:
      return insn_data[code].output.multi[which_alternative];
    case INSN_OUTPUT_FORMAT_FUNCTION:
      gcc_assert (insn);
      return (*insn_data[code].output.function) (recog_data.operand,
						 as_a <rtx_insn *> (insn));

    default:
      gcc_unreachable ();
    }
}
/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional.  */
static void
output_alternate_entry_point (FILE *file, rtx_insn *insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Given a CALL_INSN, find and return the nested CALL.  */
static rtx
call_from_call_insn (rtx_call_insn *insn)
{
  rtx x;
  gcc_assert (CALL_P (insn));
  x = PATTERN (insn);

  while (GET_CODE (x) != CALL)
    {
      switch (GET_CODE (x))
	{
	default:
	  gcc_unreachable ();
	case COND_EXEC:
	  x = COND_EXEC_CODE (x);
	  break;
	case PARALLEL:
	  x = XVECEXP (x, 0, 0);
	  break;
	case SET:
	  x = XEXP (x, 1);
	  break;
	}
    }
  return x;
}
/* Print a comment into the asm showing FILENAME, LINENUM, and the
   corresponding source line, if available.  */

static void
asm_show_source (const char *filename, int linenum)
{
  if (!filename)
    return;

  int line_size;
  const char *line = location_get_source_line (filename, linenum, &line_size);
  if (!line)
    return;

  fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
  /* "line" is not 0-terminated, so we must use line_size.  */
  fwrite (line, 1, line_size, asm_out_file);
  fputc ('\n', asm_out_file);
}
/* The final scan for one insn, INSN.
   Args are same as in `final', except that INSN
   is the insn being scanned.
   Value returned is the next insn to be scanned.

   NOPEEPHOLES is the flag to disallow peephole processing (currently
   used for within delayed branch sequence output).

   SEEN is used to track the end of the prologue, for emitting
   debug information.  We force the emission of a line note after
   both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */

rtx_insn *
final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (insn->deleted ())
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_KIND (insn))
	{
	case NOTE_INSN_DELETED:
	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	  break;
:
2204 in_cold_section_p
= !in_cold_section_p
;
2206 if (dwarf2out_do_frame ())
2207 dwarf2out_switch_text_section ();
2208 else if (!DECL_IGNORED_P (current_function_decl
))
2209 debug_hooks
->switch_text_section ();
2211 switch_to_section (current_function_section ());
2212 targetm
.asm_out
.function_switched_text_sections (asm_out_file
,
2213 current_function_decl
,
2215 /* Emit a label for the split cold section. Form label name by
2216 suffixing "cold" to the original function's name. */
2217 if (in_cold_section_p
)
2220 = clone_function_name (current_function_decl
, "cold");
2221 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2222 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file
,
2224 (cold_function_name
),
2225 current_function_decl
);
2227 ASM_OUTPUT_LABEL (asm_out_file
,
2228 IDENTIFIER_POINTER (cold_function_name
));
2233 case NOTE_INSN_BASIC_BLOCK
:
2234 if (need_profile_function
)
2236 profile_function (asm_out_file
);
2237 need_profile_function
= false;
2240 if (targetm
.asm_out
.unwind_emit
)
2241 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
2243 discriminator
= NOTE_BASIC_BLOCK (insn
)->discriminator
;
2247 case NOTE_INSN_EH_REGION_BEG
:
2248 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHB",
2249 NOTE_EH_HANDLER (insn
));
2252 case NOTE_INSN_EH_REGION_END
:
2253 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LEHE",
2254 NOTE_EH_HANDLER (insn
));
2257 case NOTE_INSN_PROLOGUE_END
:
2258 targetm
.asm_out
.function_end_prologue (file
);
2259 profile_after_prologue (file
);
2261 if ((*seen
& (SEEN_EMITTED
| SEEN_NOTE
)) == SEEN_NOTE
)
2263 *seen
|= SEEN_EMITTED
;
2264 force_source_line
= true;
2271 case NOTE_INSN_EPILOGUE_BEG
:
2272 if (!DECL_IGNORED_P (current_function_decl
))
2273 (*debug_hooks
->begin_epilogue
) (last_linenum
, last_filename
);
2274 targetm
.asm_out
.function_begin_epilogue (file
);
2278 dwarf2out_emit_cfi (NOTE_CFI (insn
));
2281 case NOTE_INSN_CFI_LABEL
:
2282 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LCFI",
2283 NOTE_LABEL_NUMBER (insn
));
2286 case NOTE_INSN_FUNCTION_BEG
:
2287 if (need_profile_function
)
2289 profile_function (asm_out_file
);
2290 need_profile_function
= false;
2294 if (!DECL_IGNORED_P (current_function_decl
))
2295 debug_hooks
->end_prologue (last_linenum
, last_filename
);
2297 if ((*seen
& (SEEN_EMITTED
| SEEN_NOTE
)) == SEEN_NOTE
)
2299 *seen
|= SEEN_EMITTED
;
2300 force_source_line
= true;
2307 case NOTE_INSN_BLOCK_BEG
:
2308 if (debug_info_level
== DINFO_LEVEL_NORMAL
2309 || debug_info_level
== DINFO_LEVEL_VERBOSE
2310 || write_symbols
== DWARF2_DEBUG
2311 || write_symbols
== VMS_AND_DWARF2_DEBUG
2312 || write_symbols
== VMS_DEBUG
)
2314 int n
= BLOCK_NUMBER (NOTE_BLOCK (insn
));
2318 high_block_linenum
= last_linenum
;
2320 /* Output debugging info about the symbol-block beginning. */
2321 if (!DECL_IGNORED_P (current_function_decl
))
2322 debug_hooks
->begin_block (last_linenum
, n
);
2324 /* Mark this block as output. */
2325 TREE_ASM_WRITTEN (NOTE_BLOCK (insn
)) = 1;
2326 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn
)) = in_cold_section_p
;
2328 if (write_symbols
== DBX_DEBUG
2329 || write_symbols
== SDB_DEBUG
)
2331 location_t
*locus_ptr
2332 = block_nonartificial_location (NOTE_BLOCK (insn
));
2334 if (locus_ptr
!= NULL
)
2336 override_filename
= LOCATION_FILE (*locus_ptr
);
2337 override_linenum
= LOCATION_LINE (*locus_ptr
);
2342 case NOTE_INSN_BLOCK_END
:
2343 if (debug_info_level
== DINFO_LEVEL_NORMAL
2344 || debug_info_level
== DINFO_LEVEL_VERBOSE
2345 || write_symbols
== DWARF2_DEBUG
2346 || write_symbols
== VMS_AND_DWARF2_DEBUG
2347 || write_symbols
== VMS_DEBUG
)
2349 int n
= BLOCK_NUMBER (NOTE_BLOCK (insn
));
2353 /* End of a symbol-block. */
2355 gcc_assert (block_depth
>= 0);
2357 if (!DECL_IGNORED_P (current_function_decl
))
2358 debug_hooks
->end_block (high_block_linenum
, n
);
2359 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn
))
2360 == in_cold_section_p
);
2362 if (write_symbols
== DBX_DEBUG
2363 || write_symbols
== SDB_DEBUG
)
2365 tree outer_block
= BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn
));
2366 location_t
*locus_ptr
2367 = block_nonartificial_location (outer_block
);
2369 if (locus_ptr
!= NULL
)
2371 override_filename
= LOCATION_FILE (*locus_ptr
);
2372 override_linenum
= LOCATION_LINE (*locus_ptr
);
2376 override_filename
= NULL
;
2377 override_linenum
= 0;
	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken).  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
	  break;
2389 case NOTE_INSN_DELETED_DEBUG_LABEL
:
2390 /* Similarly, but need to use different namespace for it. */
2391 if (CODE_LABEL_NUMBER (insn
) != -1)
2392 ASM_OUTPUT_DEBUG_LABEL (file
, "LDL", CODE_LABEL_NUMBER (insn
));
2395 case NOTE_INSN_VAR_LOCATION
:
2396 case NOTE_INSN_CALL_ARG_LOCATION
:
2397 if (!DECL_IGNORED_P (current_function_decl
))
2398 debug_hooks
->var_location (insn
);
2411 /* The target port might emit labels in the output function for
2412 some insn, e.g. sh.c output_branchy_insn. */
2413 if (CODE_LABEL_NUMBER (insn
) <= max_labelno
)
2415 int align
= LABEL_TO_ALIGNMENT (insn
);
2416 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2417 int max_skip
= LABEL_TO_MAX_SKIP (insn
);
2420 if (align
&& NEXT_INSN (insn
))
2422 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2423 ASM_OUTPUT_MAX_SKIP_ALIGN (file
, align
, max_skip
);
2425 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2426 ASM_OUTPUT_ALIGN_WITH_NOP (file
, align
);
2428 ASM_OUTPUT_ALIGN (file
, align
);
2435 if (!DECL_IGNORED_P (current_function_decl
) && LABEL_NAME (insn
))
2436 debug_hooks
->label (as_a
<rtx_code_label
*> (insn
));
2440 next
= next_nonnote_insn (insn
);
2441 /* If this label is followed by a jump-table, make sure we put
2442 the label in the read-only section. Also possibly write the
2443 label and jump table together. */
2444 if (next
!= 0 && JUMP_TABLE_DATA_P (next
))
2446 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2447 /* In this case, the case vector is being moved by the
2448 target, so don't output the label at all. Leave that
2449 to the back end macros. */
2451 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2455 switch_to_section (targetm
.asm_out
.function_rodata_section
2456 (current_function_decl
));
2458 #ifdef ADDR_VEC_ALIGN
2459 log_align
= ADDR_VEC_ALIGN (next
);
2461 log_align
= exact_log2 (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
2463 ASM_OUTPUT_ALIGN (file
, log_align
);
2466 switch_to_section (current_function_section ());
2468 #ifdef ASM_OUTPUT_CASE_LABEL
2469 ASM_OUTPUT_CASE_LABEL (file
, "L", CODE_LABEL_NUMBER (insn
),
2472 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2477 if (LABEL_ALT_ENTRY_P (insn
))
2478 output_alternate_entry_point (file
, insn
);
2480 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (insn
));
2485 rtx body
= PATTERN (insn
);
2486 int insn_code_number
;
2490 /* Reset this early so it is correct for ASM statements. */
2491 current_insn_predicate
= NULL_RTX
;
2493 /* An INSN, JUMP_INSN or CALL_INSN.
2494 First check for special kinds that recog doesn't recognize. */
2496 if (GET_CODE (body
) == USE
/* These are just declarations. */
2497 || GET_CODE (body
) == CLOBBER
)
2502 /* If there is a REG_CC_SETTER note on this insn, it means that
2503 the setting of the condition code was done in the delay slot
2504 of the insn that branched here. So recover the cc status
2505 from the insn that set it. */
2507 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
2510 rtx_insn
*other
= as_a
<rtx_insn
*> (XEXP (note
, 0));
2511 NOTICE_UPDATE_CC (PATTERN (other
), other
);
2512 cc_prev_status
= cc_status
;
2517 /* Detect insns that are really jump-tables
2518 and output them as such. */
2520 if (JUMP_TABLE_DATA_P (insn
))
2522 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2526 if (! JUMP_TABLES_IN_TEXT_SECTION
)
2527 switch_to_section (targetm
.asm_out
.function_rodata_section
2528 (current_function_decl
));
2530 switch_to_section (current_function_section ());
2534 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2535 if (GET_CODE (body
) == ADDR_VEC
)
2537 #ifdef ASM_OUTPUT_ADDR_VEC
2538 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn
), body
);
2545 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2546 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn
), body
);
2552 vlen
= XVECLEN (body
, GET_CODE (body
) == ADDR_DIFF_VEC
);
2553 for (idx
= 0; idx
< vlen
; idx
++)
2555 if (GET_CODE (body
) == ADDR_VEC
)
2557 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2558 ASM_OUTPUT_ADDR_VEC_ELT
2559 (file
, CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 0, idx
), 0)));
2566 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2567 ASM_OUTPUT_ADDR_DIFF_ELT
2570 CODE_LABEL_NUMBER (XEXP (XVECEXP (body
, 1, idx
), 0)),
2571 CODE_LABEL_NUMBER (XEXP (XEXP (body
, 0), 0)));
2577 #ifdef ASM_OUTPUT_CASE_END
2578 ASM_OUTPUT_CASE_END (file
,
2579 CODE_LABEL_NUMBER (PREV_INSN (insn
)),
2584 switch_to_section (current_function_section ());
2588 /* Output this line note if it is the first or the last line
2590 if (!DECL_IGNORED_P (current_function_decl
)
2591 && notice_source_line (insn
, &is_stmt
))
2593 if (flag_verbose_asm
)
2594 asm_show_source (last_filename
, last_linenum
);
2595 (*debug_hooks
->source_line
) (last_linenum
, last_filename
,
2596 last_discriminator
, is_stmt
);
2599 if (GET_CODE (body
) == PARALLEL
2600 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_INPUT
)
2601 body
= XVECEXP (body
, 0, 0);
2603 if (GET_CODE (body
) == ASM_INPUT
)
2605 const char *string
= XSTR (body
, 0);
2607 /* There's no telling what that did to the condition codes. */
2612 expanded_location loc
;
2615 loc
= expand_location (ASM_INPUT_SOURCE_LOCATION (body
));
2616 if (*loc
.file
&& loc
.line
)
2617 fprintf (asm_out_file
, "%s %i \"%s\" 1\n",
2618 ASM_COMMENT_START
, loc
.line
, loc
.file
);
2619 fprintf (asm_out_file
, "\t%s\n", string
);
2620 #if HAVE_AS_LINE_ZERO
2621 if (*loc
.file
&& loc
.line
)
2622 fprintf (asm_out_file
, "%s 0 \"\" 2\n", ASM_COMMENT_START
);
2628 /* Detect `asm' construct with operands. */
2629 if (asm_noperands (body
) >= 0)
2631 unsigned int noperands
= asm_noperands (body
);
2632 rtx
*ops
= XALLOCAVEC (rtx
, noperands
);
2635 expanded_location expanded
;
2637 /* There's no telling what that did to the condition codes. */
2640 /* Get out the operand values. */
2641 string
= decode_asm_operands (body
, ops
, NULL
, NULL
, NULL
, &loc
);
2642 /* Inhibit dying on what would otherwise be compiler bugs. */
2643 insn_noperands
= noperands
;
2644 this_is_asm_operands
= insn
;
2645 expanded
= expand_location (loc
);
2647 #ifdef FINAL_PRESCAN_INSN
2648 FINAL_PRESCAN_INSN (insn
, ops
, insn_noperands
);
2651 /* Output the insn using them. */
2655 if (expanded
.file
&& expanded
.line
)
2656 fprintf (asm_out_file
, "%s %i \"%s\" 1\n",
2657 ASM_COMMENT_START
, expanded
.line
, expanded
.file
);
2658 output_asm_insn (string
, ops
);
2659 #if HAVE_AS_LINE_ZERO
2660 if (expanded
.file
&& expanded
.line
)
2661 fprintf (asm_out_file
, "%s 0 \"\" 2\n", ASM_COMMENT_START
);
2665 if (targetm
.asm_out
.final_postscan_insn
)
2666 targetm
.asm_out
.final_postscan_insn (file
, insn
, ops
,
2669 this_is_asm_operands
= 0;
2675 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (body
))
2677 /* A delayed-branch sequence */
2680 final_sequence
= seq
;
2682 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2683 force the restoration of a comparison that was previously
2684 thought unnecessary. If that happens, cancel this sequence
2685 and cause that insn to be restored. */
2687 next
= final_scan_insn (seq
->insn (0), file
, 0, 1, seen
);
2688 if (next
!= seq
->insn (1))
2694 for (i
= 1; i
< seq
->len (); i
++)
2696 rtx_insn
*insn
= seq
->insn (i
);
2697 rtx_insn
*next
= NEXT_INSN (insn
);
2698 /* We loop in case any instruction in a delay slot gets
2701 insn
= final_scan_insn (insn
, file
, 0, 1, seen
);
2702 while (insn
!= next
);
2704 #ifdef DBR_OUTPUT_SEQEND
2705 DBR_OUTPUT_SEQEND (file
);
2709 /* If the insn requiring the delay slot was a CALL_INSN, the
2710 insns in the delay slot are actually executed before the
2711 called function. Hence we don't preserve any CC-setting
2712 actions in these insns and the CC must be marked as being
2713 clobbered by the function. */
2714 if (CALL_P (seq
->insn (0)))
2721 /* We have a real machine instruction as rtl. */
2723 body
= PATTERN (insn
);
2726 set
= single_set (insn
);
2728 /* Check for redundant test and compare instructions
2729 (when the condition codes are already set up as desired).
2730 This is done only when optimizing; if not optimizing,
2731 it should be possible for the user to alter a variable
2732 with the debugger in between statements
2733 and the next statement should reexamine the variable
2734 to compute the condition codes. */
2739 && GET_CODE (SET_DEST (set
)) == CC0
2740 && insn
!= last_ignored_compare
)
2743 if (GET_CODE (SET_SRC (set
)) == SUBREG
)
2744 SET_SRC (set
) = alter_subreg (&SET_SRC (set
), true);
2746 src1
= SET_SRC (set
);
2748 if (GET_CODE (SET_SRC (set
)) == COMPARE
)
2750 if (GET_CODE (XEXP (SET_SRC (set
), 0)) == SUBREG
)
2751 XEXP (SET_SRC (set
), 0)
2752 = alter_subreg (&XEXP (SET_SRC (set
), 0), true);
2753 if (GET_CODE (XEXP (SET_SRC (set
), 1)) == SUBREG
)
2754 XEXP (SET_SRC (set
), 1)
2755 = alter_subreg (&XEXP (SET_SRC (set
), 1), true);
2756 if (XEXP (SET_SRC (set
), 1)
2757 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set
), 0))))
2758 src2
= XEXP (SET_SRC (set
), 0);
2760 if ((cc_status
.value1
!= 0
2761 && rtx_equal_p (src1
, cc_status
.value1
))
2762 || (cc_status
.value2
!= 0
2763 && rtx_equal_p (src1
, cc_status
.value2
))
2764 || (src2
!= 0 && cc_status
.value1
!= 0
2765 && rtx_equal_p (src2
, cc_status
.value1
))
2766 || (src2
!= 0 && cc_status
.value2
!= 0
2767 && rtx_equal_p (src2
, cc_status
.value2
)))
2769 /* Don't delete insn if it has an addressing side-effect. */
2770 if (! FIND_REG_INC_NOTE (insn
, NULL_RTX
)
2771 /* or if anything in it is volatile. */
2772 && ! volatile_refs_p (PATTERN (insn
)))
2774 /* We don't really delete the insn; just ignore it. */
2775 last_ignored_compare
= insn
;
2782 /* If this is a conditional branch, maybe modify it
2783 if the cc's are in a nonstandard state
2784 so that it accomplishes the same thing that it would
2785 do straightforwardly if the cc's were set up normally. */
2787 if (cc_status
.flags
!= 0
2789 && GET_CODE (body
) == SET
2790 && SET_DEST (body
) == pc_rtx
2791 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2792 && COMPARISON_P (XEXP (SET_SRC (body
), 0))
2793 && XEXP (XEXP (SET_SRC (body
), 0), 0) == cc0_rtx
)
2795 /* This function may alter the contents of its argument
2796 and clear some of the cc_status.flags bits.
2797 It may also return 1 meaning condition now always true
2798 or -1 meaning condition now always false
2799 or 2 meaning condition nontrivial but altered. */
2800 int result
= alter_cond (XEXP (SET_SRC (body
), 0));
2801 /* If condition now has fixed value, replace the IF_THEN_ELSE
2802 with its then-operand or its else-operand. */
2804 SET_SRC (body
) = XEXP (SET_SRC (body
), 1);
2806 SET_SRC (body
) = XEXP (SET_SRC (body
), 2);
2808 /* The jump is now either unconditional or a no-op.
2809 If it has become a no-op, don't try to output it.
2810 (It would not be recognized.) */
2811 if (SET_SRC (body
) == pc_rtx
)
2816 else if (ANY_RETURN_P (SET_SRC (body
)))
2817 /* Replace (set (pc) (return)) with (return). */
2818 PATTERN (insn
) = body
= SET_SRC (body
);
2820 /* Rerecognize the instruction if it has changed. */
2822 INSN_CODE (insn
) = -1;
2825 /* If this is a conditional trap, maybe modify it if the cc's
2826 are in a nonstandard state so that it accomplishes the same
2827 thing that it would do straightforwardly if the cc's were
2829 if (cc_status
.flags
!= 0
2830 && NONJUMP_INSN_P (insn
)
2831 && GET_CODE (body
) == TRAP_IF
2832 && COMPARISON_P (TRAP_CONDITION (body
))
2833 && XEXP (TRAP_CONDITION (body
), 0) == cc0_rtx
)
2835 /* This function may alter the contents of its argument
2836 and clear some of the cc_status.flags bits.
2837 It may also return 1 meaning condition now always true
2838 or -1 meaning condition now always false
2839 or 2 meaning condition nontrivial but altered. */
2840 int result
= alter_cond (TRAP_CONDITION (body
));
2842 /* If TRAP_CONDITION has become always false, delete the
2850 /* If TRAP_CONDITION has become always true, replace
2851 TRAP_CONDITION with const_true_rtx. */
2853 TRAP_CONDITION (body
) = const_true_rtx
;
2855 /* Rerecognize the instruction if it has changed. */
2857 INSN_CODE (insn
) = -1;
2860 /* Make same adjustments to instructions that examine the
2861 condition codes without jumping and instructions that
2862 handle conditional moves (if this machine has either one). */
2864 if (cc_status
.flags
!= 0
2867 rtx cond_rtx
, then_rtx
, else_rtx
;
2870 && GET_CODE (SET_SRC (set
)) == IF_THEN_ELSE
)
2872 cond_rtx
= XEXP (SET_SRC (set
), 0);
2873 then_rtx
= XEXP (SET_SRC (set
), 1);
2874 else_rtx
= XEXP (SET_SRC (set
), 2);
2878 cond_rtx
= SET_SRC (set
);
2879 then_rtx
= const_true_rtx
;
2880 else_rtx
= const0_rtx
;
2883 if (COMPARISON_P (cond_rtx
)
2884 && XEXP (cond_rtx
, 0) == cc0_rtx
)
2887 result
= alter_cond (cond_rtx
);
2889 validate_change (insn
, &SET_SRC (set
), then_rtx
, 0);
2890 else if (result
== -1)
2891 validate_change (insn
, &SET_SRC (set
), else_rtx
, 0);
2892 else if (result
== 2)
2893 INSN_CODE (insn
) = -1;
2894 if (SET_DEST (set
) == SET_SRC (set
))
2901 /* Do machine-specific peephole optimizations if desired. */
2903 if (HAVE_peephole
&& optimize_p
&& !flag_no_peephole
&& !nopeepholes
)
2905 rtx_insn
*next
= peephole (insn
);
2906 /* When peepholing, if there were notes within the peephole,
2907 emit them before the peephole. */
2908 if (next
!= 0 && next
!= NEXT_INSN (insn
))
2910 rtx_insn
*note
, *prev
= PREV_INSN (insn
);
2912 for (note
= NEXT_INSN (insn
); note
!= next
;
2913 note
= NEXT_INSN (note
))
2914 final_scan_insn (note
, file
, optimize_p
, nopeepholes
, seen
);
2916 /* Put the notes in the proper position for a later
2917 rescan. For example, the SH target can do this
2918 when generating a far jump in a delayed branch
2920 note
= NEXT_INSN (insn
);
2921 SET_PREV_INSN (note
) = prev
;
2922 SET_NEXT_INSN (prev
) = note
;
2923 SET_NEXT_INSN (PREV_INSN (next
)) = insn
;
2924 SET_PREV_INSN (insn
) = PREV_INSN (next
);
2925 SET_NEXT_INSN (insn
) = next
;
2926 SET_PREV_INSN (next
) = insn
;
2929 /* PEEPHOLE might have changed this. */
2930 body
= PATTERN (insn
);
2933 /* Try to recognize the instruction.
2934 If successful, verify that the operands satisfy the
2935 constraints for the instruction. Crash if they don't,
2936 since `reload' should have changed them so that they do. */
2938 insn_code_number
= recog_memoized (insn
);
2939 cleanup_subreg_operands (insn
);
2941 /* Dump the insn in the assembly for debugging (-dAP).
2942 If the final dump is requested as slim RTL, dump slim
2943 RTL to the assembly file also. */
2944 if (flag_dump_rtl_in_asm
)
2946 print_rtx_head
= ASM_COMMENT_START
;
2947 if (! (dump_flags
& TDF_SLIM
))
2948 print_rtl_single (asm_out_file
, insn
);
2950 dump_insn_slim (asm_out_file
, insn
);
2951 print_rtx_head
= "";
2954 if (! constrain_operands_cached (insn
, 1))
2955 fatal_insn_not_found (insn
);
2957 /* Some target machines need to prescan each insn before
2960 #ifdef FINAL_PRESCAN_INSN
2961 FINAL_PRESCAN_INSN (insn
, recog_data
.operand
, recog_data
.n_operands
);
2964 if (targetm
.have_conditional_execution ()
2965 && GET_CODE (PATTERN (insn
)) == COND_EXEC
)
2966 current_insn_predicate
= COND_EXEC_TEST (PATTERN (insn
));
2969 cc_prev_status
= cc_status
;
2971 /* Update `cc_status' for this instruction.
2972 The instruction's output routine may change it further.
2973 If the output routine for a jump insn needs to depend
2974 on the cc status, it should look at cc_prev_status. */
2976 NOTICE_UPDATE_CC (body
, insn
);
2979 current_output_insn
= debug_insn
= insn
;
2981 /* Find the proper template for this insn. */
2982 templ
= get_insn_template (insn_code_number
, insn
);
2984 /* If the C code returns 0, it means that it is a jump insn
2985 which follows a deleted test insn, and that test insn
2986 needs to be reinserted. */
2991 gcc_assert (prev_nonnote_insn (insn
) == last_ignored_compare
);
2993 /* We have already processed the notes between the setter and
2994 the user. Make sure we don't process them again, this is
2995 particularly important if one of the notes is a block
2996 scope note or an EH note. */
2998 prev
!= last_ignored_compare
;
2999 prev
= PREV_INSN (prev
))
3002 delete_insn (prev
); /* Use delete_note. */
3008 /* If the template is the string "#", it means that this insn must
3010 if (templ
[0] == '#' && templ
[1] == '\0')
3012 rtx_insn
*new_rtx
= try_split (body
, insn
, 0);
3014 /* If we didn't split the insn, go away. */
3015 if (new_rtx
== insn
&& PATTERN (new_rtx
) == body
)
3016 fatal_insn ("could not split insn", insn
);
3018 /* If we have a length attribute, this instruction should have
3019 been split in shorten_branches, to ensure that we would have
3020 valid length info for the splitees. */
3021 gcc_assert (!HAVE_ATTR_length
);
3026 /* ??? This will put the directives in the wrong place if
3027 get_insn_template outputs assembly directly. However calling it
3028 before get_insn_template breaks if the insns is split. */
3029 if (targetm
.asm_out
.unwind_emit_before_insn
3030 && targetm
.asm_out
.unwind_emit
)
3031 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
3033 rtx_call_insn
*call_insn
= dyn_cast
<rtx_call_insn
*> (insn
);
3034 if (call_insn
!= NULL
)
3036 rtx x
= call_from_call_insn (call_insn
);
3038 if (x
&& MEM_P (x
) && GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
3042 t
= SYMBOL_REF_DECL (x
);
3044 assemble_external (t
);
3048 /* Output assembler code from the template. */
3049 output_asm_insn (templ
, recog_data
.operand
);
3051 /* Some target machines need to postscan each insn after
3053 if (targetm
.asm_out
.final_postscan_insn
)
3054 targetm
.asm_out
.final_postscan_insn (file
, insn
, recog_data
.operand
,
3055 recog_data
.n_operands
);
3057 if (!targetm
.asm_out
.unwind_emit_before_insn
3058 && targetm
.asm_out
.unwind_emit
)
3059 targetm
.asm_out
.unwind_emit (asm_out_file
, insn
);
3061 /* Let the debug info back-end know about this call. We do this only
3062 after the instruction has been emitted because labels that may be
3063 created to reference the call instruction must appear after it. */
3064 if (call_insn
!= NULL
&& !DECL_IGNORED_P (current_function_decl
))
3065 debug_hooks
->var_location (insn
);
3067 current_output_insn
= debug_insn
= 0;
3070 return NEXT_INSN (insn
);
/* Return whether a source line note needs to be emitted before INSN.
   Sets IS_STMT to TRUE if the line should be marked as a possible
   breakpoint location.  */

static bool
notice_source_line (rtx_insn *insn, bool *is_stmt)
{
  const char *filename;
  int linenum;

  if (override_filename)
    {
      filename = override_filename;
      linenum = override_linenum;
    }
  else if (INSN_HAS_LOCATION (insn))
    {
      expanded_location xloc = insn_location (insn);
      filename = xloc.file;
      linenum = xloc.line;
    }
  else
    {
      filename = NULL;
      linenum = 0;
    }

  if (filename == NULL)
    return false;

  if (force_source_line
      || filename != last_filename
      || last_linenum != linenum)
    {
      force_source_line = false;
      last_filename = filename;
      last_linenum = linenum;
      last_discriminator = discriminator;
      *is_stmt = true;
      high_block_linenum = MAX (last_linenum, high_block_linenum);
      high_function_linenum = MAX (last_linenum, high_function_linenum);
      return true;
    }

  if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
    {
      /* If the discriminator changed, but the line number did not,
	 output the line table entry with is_stmt false so the
	 debugger does not treat this as a breakpoint location.  */
      last_discriminator = discriminator;
      *is_stmt = false;
      return true;
    }

  return false;
}
3130 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3131 directly to the desired hard register. */
3134 cleanup_subreg_operands (rtx_insn
*insn
)
3137 bool changed
= false;
3138 extract_insn_cached (insn
);
3139 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3141 /* The following test cannot use recog_data.operand when testing
3142 for a SUBREG: the underlying object might have been changed
3143 already if we are inside a match_operator expression that
3144 matches the else clause. Instead we test the underlying
3145 expression directly. */
3146 if (GET_CODE (*recog_data
.operand_loc
[i
]) == SUBREG
)
3148 recog_data
.operand
[i
] = alter_subreg (recog_data
.operand_loc
[i
], true);
3151 else if (GET_CODE (recog_data
.operand
[i
]) == PLUS
3152 || GET_CODE (recog_data
.operand
[i
]) == MULT
3153 || MEM_P (recog_data
.operand
[i
]))
3154 recog_data
.operand
[i
] = walk_alter_subreg (recog_data
.operand_loc
[i
], &changed
);
3157 for (i
= 0; i
< recog_data
.n_dups
; i
++)
3159 if (GET_CODE (*recog_data
.dup_loc
[i
]) == SUBREG
)
3161 *recog_data
.dup_loc
[i
] = alter_subreg (recog_data
.dup_loc
[i
], true);
3164 else if (GET_CODE (*recog_data
.dup_loc
[i
]) == PLUS
3165 || GET_CODE (*recog_data
.dup_loc
[i
]) == MULT
3166 || MEM_P (*recog_data
.dup_loc
[i
]))
3167 *recog_data
.dup_loc
[i
] = walk_alter_subreg (recog_data
.dup_loc
[i
], &changed
);
3170 df_insn_rescan (insn
);
3173 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3174 the thing it is a subreg of. Do it anyway if FINAL_P. */
3177 alter_subreg (rtx
*xp
, bool final_p
)
3180 rtx y
= SUBREG_REG (x
);
3182 /* simplify_subreg does not remove subreg from volatile references.
3183 We are required to. */
3186 int offset
= SUBREG_BYTE (x
);
3188 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3189 contains 0 instead of the proper offset. See simplify_subreg. */
3191 && GET_MODE_SIZE (GET_MODE (y
)) < GET_MODE_SIZE (GET_MODE (x
)))
3193 int difference
= GET_MODE_SIZE (GET_MODE (y
))
3194 - GET_MODE_SIZE (GET_MODE (x
));
3195 if (WORDS_BIG_ENDIAN
)
3196 offset
+= (difference
/ UNITS_PER_WORD
) * UNITS_PER_WORD
;
3197 if (BYTES_BIG_ENDIAN
)
3198 offset
+= difference
% UNITS_PER_WORD
;
3202 *xp
= adjust_address (y
, GET_MODE (x
), offset
);
3204 *xp
= adjust_address_nv (y
, GET_MODE (x
), offset
);
3206 else if (REG_P (y
) && HARD_REGISTER_P (y
))
3208 rtx new_rtx
= simplify_subreg (GET_MODE (x
), y
, GET_MODE (y
),
3213 else if (final_p
&& REG_P (y
))
3215 /* Simplify_subreg can't handle some REG cases, but we have to. */
3217 HOST_WIDE_INT offset
;
3219 regno
= subreg_regno (x
);
3220 if (subreg_lowpart_p (x
))
3221 offset
= byte_lowpart_offset (GET_MODE (x
), GET_MODE (y
));
3223 offset
= SUBREG_BYTE (x
);
3224 *xp
= gen_rtx_REG_offset (y
, GET_MODE (x
), regno
, offset
);
/* Do alter_subreg on all the SUBREGs contained in X.  */

static rtx
walk_alter_subreg (rtx *xp, bool *changed)
{
  rtx x = *xp;
  switch (GET_CODE (x))
    {
    case PLUS:
    case MULT:
    case AND:
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
      break;

    case MEM:
    case ZERO_EXTEND:
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      break;

    case SUBREG:
      *changed = true;
      return alter_subreg (xp, true);

    default:
      break;
    }

  return *xp;
}
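/* Illustrative sketch, not part of the original sources: after reload,
   alter_subreg rewrites an operand such as

       (subreg:QI (reg:SI 0 ax) 0)

   into a direct hard-register reference

       (reg:QI 0 ax)

   so no SUBREG ever reaches the assembler-output code.  The register
   name and modes are hypothetical x86-style examples.  */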
3264 /* Given BODY, the body of a jump instruction, alter the jump condition
3265 as required by the bits that are set in cc_status.flags.
3266 Not all of the bits there can be handled at this level in all cases.
3268 The value is normally 0.
3269 1 means that the condition has become always true.
3270 -1 means that the condition has become always false.
3271 2 means that COND has been altered. */
3274 alter_cond (rtx cond
)
3278 if (cc_status
.flags
& CC_REVERSED
)
3281 PUT_CODE (cond
, swap_condition (GET_CODE (cond
)));
3284 if (cc_status
.flags
& CC_INVERTED
)
3287 PUT_CODE (cond
, reverse_condition (GET_CODE (cond
)));
3290 if (cc_status
.flags
& CC_NOT_POSITIVE
)
3291 switch (GET_CODE (cond
))
3296 /* Jump becomes unconditional. */
3302 /* Jump becomes no-op. */
3306 PUT_CODE (cond
, EQ
);
3311 PUT_CODE (cond
, NE
);
3319 if (cc_status
.flags
& CC_NOT_NEGATIVE
)
3320 switch (GET_CODE (cond
))
3324 /* Jump becomes unconditional. */
3329 /* Jump becomes no-op. */
3334 PUT_CODE (cond
, EQ
);
3340 PUT_CODE (cond
, NE
);
3348 if (cc_status
.flags
& CC_NO_OVERFLOW
)
3349 switch (GET_CODE (cond
))
3352 /* Jump becomes unconditional. */
3356 PUT_CODE (cond
, EQ
);
3361 PUT_CODE (cond
, NE
);
3366 /* Jump becomes no-op. */
3373 if (cc_status
.flags
& (CC_Z_IN_NOT_N
| CC_Z_IN_N
))
3374 switch (GET_CODE (cond
))
3380 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? GE
: LT
);
3385 PUT_CODE (cond
, cc_status
.flags
& CC_Z_IN_N
? LT
: GE
);
3390 if (cc_status
.flags
& CC_NOT_SIGNED
)
3391 /* The flags are valid if signed condition operators are converted
3393 switch (GET_CODE (cond
))
3396 PUT_CODE (cond
, LEU
);
3401 PUT_CODE (cond
, LTU
);
3406 PUT_CODE (cond
, GTU
);
3411 PUT_CODE (cond
, GEU
);
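#if 0
/* Illustrative sketch, not part of the original sources: how a caller
   interprets alter_cond's return value (this mirrors the conditional
   branch handling in final_scan_insn above).  */
int result = alter_cond (XEXP (SET_SRC (body), 0));
if (result == 1)
  SET_SRC (body) = XEXP (SET_SRC (body), 1);	/* Always true: keep "then".  */
else if (result == -1)
  SET_SRC (body) = XEXP (SET_SRC (body), 2);	/* Always false: keep "else".  */
else if (result == 2)
  INSN_CODE (insn) = -1;			/* Altered: rerecognize.  */
#endif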
/* Report inconsistency between the assembler template and the operands.
   In an `asm', it's the user's fault; otherwise, the compiler's fault.  */

void
output_operand_lossage (const char *cmsgid, ...)
{
  char *fmt_string;
  char *new_message;
  const char *pfx_str;
  va_list ap;

  va_start (ap, cmsgid);

  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
  fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
  new_message = xvasprintf (fmt_string, ap);

  if (this_is_asm_operands)
    error_for_asm (this_is_asm_operands, "%s", new_message);
  else
    internal_error ("%s", new_message);

  free (fmt_string);
  free (new_message);
  va_end (ap);
}
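#if 0
/* Illustrative sketch, not part of the original sources: a target's
   TARGET_PRINT_OPERAND hook typically reports an unknown %-code through
   output_operand_lossage, so the message is blamed on the user's asm
   when one is being expanded.  The hook below is hypothetical.  */
static void
example_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 0:
      output_addr_const (file, x);
      break;
    default:
      output_operand_lossage ("invalid operand modifier '%%%c'", code);
    }
}
#endif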
3450 /* Output of assembler code from a template, and its subroutines. */
3452 /* Annotate the assembly with a comment describing the pattern and
3453 alternative used. */
3456 output_asm_name (void)
3460 int num
= INSN_CODE (debug_insn
);
3461 fprintf (asm_out_file
, "\t%s %d\t%s",
3462 ASM_COMMENT_START
, INSN_UID (debug_insn
),
3463 insn_data
[num
].name
);
3464 if (insn_data
[num
].n_alternatives
> 1)
3465 fprintf (asm_out_file
, "/%d", which_alternative
+ 1);
3467 if (HAVE_ATTR_length
)
3468 fprintf (asm_out_file
, "\t[length = %d]",
3469 get_attr_length (debug_insn
));
3471 /* Clear this so only the first assembler insn
3472 of any rtl insn will get the special comment for -dp. */
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
   corresponds to the address of the object and 0 if to the object.  */
3482 get_mem_expr_from_op (rtx op
, int *paddressp
)
3490 return REG_EXPR (op
);
3491 else if (!MEM_P (op
))
3494 if (MEM_EXPR (op
) != 0)
3495 return MEM_EXPR (op
);
3497 /* Otherwise we have an address, so indicate it and look at the address. */
3501 /* First check if we have a decl for the address, then look at the right side
3502 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3503 But don't allow the address to itself be indirect. */
3504 if ((expr
= get_mem_expr_from_op (op
, &inner_addressp
)) && ! inner_addressp
)
3506 else if (GET_CODE (op
) == PLUS
3507 && (expr
= get_mem_expr_from_op (XEXP (op
, 1), &inner_addressp
)))
3511 || GET_RTX_CLASS (GET_CODE (op
)) == RTX_BIN_ARITH
)
3514 expr
= get_mem_expr_from_op (op
, &inner_addressp
);
3515 return inner_addressp
? 0 : expr
;
3518 /* Output operand names for assembler instructions. OPERANDS is the
3519 operand vector, OPORDER is the order to write the operands, and NOPS
3520 is the number of operands to write. */
3523 output_asm_operand_names (rtx
*operands
, int *oporder
, int nops
)
3528 for (i
= 0; i
< nops
; i
++)
3531 rtx op
= operands
[oporder
[i
]];
3532 tree expr
= get_mem_expr_from_op (op
, &addressp
);
3534 fprintf (asm_out_file
, "%c%s",
3535 wrote
? ',' : '\t', wrote
? "" : ASM_COMMENT_START
);
3539 fprintf (asm_out_file
, "%s",
3540 addressp
? "*" : "");
3541 print_mem_expr (asm_out_file
, expr
);
3544 else if (REG_P (op
) && ORIGINAL_REGNO (op
)
3545 && ORIGINAL_REGNO (op
) != REGNO (op
))
3546 fprintf (asm_out_file
, " tmp%i", ORIGINAL_REGNO (op
));
3550 #ifdef ASSEMBLER_DIALECT
3551 /* Helper function to parse assembler dialects in the asm string.
3552 This is called from output_asm_insn and asm_fprintf. */
3554 do_assembler_dialects (const char *p
, int *dialect
)
3565 output_operand_lossage ("nested assembly dialect alternatives");
3569 /* If we want the first dialect, do nothing. Otherwise, skip
3570 DIALECT_NUMBER of strings ending with '|'. */
3571 for (i
= 0; i
< dialect_number
; i
++)
3573 while (*p
&& *p
!= '}')
3581 /* Skip over any character after a percent sign. */
3593 output_operand_lossage ("unterminated assembly dialect alternative");
3600 /* Skip to close brace. */
3605 output_operand_lossage ("unterminated assembly dialect alternative");
3609 /* Skip over any character after a percent sign. */
3610 if (*p
== '%' && p
[1])
3624 putc (c
, asm_out_file
);
3629 putc (c
, asm_out_file
);
3640 /* Output text from TEMPLATE to the assembler output file,
3641 obeying %-directions to substitute operands taken from
3642 the vector OPERANDS.
3644 %N (for N a digit) means print operand N in usual manner.
3645 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3646 and print the label name with no punctuation.
3647 %cN means require operand N to be a constant
3648 and print the constant expression with no punctuation.
3649 %aN means expect operand N to be a memory address
3650 (not a memory reference!) and print a reference
3652 %nN means expect operand N to be a constant
3653 and print a constant expression for minus the value
3654 of the operand, with no other punctuation. */
3657 output_asm_insn (const char *templ
, rtx
*operands
)
3661 #ifdef ASSEMBLER_DIALECT
3664 int oporder
[MAX_RECOG_OPERANDS
];
3665 char opoutput
[MAX_RECOG_OPERANDS
];
3668 /* An insn may return a null string template
3669 in a case where no assembler code is needed. */
3673 memset (opoutput
, 0, sizeof opoutput
);
3675 putc ('\t', asm_out_file
);
3677 #ifdef ASM_OUTPUT_OPCODE
3678 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3685 if (flag_verbose_asm
)
3686 output_asm_operand_names (operands
, oporder
, ops
);
3687 if (flag_print_asm_name
)
3691 memset (opoutput
, 0, sizeof opoutput
);
3693 putc (c
, asm_out_file
);
3694 #ifdef ASM_OUTPUT_OPCODE
3695 while ((c
= *p
) == '\t')
3697 putc (c
, asm_out_file
);
3700 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
3704 #ifdef ASSEMBLER_DIALECT
3708 p
= do_assembler_dialects (p
, &dialect
);
3713 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3714 if ASSEMBLER_DIALECT defined and these characters have a special
3715 meaning as dialect delimiters.*/
3717 #ifdef ASSEMBLER_DIALECT
3718 || *p
== '{' || *p
== '}' || *p
== '|'
3722 putc (*p
, asm_out_file
);
3725 /* %= outputs a number which is unique to each insn in the entire
3726 compilation. This is useful for making local labels that are
3727 referred to more than once in a given insn. */
3731 fprintf (asm_out_file
, "%d", insn_counter
);
3733 /* % followed by a letter and some digits
3734 outputs an operand in a special way depending on the letter.
3735 Letters `acln' are implemented directly.
3736 Other letters are passed to `output_operand' so that
3737 the TARGET_PRINT_OPERAND hook can define them. */
3738 else if (ISALPHA (*p
))
3741 unsigned long opnum
;
3744 opnum
= strtoul (p
, &endptr
, 10);
3747 output_operand_lossage ("operand number missing "
3749 else if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3750 output_operand_lossage ("operand number out of range");
3751 else if (letter
== 'l')
3752 output_asm_label (operands
[opnum
]);
3753 else if (letter
== 'a')
3754 output_address (VOIDmode
, operands
[opnum
]);
3755 else if (letter
== 'c')
3757 if (CONSTANT_ADDRESS_P (operands
[opnum
]))
3758 output_addr_const (asm_out_file
, operands
[opnum
]);
3760 output_operand (operands
[opnum
], 'c');
3762 else if (letter
== 'n')
3764 if (CONST_INT_P (operands
[opnum
]))
3765 fprintf (asm_out_file
, HOST_WIDE_INT_PRINT_DEC
,
3766 - INTVAL (operands
[opnum
]));
3769 putc ('-', asm_out_file
);
3770 output_addr_const (asm_out_file
, operands
[opnum
]);
3774 output_operand (operands
[opnum
], letter
);
3776 if (!opoutput
[opnum
])
3777 oporder
[ops
++] = opnum
;
3778 opoutput
[opnum
] = 1;
3783 /* % followed by a digit outputs an operand the default way. */
3784 else if (ISDIGIT (*p
))
3786 unsigned long opnum
;
3789 opnum
= strtoul (p
, &endptr
, 10);
3790 if (this_is_asm_operands
&& opnum
>= insn_noperands
)
3791 output_operand_lossage ("operand number out of range");
3793 output_operand (operands
[opnum
], 0);
3795 if (!opoutput
[opnum
])
3796 oporder
[ops
++] = opnum
;
3797 opoutput
[opnum
] = 1;
3802 /* % followed by punctuation: output something for that
3803 punctuation character alone, with no operand. The
3804 TARGET_PRINT_OPERAND hook decides what is actually done. */
3805 else if (targetm
.asm_out
.print_operand_punct_valid_p ((unsigned char) *p
))
3806 output_operand (NULL_RTX
, *p
++);
3808 output_operand_lossage ("invalid %%-code");
3812 putc (c
, asm_out_file
);
3815 /* Write out the variable names for operands, if we know them. */
3816 if (flag_verbose_asm
)
3817 output_asm_operand_names (operands
, oporder
, ops
);
3818 if (flag_print_asm_name
)
3821 putc ('\n', asm_out_file
);
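#if 0
/* Illustrative sketch, not part of the original sources: a hand-rolled
   call showing how the %-codes documented above select operands.  Real
   callers pass recog_data.operand and the template returned by
   get_insn_template; the operands and mnemonic here are made up.  */
static void
example_emit_add (rtx dest, rtx src)
{
  rtx operands[2] = { dest, src };
  /* "%0" and "%1" print operands 0 and 1 via TARGET_PRINT_OPERAND;
     "%%" would emit a literal '%'.  */
  output_asm_insn ("add\t%1, %0", operands);
}
#endif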
/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */

void
output_asm_label (rtx x)
{
  char buf[256];

  if (GET_CODE (x) == LABEL_REF)
    x = label_ref_label (x);
  if (LABEL_P (x)
      || (NOTE_P (x)
	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
  else
    output_operand_lossage ("'%%l' operand isn't a label");

  assemble_name (asm_out_file, buf);
}
/* Marks SYMBOL_REFs in x as referenced through use of assemble_external.  */

void
mark_symbol_refs_as_used (rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == SYMBOL_REF)
	if (tree t = SYMBOL_REF_DECL (x))
	  assemble_external (t);
    }
}
/* Print operand X using machine-dependent assembler syntax.
   CODE is a non-digit that preceded the operand-number in the % spec,
   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
   between the % and the digits.
   When CODE is a non-letter, X is 0.

   The meanings of the letters are machine-dependent and controlled
   by TARGET_PRINT_OPERAND.  */

static void
output_operand (rtx x, int code ATTRIBUTE_UNUSED)
{
  if (x && GET_CODE (x) == SUBREG)
    x = alter_subreg (&x, true);

  /* X must not be a pseudo reg.  */
  if (!targetm.no_register_allocation)
    gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);

  targetm.asm_out.print_operand (asm_out_file, x, code);

  if (x == NULL_RTX)
    return;

  mark_symbol_refs_as_used (x);
}

/* Print a memory reference operand for address X using
   machine-dependent assembler syntax.  */

void
output_address (machine_mode mode, rtx x)
{
  bool changed = false;
  walk_alter_subreg (&x, &changed);
  targetm.asm_out.print_operand_address (asm_out_file, mode, x);
}
3896 /* Print an integer constant expression in assembler syntax.
3897 Addition and subtraction are the only arithmetic
3898 that may appear in these expressions. */
3901 output_addr_const (FILE *file
, rtx x
)
3906 switch (GET_CODE (x
))
3913 if (SYMBOL_REF_DECL (x
))
3914 assemble_external (SYMBOL_REF_DECL (x
));
3915 #ifdef ASM_OUTPUT_SYMBOL_REF
3916 ASM_OUTPUT_SYMBOL_REF (file
, x
);
3918 assemble_name (file
, XSTR (x
, 0));
3923 x
= label_ref_label (x
);
3926 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
3927 #ifdef ASM_OUTPUT_LABEL_REF
3928 ASM_OUTPUT_LABEL_REF (file
, buf
);
3930 assemble_name (file
, buf
);
3935 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3939 /* This used to output parentheses around the expression,
3940 but that does not work on the 386 (either ATT or BSD assembler). */
3941 output_addr_const (file
, XEXP (x
, 0));
3944 case CONST_WIDE_INT
:
3945 /* We do not know the mode here so we have to use a round about
3946 way to build a wide-int to get it printed properly. */
3948 wide_int w
= wide_int::from_array (&CONST_WIDE_INT_ELT (x
, 0),
3949 CONST_WIDE_INT_NUNITS (x
),
3950 CONST_WIDE_INT_NUNITS (x
)
3951 * HOST_BITS_PER_WIDE_INT
,
3953 print_decs (w
, file
);
3958 if (CONST_DOUBLE_AS_INT_P (x
))
3960 /* We can use %d if the number is one word and positive. */
3961 if (CONST_DOUBLE_HIGH (x
))
3962 fprintf (file
, HOST_WIDE_INT_PRINT_DOUBLE_HEX
,
3963 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (x
),
3964 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3965 else if (CONST_DOUBLE_LOW (x
) < 0)
3966 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
3967 (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
));
3969 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
));
3972 /* We can't handle floating point constants;
3973 PRINT_OPERAND must handle them. */
3974 output_operand_lossage ("floating constant misused");
3978 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_FIXED_VALUE_LOW (x
));
3982 /* Some assemblers need integer constants to appear last (eg masm). */
3983 if (CONST_INT_P (XEXP (x
, 0)))
3985 output_addr_const (file
, XEXP (x
, 1));
3986 if (INTVAL (XEXP (x
, 0)) >= 0)
3987 fprintf (file
, "+");
3988 output_addr_const (file
, XEXP (x
, 0));
3992 output_addr_const (file
, XEXP (x
, 0));
3993 if (!CONST_INT_P (XEXP (x
, 1))
3994 || INTVAL (XEXP (x
, 1)) >= 0)
3995 fprintf (file
, "+");
3996 output_addr_const (file
, XEXP (x
, 1));
4001 /* Avoid outputting things like x-x or x+5-x,
4002 since some assemblers can't handle that. */
4003 x
= simplify_subtraction (x
);
4004 if (GET_CODE (x
) != MINUS
)
4007 output_addr_const (file
, XEXP (x
, 0));
4008 fprintf (file
, "-");
4009 if ((CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) >= 0)
4010 || GET_CODE (XEXP (x
, 1)) == PC
4011 || GET_CODE (XEXP (x
, 1)) == SYMBOL_REF
)
4012 output_addr_const (file
, XEXP (x
, 1));
4015 fputs (targetm
.asm_out
.open_paren
, file
);
4016 output_addr_const (file
, XEXP (x
, 1));
4017 fputs (targetm
.asm_out
.close_paren
, file
);
4025 output_addr_const (file
, XEXP (x
, 0));
4029 if (targetm
.asm_out
.output_addr_const_extra (file
, x
))
4032 output_operand_lossage ("invalid expression as operand");
/* Output a quoted string.  */

void
output_quoted_string (FILE *asm_file, const char *string)
{
#ifdef OUTPUT_QUOTED_STRING
  OUTPUT_QUOTED_STRING (asm_file, string);
#else
  char c;

  putc ('\"', asm_file);
  while ((c = *string++) != 0)
    {
      if (ISPRINT (c))
	{
	  if (c == '\"' || c == '\\')
	    putc ('\\', asm_file);
	  putc (c, asm_file);
	}
      else
	fprintf (asm_file, "\\%03o", (unsigned char) c);
    }
  putc ('\"', asm_file);
#endif
}
/* Write a HOST_WIDE_INT number in hex form 0x1234, fast.  */

void
fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
{
  char buf[2 + CHAR_BIT * sizeof (value) / 4];
  if (value == 0)
    fputs ("0x0", f);
  else
    {
      char *p = buf + sizeof (buf);
      do
	*--p = "0123456789abcdef"[value % 16];
      while ((value /= 16) != 0);
      *--p = 'x';
      *--p = '0';
      fwrite (p, 1, buf + sizeof (buf) - p, f);
    }
}
/* Internal function that prints an unsigned long in decimal in reverse.
   The output string IS NOT null-terminated.  */

static int
sprint_ul_rev (char *s, unsigned long value)
{
  int i = 0;
  do
    {
      s[i] = "0123456789"[value % 10];
      value /= 10;
      i++;
      /* alternate version, without modulo */
      /* oldval = value; */
      /* value /= 10; */
      /* s[i] = "0123456789" [oldval - 10*value]; */
      /* i++ */
    }
  while (value != 0);
  return i;
}

/* Write an unsigned long as decimal to a file, fast.  */

void
fprint_ul (FILE *f, unsigned long value)
{
  /* python says: len(str(2**64)) == 20 */
  char s[20];
  int i;

  i = sprint_ul_rev (s, value);

  /* It's probably too small to bother with string reversal and fputs.  */
  do
    {
      i--;
      putc (s[i], f);
    }
  while (i != 0);
}

/* Write an unsigned long as decimal to a string, fast.
   s must be wide enough to not overflow, at least 21 chars.
   Returns the length of the string (without terminating '\0').  */

int
sprint_ul (char *s, unsigned long value)
{
  int len = sprint_ul_rev (s, value);
  s[len] = '\0';

  std::reverse (s, s + len);
  return len;
}
4138 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4139 %R prints the value of REGISTER_PREFIX.
4140 %L prints the value of LOCAL_LABEL_PREFIX.
4141 %U prints the value of USER_LABEL_PREFIX.
4142 %I prints the value of IMMEDIATE_PREFIX.
4143 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4144 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4146 We handle alternate assembler dialects here, just like output_asm_insn. */
4149 asm_fprintf (FILE *file
, const char *p
, ...)
4153 #ifdef ASSEMBLER_DIALECT
4158 va_start (argptr
, p
);
4165 #ifdef ASSEMBLER_DIALECT
4169 p
= do_assembler_dialects (p
, &dialect
);
4176 while (strchr ("-+ #0", c
))
4181 while (ISDIGIT (c
) || c
== '.')
4192 case 'd': case 'i': case 'u':
4193 case 'x': case 'X': case 'o':
4197 fprintf (file
, buf
, va_arg (argptr
, int));
4201 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4202 'o' cases, but we do not check for those cases. It
4203 means that the value is a HOST_WIDE_INT, which may be
4204 either `long' or `long long'. */
4205 memcpy (q
, HOST_WIDE_INT_PRINT
, strlen (HOST_WIDE_INT_PRINT
));
4206 q
+= strlen (HOST_WIDE_INT_PRINT
);
4209 fprintf (file
, buf
, va_arg (argptr
, HOST_WIDE_INT
));
4214 #ifdef HAVE_LONG_LONG
4220 fprintf (file
, buf
, va_arg (argptr
, long long));
4227 fprintf (file
, buf
, va_arg (argptr
, long));
4235 fprintf (file
, buf
, va_arg (argptr
, char *));
4239 #ifdef ASM_OUTPUT_OPCODE
4240 ASM_OUTPUT_OPCODE (asm_out_file
, p
);
4245 #ifdef REGISTER_PREFIX
4246 fprintf (file
, "%s", REGISTER_PREFIX
);
4251 #ifdef IMMEDIATE_PREFIX
4252 fprintf (file
, "%s", IMMEDIATE_PREFIX
);
4257 #ifdef LOCAL_LABEL_PREFIX
4258 fprintf (file
, "%s", LOCAL_LABEL_PREFIX
);
4263 fputs (user_label_prefix
, file
);
4266 #ifdef ASM_FPRINTF_EXTENSIONS
4267 /* Uppercase letters are reserved for general use by asm_fprintf
4268 and so are not available to target specific code. In order to
4269 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4270 they are defined here. As they get turned into real extensions
4271 to asm_fprintf they should be removed from this list. */
4272 case 'A': case 'B': case 'C': case 'D': case 'E':
4273 case 'F': case 'G': case 'H': case 'J': case 'K':
4274 case 'M': case 'N': case 'P': case 'Q': case 'S':
4275 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4278 ASM_FPRINTF_EXTENSIONS (file
, argptr
, p
)
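#if 0
/* Illustrative sketch, not part of the original sources: %L and %U expand
   to LOCAL_LABEL_PREFIX and USER_LABEL_PREFIX, while %wd prints a
   HOST_WIDE_INT in decimal.  The mnemonics, label number and size below
   are made up.  */
asm_fprintf (asm_out_file, "\tb\t%LL%d\n", 42);
asm_fprintf (asm_out_file, "\t.size\t%Ufoo, %wd\n", (HOST_WIDE_INT) 128);
#endif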
/* Return nonzero if this function has no function calls.  */

int
leaf_function_p (void)
{
  rtx_insn *insn;

  /* Some back-ends (e.g. s390) want leaf functions to stay leaf
     functions even if they call mcount.  */
  if (crtl->profile && !targetm.keep_leaf_when_profiled ())
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (CALL_P (insn)
	  && ! SIBLING_CALL_P (insn))
	return 0;
      if (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE
	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
	return 0;
    }

  return 1;
}
/* Return 1 if branch is a forward branch.
   Uses insn_shuid array, so it works only in the final pass.  May be used
   by output templates to add branch prediction hints.  */

int
final_forward_branch_p (rtx_insn *insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  gcc_assert (insn_id && label_id);
  return insn_id < label_id;
}
4335 /* On some machines, a function with no call insns
4336 can run faster if it doesn't create its own register window.
4337 When output, the leaf function should use only the "output"
4338 registers. Ordinarily, the function would be compiled to use
4339 the "input" registers to find its arguments; it is a candidate
4340 for leaf treatment if it uses only the "input" registers.
4341 Leaf function treatment means renumbering so the function
4342 uses the "output" registers instead. */
4344 #ifdef LEAF_REGISTERS
4346 /* Return 1 if this function uses only the registers that can be
4347 safely renumbered. */
4350 only_leaf_regs_used (void)
4353 const char *const permitted_reg_in_leaf_functions
= LEAF_REGISTERS
;
4355 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4356 if ((df_regs_ever_live_p (i
) || global_regs
[i
])
4357 && ! permitted_reg_in_leaf_functions
[i
])
4360 if (crtl
->uses_pic_offset_table
4361 && pic_offset_table_rtx
!= 0
4362 && REG_P (pic_offset_table_rtx
)
4363 && ! permitted_reg_in_leaf_functions
[REGNO (pic_offset_table_rtx
)])
4369 /* Scan all instructions and renumber all registers into those
4370 available in leaf functions. */
4373 leaf_renumber_regs (rtx_insn
*first
)
4377 /* Renumber only the actual patterns.
4378 The reg-notes can contain frame pointer refs,
4379 and renumbering them could crash, and should not be needed. */
4380 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
4382 leaf_renumber_regs_insn (PATTERN (insn
));
4385 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4386 available in leaf functions. */
4389 leaf_renumber_regs_insn (rtx in_rtx
)
4392 const char *format_ptr
;
4397 /* Renumber all input-registers into output-registers.
4398 renumbered_regs would be 1 for an output-register;
4405 /* Don't renumber the same reg twice. */
4409 newreg
= REGNO (in_rtx
);
4410 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4411 to reach here as part of a REG_NOTE. */
4412 if (newreg
>= FIRST_PSEUDO_REGISTER
)
4417 newreg
= LEAF_REG_REMAP (newreg
);
4418 gcc_assert (newreg
>= 0);
4419 df_set_regs_ever_live (REGNO (in_rtx
), false);
4420 df_set_regs_ever_live (newreg
, true);
4421 SET_REGNO (in_rtx
, newreg
);
4426 if (INSN_P (in_rtx
))
4428 /* Inside a SEQUENCE, we find insns.
4429 Renumber just the patterns of these insns,
4430 just as we do for the top-level insns. */
4431 leaf_renumber_regs_insn (PATTERN (in_rtx
));
4435 format_ptr
= GET_RTX_FORMAT (GET_CODE (in_rtx
));
4437 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (in_rtx
)); i
++)
4438 switch (*format_ptr
++)
4441 leaf_renumber_regs_insn (XEXP (in_rtx
, i
));
4445 if (NULL
!= XVEC (in_rtx
, i
))
4447 for (j
= 0; j
< XVECLEN (in_rtx
, i
); j
++)
4448 leaf_renumber_regs_insn (XVECEXP (in_rtx
, i
, j
));
4467 /* Turn the RTL into assembly. */
4469 rest_of_handle_final (void)
4471 const char *fnname
= get_fnname_from_decl (current_function_decl
);
4473 assemble_start_function (current_function_decl
, fnname
);
4474 final_start_function (get_insns (), asm_out_file
, optimize
);
4475 final (get_insns (), asm_out_file
, optimize
);
4477 collect_fn_hard_reg_usage ();
4478 final_end_function ();
4480 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4481 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4482 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4483 output_function_exception_table (fnname
);
4485 assemble_end_function (current_function_decl
, fnname
);
4487 user_defined_section_attribute
= false;
4489 /* Free up reg info memory. */
4493 fflush (asm_out_file
);
4495 /* Write DBX symbols if requested. */
4497 /* Note that for those inline functions where we don't initially
4498 know for certain that we will be generating an out-of-line copy,
4499 the first invocation of this routine (rest_of_compilation) will
4500 skip over this code by doing a `goto exit_rest_of_compilation;'.
4501 Later on, wrapup_global_declarations will (indirectly) call
4502 rest_of_compilation again for those inline functions that need
4503 to have out-of-line copies generated. During that call, we
4504 *will* be routed past here. */
4506 timevar_push (TV_SYMOUT
);
4507 if (!DECL_IGNORED_P (current_function_decl
))
4508 debug_hooks
->function_decl (current_function_decl
);
4509 timevar_pop (TV_SYMOUT
);
4511 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4512 DECL_INITIAL (current_function_decl
) = error_mark_node
;
4514 if (DECL_STATIC_CONSTRUCTOR (current_function_decl
)
4515 && targetm
.have_ctors_dtors
)
4516 targetm
.asm_out
.constructor (XEXP (DECL_RTL (current_function_decl
), 0),
4517 decl_init_priority_lookup
4518 (current_function_decl
));
4519 if (DECL_STATIC_DESTRUCTOR (current_function_decl
)
4520 && targetm
.have_ctors_dtors
)
4521 targetm
.asm_out
.destructor (XEXP (DECL_RTL (current_function_decl
), 0),
4522 decl_fini_priority_lookup
4523 (current_function_decl
));
4529 const pass_data pass_data_final
=
4531 RTL_PASS
, /* type */
4533 OPTGROUP_NONE
, /* optinfo_flags */
4534 TV_FINAL
, /* tv_id */
4535 0, /* properties_required */
4536 0, /* properties_provided */
4537 0, /* properties_destroyed */
4538 0, /* todo_flags_start */
4539 0, /* todo_flags_finish */
4542 class pass_final
: public rtl_opt_pass
4545 pass_final (gcc::context
*ctxt
)
4546 : rtl_opt_pass (pass_data_final
, ctxt
)
4549 /* opt_pass methods: */
4550 virtual unsigned int execute (function
*) { return rest_of_handle_final (); }
4552 }; // class pass_final
4557 make_pass_final (gcc::context
*ctxt
)
4559 return new pass_final (ctxt
);
4564 rest_of_handle_shorten_branches (void)
4566 /* Shorten branches. */
4567 shorten_branches (get_insns ());
4573 const pass_data pass_data_shorten_branches
=
4575 RTL_PASS
, /* type */
4576 "shorten", /* name */
4577 OPTGROUP_NONE
, /* optinfo_flags */
4578 TV_SHORTEN_BRANCH
, /* tv_id */
4579 0, /* properties_required */
4580 0, /* properties_provided */
4581 0, /* properties_destroyed */
4582 0, /* todo_flags_start */
4583 0, /* todo_flags_finish */
4586 class pass_shorten_branches
: public rtl_opt_pass
4589 pass_shorten_branches (gcc::context
*ctxt
)
4590 : rtl_opt_pass (pass_data_shorten_branches
, ctxt
)
4593 /* opt_pass methods: */
4594 virtual unsigned int execute (function
*)
4596 return rest_of_handle_shorten_branches ();
4599 }; // class pass_shorten_branches
4604 make_pass_shorten_branches (gcc::context
*ctxt
)
4606 return new pass_shorten_branches (ctxt
);
4611 rest_of_clean_state (void)
4613 rtx_insn
*insn
, *next
;
4614 FILE *final_output
= NULL
;
4615 int save_unnumbered
= flag_dump_unnumbered
;
4616 int save_noaddr
= flag_dump_noaddr
;
4618 if (flag_dump_final_insns
)
4620 final_output
= fopen (flag_dump_final_insns
, "a");
4623 error ("could not open final insn dump file %qs: %m",
4624 flag_dump_final_insns
);
4625 flag_dump_final_insns
= NULL
;
4629 flag_dump_noaddr
= flag_dump_unnumbered
= 1;
4630 if (flag_compare_debug_opt
|| flag_compare_debug
)
4631 dump_flags
|= TDF_NOUID
;
4632 dump_function_header (final_output
, current_function_decl
,
4634 final_insns_dump_p
= true;
4636 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4638 INSN_UID (insn
) = CODE_LABEL_NUMBER (insn
);
4642 set_block_for_insn (insn
, NULL
);
4643 INSN_UID (insn
) = 0;
4648 /* It is very important to decompose the RTL instruction chain here:
4649 debug information keeps pointing into CODE_LABEL insns inside the function
4650 body. If these remain pointing to the other insns, we end up preserving
4651 whole RTL chain and attached detailed debug info in memory. */
4652 for (insn
= get_insns (); insn
; insn
= next
)
4654 next
= NEXT_INSN (insn
);
4655 SET_NEXT_INSN (insn
) = NULL
;
4656 SET_PREV_INSN (insn
) = NULL
;
4659 && (!NOTE_P (insn
) ||
4660 (NOTE_KIND (insn
) != NOTE_INSN_VAR_LOCATION
4661 && NOTE_KIND (insn
) != NOTE_INSN_CALL_ARG_LOCATION
4662 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_BEG
4663 && NOTE_KIND (insn
) != NOTE_INSN_BLOCK_END
4664 && NOTE_KIND (insn
) != NOTE_INSN_DELETED_DEBUG_LABEL
)))
4665 print_rtl_single (final_output
, insn
);
4670 flag_dump_noaddr
= save_noaddr
;
4671 flag_dump_unnumbered
= save_unnumbered
;
4672 final_insns_dump_p
= false;
4674 if (fclose (final_output
))
4676 error ("could not close final insn dump file %qs: %m",
4677 flag_dump_final_insns
);
4678 flag_dump_final_insns
= NULL
;
4682 /* In case the function was not output,
4683 don't leave any temporary anonymous types
4684 queued up for sdb output. */
4685 if (SDB_DEBUGGING_INFO
&& write_symbols
== SDB_DEBUG
)
4686 sdbout_types (NULL_TREE
);
4688 flag_rerun_cse_after_global_opts
= 0;
4689 reload_completed
= 0;
4690 epilogue_completed
= 0;
4692 regstack_completed
= 0;
4695 /* Clear out the insn_length contents now that they are no
4697 init_insn_lengths ();
4699 /* Show no temporary slots allocated. */
4702 free_bb_for_insn ();
4704 delete_tree_ssa (cfun
);
4706 /* We can reduce stack alignment on call site only when we are sure that
4707 the function body just produced will be actually used in the final
4709 if (decl_binds_to_current_def_p (current_function_decl
))
4711 unsigned int pref
= crtl
->preferred_stack_boundary
;
4712 if (crtl
->stack_alignment_needed
> crtl
->preferred_stack_boundary
)
4713 pref
= crtl
->stack_alignment_needed
;
4714 cgraph_node::rtl_info (current_function_decl
)
4715 ->preferred_incoming_stack_boundary
= pref
;
4718 /* Make sure volatile mem refs aren't considered valid operands for
4719 arithmetic insns. We must call this here if this is a nested inline
4720 function, since the above code leaves us in the init_recog state,
4721 and the function context push/pop code does not save/restore volatile_ok.
4723 ??? Maybe it isn't necessary for expand_start_function to call this
4724 anymore if we do it here? */
4726 init_recog_no_volatile ();
4728 /* We're done with this function. Free up memory if we can. */
4729 free_after_parsing (cfun
);
4730 free_after_compilation (cfun
);
4736 const pass_data pass_data_clean_state
=
4738 RTL_PASS
, /* type */
4739 "*clean_state", /* name */
4740 OPTGROUP_NONE
, /* optinfo_flags */
4741 TV_FINAL
, /* tv_id */
4742 0, /* properties_required */
4743 0, /* properties_provided */
4744 PROP_rtl
, /* properties_destroyed */
4745 0, /* todo_flags_start */
4746 0, /* todo_flags_finish */
4749 class pass_clean_state
: public rtl_opt_pass
4752 pass_clean_state (gcc::context
*ctxt
)
4753 : rtl_opt_pass (pass_data_clean_state
, ctxt
)
4756 /* opt_pass methods: */
4757 virtual unsigned int execute (function
*)
4759 return rest_of_clean_state ();
4762 }; // class pass_clean_state
4767 make_pass_clean_state (gcc::context
*ctxt
)
4769 return new pass_clean_state (ctxt
);
/* Return true if INSN is a call to the current function.  */

static bool
self_recursive_call_p (rtx_insn *insn)
{
  tree fndecl = get_call_fndecl (insn);
  return (fndecl == current_function_decl
	  && decl_binds_to_current_def_p (fndecl));
}
4782 /* Collect hard register usage for the current function. */
4785 collect_fn_hard_reg_usage (void)
4791 struct cgraph_rtl_info
*node
;
4792 HARD_REG_SET function_used_regs
;
4794 /* ??? To be removed when all the ports have been fixed. */
4795 if (!targetm
.call_fusage_contains_non_callee_clobbers
)
4798 CLEAR_HARD_REG_SET (function_used_regs
);
4800 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= next_insn (insn
))
4802 HARD_REG_SET insn_used_regs
;
4804 if (!NONDEBUG_INSN_P (insn
))
4808 && !self_recursive_call_p (insn
))
4810 if (!get_call_reg_set_usage (insn
, &insn_used_regs
,
4814 IOR_HARD_REG_SET (function_used_regs
, insn_used_regs
);
4817 find_all_hard_reg_sets (insn
, &insn_used_regs
, false);
4818 IOR_HARD_REG_SET (function_used_regs
, insn_used_regs
);
4821 /* Be conservative - mark fixed and global registers as used. */
4822 IOR_HARD_REG_SET (function_used_regs
, fixed_reg_set
);
4825 /* Handle STACK_REGS conservatively, since the df-framework does not
4826 provide accurate information for them. */
4828 for (i
= FIRST_STACK_REG
; i
<= LAST_STACK_REG
; i
++)
4829 SET_HARD_REG_BIT (function_used_regs
, i
);
4832 /* The information we have gathered is only interesting if it exposes a
4833 register from the call_used_regs that is not used in this function. */
4834 if (hard_reg_set_subset_p (call_used_reg_set
, function_used_regs
))
4837 node
= cgraph_node::rtl_info (current_function_decl
);
4838 gcc_assert (node
!= NULL
);
4840 COPY_HARD_REG_SET (node
->function_used_regs
, function_used_regs
);
4841 node
->function_used_regs_valid
= 1;
/* Get the declaration of the function called by INSN.  */

static tree
get_call_fndecl (rtx_insn *insn)
{
  rtx note, datum;

  note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
  if (note == NULL_RTX)
    return NULL_TREE;

  datum = XEXP (note, 0);
  if (datum != NULL_RTX)
    return SYMBOL_REF_DECL (datum);

  return NULL_TREE;
}
/* Return the cgraph_rtl_info of the function called by INSN.  Returns NULL for
   call targets that can be overwritten.  */

static struct cgraph_rtl_info *
get_call_cgraph_rtl_info (rtx_insn *insn)
{
  tree fndecl;

  if (insn == NULL_RTX)
    return NULL;

  fndecl = get_call_fndecl (insn);
  if (fndecl == NULL_TREE
      || !decl_binds_to_current_def_p (fndecl))
    return NULL;

  return cgraph_node::rtl_info (fndecl);
}
/* Find hard registers used by function call instruction INSN, and return them
   in REG_SET.  Return DEFAULT_SET in REG_SET if not found.  */

bool
get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
			HARD_REG_SET default_set)
{
  if (flag_ipa_ra)
    {
      struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
      if (node != NULL
	  && node->function_used_regs_valid)
	{
	  COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
	  AND_HARD_REG_SET (*reg_set, default_set);
	  return true;
	}
    }

  COPY_HARD_REG_SET (*reg_set, default_set);
  return false;
}
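#if 0
/* Illustrative sketch, not part of the original sources: a typical caller
   asks which hard registers a call clobbers, falling back to the
   conservative regs_invalidated_by_call set when no IPA-RA information
   is recorded for the callee.  */
HARD_REG_SET clobbered;
get_call_reg_set_usage (call_insn, &clobbered, regs_invalidated_by_call);
#endif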
);