[official-gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
 41 The code for the function prologue and epilogue is generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
50 #include "tree.h"
51 #include "rtl.h"
52 #include "tm_p.h"
53 #include "regs.h"
54 #include "insn-config.h"
55 #include "insn-attr.h"
56 #include "recog.h"
57 #include "conditions.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "output.h"
61 #include "except.h"
62 #include "function.h"
63 #include "rtl-error.h"
64 #include "toplev.h" /* exact_log2, floor_log2 */
65 #include "reload.h"
66 #include "intl.h"
67 #include "basic-block.h"
68 #include "target.h"
69 #include "targhooks.h"
70 #include "debug.h"
71 #include "expr.h"
72 #include "tree-pass.h"
73 #include "tree-flow.h"
74 #include "cgraph.h"
75 #include "coverage.h"
76 #include "df.h"
77 #include "ggc.h"
78 #include "cfgloop.h"
79 #include "params.h"
80 #include "tree-pretty-print.h" /* for dump_function_header */
82 #ifdef XCOFF_DEBUGGING_INFO
83 #include "xcoffout.h" /* Needed for external data
84 declarations for e.g. AIX 4.x. */
85 #endif
87 #include "dwarf2out.h"
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
97 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
98 So define a null default for it to save conditionalization later. */
99 #ifndef CC_STATUS_INIT
100 #define CC_STATUS_INIT
101 #endif
103 /* Is the given character a logical line separator for the assembler? */
104 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
105 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
106 #endif
108 #ifndef JUMP_TABLES_IN_TEXT_SECTION
109 #define JUMP_TABLES_IN_TEXT_SECTION 0
110 #endif
112 /* Bitflags used by final_scan_insn. */
113 #define SEEN_BB 1
114 #define SEEN_NOTE 2
115 #define SEEN_EMITTED 4
117 /* Last insn processed by final_scan_insn. */
118 static rtx debug_insn;
119 rtx current_output_insn;
121 /* Line number of last NOTE. */
122 static int last_linenum;
124 /* Last discriminator written to assembly. */
125 static int last_discriminator;
127 /* Discriminator of current block. */
128 static int discriminator;
130 /* Highest line number in current block. */
131 static int high_block_linenum;
133 /* Likewise for function. */
134 static int high_function_linenum;
136 /* Filename of last NOTE. */
137 static const char *last_filename;
139 /* Override filename and line number. */
140 static const char *override_filename;
141 static int override_linenum;
143 /* Whether to force emission of a line note before the next insn. */
144 static bool force_source_line = false;
146 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
148 /* Nonzero while outputting an `asm' with operands.
149 This means that inconsistencies are the user's fault, so don't die.
150 The precise value is the insn being output, to pass to error_for_asm. */
151 rtx this_is_asm_operands;
153 /* Number of operands of this insn, for an `asm' with operands. */
154 static unsigned int insn_noperands;
156 /* Compare optimization flag. */
158 static rtx last_ignored_compare = 0;
160 /* Assign a unique number to each insn that is output.
161 This can be used to generate unique local labels. */
163 static int insn_counter = 0;
165 #ifdef HAVE_cc0
166 /* This variable contains machine-dependent flags (defined in tm.h)
167 set and examined by output routines
168 that describe how to interpret the condition codes properly. */
170 CC_STATUS cc_status;
172 /* During output of an insn, this contains a copy of cc_status
173 from before the insn. */
175 CC_STATUS cc_prev_status;
176 #endif
178 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
180 static int block_depth;
182 /* Nonzero if have enabled APP processing of our assembler output. */
184 static int app_on;
186 /* If we are outputting an insn sequence, this contains the sequence rtx.
187 Zero otherwise. */
189 rtx final_sequence;
191 #ifdef ASSEMBLER_DIALECT
193 /* Number of the assembler dialect to use, starting at 0. */
194 static int dialect_number;
195 #endif
197 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
198 rtx current_insn_predicate;
200 /* True if printing into -fdump-final-insns= dump. */
201 bool final_insns_dump_p;
203 static int asm_insn_count (rtx);
204 static void profile_function (FILE *);
205 static void profile_after_prologue (FILE *);
206 static bool notice_source_line (rtx, bool *);
207 static rtx walk_alter_subreg (rtx *, bool *);
208 static void output_asm_name (void);
209 static void output_alternate_entry_point (FILE *, rtx);
210 static tree get_mem_expr_from_op (rtx, int *);
211 static void output_asm_operand_names (rtx *, int *, int);
212 #ifdef LEAF_REGISTERS
213 static void leaf_renumber_regs (rtx);
214 #endif
215 #ifdef HAVE_cc0
216 static int alter_cond (rtx);
217 #endif
218 #ifndef ADDR_VEC_ALIGN
219 static int final_addr_vec_align (rtx);
220 #endif
221 static int align_fuzz (rtx, rtx, int, unsigned);
223 /* Initialize data in final at the beginning of a compilation. */
225 void
226 init_final (const char *filename ATTRIBUTE_UNUSED)
228 app_on = 0;
229 final_sequence = 0;
231 #ifdef ASSEMBLER_DIALECT
232 dialect_number = ASSEMBLER_DIALECT;
233 #endif
236 /* Default target function prologue and epilogue assembler output.
238 If not overridden for epilogue code, then the function body itself
239 contains return instructions wherever needed. */
240 void
241 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
246 void
247 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
248 tree decl ATTRIBUTE_UNUSED,
249 bool new_is_cold ATTRIBUTE_UNUSED)
253 /* Default target hook that outputs nothing to a stream. */
254 void
255 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
259 /* Enable APP processing of subsequent output.
260 Used before the output from an `asm' statement. */
262 void
263 app_enable (void)
265 if (! app_on)
267 fputs (ASM_APP_ON, asm_out_file);
268 app_on = 1;
272 /* Disable APP processing of subsequent output.
273 Called from varasm.c before most kinds of output. */
275 void
276 app_disable (void)
278 if (app_on)
280 fputs (ASM_APP_OFF, asm_out_file);
281 app_on = 0;
285 /* Return the number of slots filled in the current
286 delayed branch sequence (we don't count the insn needing the
287 delay slot). Zero if not in a delayed branch sequence. */
289 #ifdef DELAY_SLOTS
291 dbr_sequence_length (void)
293 if (final_sequence != 0)
294 return XVECLEN (final_sequence, 0) - 1;
295 else
296 return 0;
298 #endif
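/* For example, when final_sequence holds a branch with a single filled
   delay slot, XVECLEN (final_sequence, 0) is 2 and dbr_sequence_length
   returns 1; outside a delay-slot sequence it returns 0.  */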
300 /* The next two pages contain routines used to compute the length of an insn
301 and to shorten branches. */
303 /* Arrays for insn lengths, and addresses. The latter is referenced by
304 `insn_current_length'. */
306 static int *insn_lengths;
308 vec<int> insn_addresses_;
310 /* Max uid for which the above arrays are valid. */
311 static int insn_lengths_max_uid;
313 /* Address of insn being processed. Used by `insn_current_length'. */
314 int insn_current_address;
316 /* Address of insn being processed in previous iteration. */
317 int insn_last_address;
319 /* known invariant alignment of insn being processed. */
320 int insn_current_align;
322 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
323 gives the next following alignment insn that increases the known
324 alignment, or NULL_RTX if there is no such insn.
325 For any alignment obtained this way, we can again index uid_align with
326 its uid to obtain the next following align that in turn increases the
327 alignment, till we reach NULL_RTX; the sequence obtained this way
328 for each insn we'll call the alignment chain of this insn in the following
329 comments. */
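/* As an illustration: if insn I is followed first by a label A aligned to
   2^3 bytes and later by a label B aligned to 2^4 bytes, the alignment
   chain is uid_align[INSN_UID (I)] == A, uid_align[INSN_UID (A)] == B,
   and uid_align[INSN_UID (B)] == NULL_RTX.  */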
331 struct label_alignment
333 short alignment;
334 short max_skip;
337 static rtx *uid_align;
338 static int *uid_shuid;
339 static struct label_alignment *label_align;
341 /* Indicate that branch shortening hasn't yet been done. */
343 void
344 init_insn_lengths (void)
346 if (uid_shuid)
348 free (uid_shuid);
349 uid_shuid = 0;
351 if (insn_lengths)
353 free (insn_lengths);
354 insn_lengths = 0;
355 insn_lengths_max_uid = 0;
357 if (HAVE_ATTR_length)
358 INSN_ADDRESSES_FREE ();
359 if (uid_align)
361 free (uid_align);
362 uid_align = 0;
366 /* Obtain the current length of an insn. If branch shortening has been done,
367 get its actual length. Otherwise, use FALLBACK_FN to calculate the
368 length. */
369 static inline int
370 get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
372 rtx body;
373 int i;
374 int length = 0;
376 if (!HAVE_ATTR_length)
377 return 0;
379 if (insn_lengths_max_uid > INSN_UID (insn))
380 return insn_lengths[INSN_UID (insn)];
381 else
382 switch (GET_CODE (insn))
384 case NOTE:
385 case BARRIER:
386 case CODE_LABEL:
387 case DEBUG_INSN:
388 return 0;
390 case CALL_INSN:
391 length = fallback_fn (insn);
392 break;
394 case JUMP_INSN:
395 body = PATTERN (insn);
396 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
398 /* Alignment is machine-dependent and should be handled by
399 ADDR_VEC_ALIGN. */
401 else
402 length = fallback_fn (insn);
403 break;
405 case INSN:
406 body = PATTERN (insn);
407 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
408 return 0;
410 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
411 length = asm_insn_count (body) * fallback_fn (insn);
412 else if (GET_CODE (body) == SEQUENCE)
413 for (i = 0; i < XVECLEN (body, 0); i++)
414 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
415 else
416 length = fallback_fn (insn);
417 break;
419 default:
420 break;
423 #ifdef ADJUST_INSN_LENGTH
424 ADJUST_INSN_LENGTH (insn, length);
425 #endif
426 return length;
429 /* Obtain the current length of an insn. If branch shortening has been done,
430 get its actual length. Otherwise, get its maximum length. */
432 get_attr_length (rtx insn)
434 return get_attr_length_1 (insn, insn_default_length);
437 /* Obtain the current length of an insn. If branch shortening has been done,
438 get its actual length. Otherwise, get its minimum length. */
440 get_attr_min_length (rtx insn)
442 return get_attr_length_1 (insn, insn_min_length);
445 /* Code to handle alignment inside shorten_branches. */
 447 /* Here is an explanation of how the algorithm in align_fuzz can give
448 proper results:
450 Call a sequence of instructions beginning with alignment point X
451 and continuing until the next alignment point `block X'. When `X'
452 is used in an expression, it means the alignment value of the
453 alignment point.
455 Call the distance between the start of the first insn of block X, and
456 the end of the last insn of block X `IX', for the `inner size of X'.
457 This is clearly the sum of the instruction lengths.
459 Likewise with the next alignment-delimited block following X, which we
460 shall call block Y.
462 Call the distance between the start of the first insn of block X, and
463 the start of the first insn of block Y `OX', for the `outer size of X'.
465 The estimated padding is then OX - IX.
467 OX can be safely estimated as
469 if (X >= Y)
470 OX = round_up(IX, Y)
471 else
472 OX = round_up(IX, X) + Y - X
474 Clearly est(IX) >= real(IX), because that only depends on the
475 instruction lengths, and those being overestimated is a given.
477 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
478 we needn't worry about that when thinking about OX.
480 When X >= Y, the alignment provided by Y adds no uncertainty factor
481 for branch ranges starting before X, so we can just round what we have.
482 But when X < Y, we don't know anything about the, so to speak,
483 `middle bits', so we have to assume the worst when aligning up from an
484 address mod X to one mod Y, which is Y - X. */
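/* A worked example: suppose block X is aligned to X = 4, the following
   block Y to Y = 8, and the inner size IX is 10 bytes.  Since X < Y,
   OX = round_up (10, 4) + 8 - 4 = 16, so the estimated padding OX - IX
   is 6 bytes.  Had Y been 4 as well, OX would be round_up (10, 4) = 12
   and the padding only 2 bytes.  */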
486 #ifndef LABEL_ALIGN
487 #define LABEL_ALIGN(LABEL) align_labels_log
488 #endif
490 #ifndef LOOP_ALIGN
491 #define LOOP_ALIGN(LABEL) align_loops_log
492 #endif
494 #ifndef LABEL_ALIGN_AFTER_BARRIER
495 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
496 #endif
498 #ifndef JUMP_ALIGN
499 #define JUMP_ALIGN(LABEL) align_jumps_log
500 #endif
503 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
505 return 0;
509 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
511 return align_loops_max_skip;
515 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
517 return align_labels_max_skip;
521 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
523 return align_jumps_max_skip;
526 #ifndef ADDR_VEC_ALIGN
527 static int
528 final_addr_vec_align (rtx addr_vec)
530 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
532 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
533 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
534 return exact_log2 (align);
538 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
539 #endif
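/* For instance, on a typical target an ADDR_VEC in HImode has
   GET_MODE_SIZE == 2, so the default ADDR_VEC_ALIGN above yields
   exact_log2 (2) == 1, i.e. 2-byte alignment, unless
   BIGGEST_ALIGNMENT / BITS_PER_UNIT caps it lower.  */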
541 #ifndef INSN_LENGTH_ALIGNMENT
542 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
543 #endif
545 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
547 static int min_labelno, max_labelno;
549 #define LABEL_TO_ALIGNMENT(LABEL) \
550 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
552 #define LABEL_TO_MAX_SKIP(LABEL) \
553 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
555 /* For the benefit of port specific code do this also as a function. */
558 label_to_alignment (rtx label)
560 if (CODE_LABEL_NUMBER (label) <= max_labelno)
561 return LABEL_TO_ALIGNMENT (label);
562 return 0;
566 label_to_max_skip (rtx label)
568 if (CODE_LABEL_NUMBER (label) <= max_labelno)
569 return LABEL_TO_MAX_SKIP (label);
570 return 0;
573 /* The differences in addresses
574 between a branch and its target might grow or shrink depending on
575 the alignment the start insn of the range (the branch for a forward
576 branch or the label for a backward branch) starts out on; if these
577 differences are used naively, they can even oscillate infinitely.
578 We therefore want to compute a 'worst case' address difference that
 579 is independent of the alignment the start insn of the range ends
580 up on, and that is at least as large as the actual difference.
581 The function align_fuzz calculates the amount we have to add to the
582 naively computed difference, by traversing the part of the alignment
583 chain of the start insn of the range that is in front of the end insn
584 of the range, and considering for each alignment the maximum amount
585 that it might contribute to a size increase.
587 For casesi tables, we also want to know worst case minimum amounts of
588 address difference, in case a machine description wants to introduce
589 some common offset that is added to all offsets in a table.
590 For this purpose, align_fuzz with a growth argument of 0 computes the
591 appropriate adjustment. */
593 /* Compute the maximum delta by which the difference of the addresses of
594 START and END might grow / shrink due to a different address for start
595 which changes the size of alignment insns between START and END.
596 KNOWN_ALIGN_LOG is the alignment known for START.
597 GROWTH should be ~0 if the objective is to compute potential code size
598 increase, and 0 if the objective is to compute potential shrink.
599 The return value is undefined for any other value of GROWTH. */
601 static int
602 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
604 int uid = INSN_UID (start);
605 rtx align_label;
606 int known_align = 1 << known_align_log;
607 int end_shuid = INSN_SHUID (end);
608 int fuzz = 0;
610 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
612 int align_addr, new_align;
614 uid = INSN_UID (align_label);
615 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
616 if (uid_shuid[uid] > end_shuid)
617 break;
618 known_align_log = LABEL_TO_ALIGNMENT (align_label);
619 new_align = 1 << known_align_log;
620 if (new_align < known_align)
621 continue;
622 fuzz += (-align_addr ^ growth) & (new_align - known_align);
623 known_align = new_align;
625 return fuzz;
628 /* Compute a worst-case reference address of a branch so that it
629 can be safely used in the presence of aligned labels. Since the
630 size of the branch itself is unknown, the size of the branch is
631 not included in the range. I.e. for a forward branch, the reference
632 address is the end address of the branch as known from the previous
633 branch shortening pass, minus a value to account for possible size
634 increase due to alignment. For a backward branch, it is the start
635 address of the branch as known from the current pass, plus a value
636 to account for possible size increase due to alignment.
637 NB.: Therefore, the maximum offset allowed for backward branches needs
638 to exclude the branch size. */
641 insn_current_reference_address (rtx branch)
643 rtx dest, seq;
644 int seq_uid;
646 if (! INSN_ADDRESSES_SET_P ())
647 return 0;
649 seq = NEXT_INSN (PREV_INSN (branch));
650 seq_uid = INSN_UID (seq);
651 if (!JUMP_P (branch))
652 /* This can happen for example on the PA; the objective is to know the
653 offset to address something in front of the start of the function.
654 Thus, we can treat it like a backward branch.
655 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
656 any alignment we'd encounter, so we skip the call to align_fuzz. */
657 return insn_current_address;
658 dest = JUMP_LABEL (branch);
660 /* BRANCH has no proper alignment chain set, so use SEQ.
661 BRANCH also has no INSN_SHUID. */
662 if (INSN_SHUID (seq) < INSN_SHUID (dest))
664 /* Forward branch. */
665 return (insn_last_address + insn_lengths[seq_uid]
666 - align_fuzz (seq, dest, length_unit_log, ~0));
668 else
670 /* Backward branch. */
671 return (insn_current_address
672 + align_fuzz (dest, seq, length_unit_log, ~0));
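/* Numerical example: for a forward branch whose end address was 100 in
   the previous pass, an align_fuzz result of 6 (alignment between the
   branch and its target could add up to 6 bytes) gives a worst-case
   reference address of 94, which makes the branch-to-target distance
   look 6 bytes longer than the naive estimate.  */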
676 /* Compute branch alignments based on frequency information in the
677 CFG. */
679 unsigned int
680 compute_alignments (void)
682 int log, max_skip, max_log;
683 basic_block bb;
684 int freq_max = 0;
685 int freq_threshold = 0;
687 if (label_align)
689 free (label_align);
690 label_align = 0;
693 max_labelno = max_label_num ();
694 min_labelno = get_first_label_num ();
695 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
697 /* If not optimizing or optimizing for size, don't assign any alignments. */
698 if (! optimize || optimize_function_for_size_p (cfun))
699 return 0;
701 if (dump_file)
703 dump_reg_info (dump_file);
704 dump_flow_info (dump_file, TDF_DETAILS);
705 flow_loops_dump (dump_file, NULL, 1);
707 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
708 FOR_EACH_BB (bb)
709 if (bb->frequency > freq_max)
710 freq_max = bb->frequency;
711 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
713 if (dump_file)
714 fprintf(dump_file, "freq_max: %i\n",freq_max);
715 FOR_EACH_BB (bb)
717 rtx label = BB_HEAD (bb);
718 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
719 edge e;
720 edge_iterator ei;
722 if (!LABEL_P (label)
723 || optimize_bb_for_size_p (bb))
725 if (dump_file)
726 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
727 bb->index, bb->frequency, bb->loop_father->num,
728 bb_loop_depth (bb));
729 continue;
731 max_log = LABEL_ALIGN (label);
732 max_skip = targetm.asm_out.label_align_max_skip (label);
734 FOR_EACH_EDGE (e, ei, bb->preds)
736 if (e->flags & EDGE_FALLTHRU)
737 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
738 else
739 branch_frequency += EDGE_FREQUENCY (e);
741 if (dump_file)
743 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
744 bb->index, bb->frequency, bb->loop_father->num,
745 bb_loop_depth (bb),
746 fallthru_frequency, branch_frequency);
747 if (!bb->loop_father->inner && bb->loop_father->num)
748 fprintf (dump_file, " inner_loop");
749 if (bb->loop_father->header == bb)
750 fprintf (dump_file, " loop_header");
751 fprintf (dump_file, "\n");
 754 /* There are two purposes to aligning a block with no fallthru incoming edge:
 755 1) to avoid fetch stalls when the branch destination is near a cache boundary
 756 2) to improve cache efficiency in case the previous block is not executed
 757 (so it does not need to be in the cache).
 759 To catch the first case, we align frequently executed blocks.
 760 To catch the second, we align blocks that are executed more frequently
 761 than the predecessor, when the predecessor is unlikely to be executed
 762 at the time the function is called. */
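/* As a concrete illustration, assuming the default --param align-threshold
   of 100: if the hottest block in the function has frequency 10000,
   freq_threshold is 100, so a block with no fallthru predecessor is given
   jump alignment once its incoming branch frequency exceeds 100.  */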
764 if (!has_fallthru
765 && (branch_frequency > freq_threshold
766 || (bb->frequency > bb->prev_bb->frequency * 10
767 && (bb->prev_bb->frequency
768 <= ENTRY_BLOCK_PTR->frequency / 2))))
770 log = JUMP_ALIGN (label);
771 if (dump_file)
772 fprintf(dump_file, " jump alignment added.\n");
773 if (max_log < log)
775 max_log = log;
776 max_skip = targetm.asm_out.jump_align_max_skip (label);
 779 /* If the block is frequent and reached mostly by a non-fallthru edge,
 780 align it.  It is most likely the first block of a loop. */
781 if (has_fallthru
782 && optimize_bb_for_speed_p (bb)
783 && branch_frequency + fallthru_frequency > freq_threshold
784 && (branch_frequency
785 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
787 log = LOOP_ALIGN (label);
788 if (dump_file)
789 fprintf(dump_file, " internal loop alignment added.\n");
790 if (max_log < log)
792 max_log = log;
793 max_skip = targetm.asm_out.loop_align_max_skip (label);
796 LABEL_TO_ALIGNMENT (label) = max_log;
797 LABEL_TO_MAX_SKIP (label) = max_skip;
800 loop_optimizer_finalize ();
801 free_dominance_info (CDI_DOMINATORS);
802 return 0;
805 struct rtl_opt_pass pass_compute_alignments =
808 RTL_PASS,
809 "alignments", /* name */
810 OPTGROUP_NONE, /* optinfo_flags */
811 NULL, /* gate */
812 compute_alignments, /* execute */
813 NULL, /* sub */
814 NULL, /* next */
815 0, /* static_pass_number */
816 TV_NONE, /* tv_id */
817 0, /* properties_required */
818 0, /* properties_provided */
819 0, /* properties_destroyed */
820 0, /* todo_flags_start */
821 TODO_verify_rtl_sharing
822 | TODO_ggc_collect /* todo_flags_finish */
827 /* Make a pass over all insns and compute their actual lengths by shortening
828 any branches of variable length if possible. */
830 /* shorten_branches might be called multiple times: for example, the SH
831 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
832 In order to do this, it needs proper length information, which it obtains
833 by calling shorten_branches. This cannot be collapsed with
834 shorten_branches itself into a single pass unless we also want to integrate
835 reorg.c, since the branch splitting exposes new instructions with delay
836 slots. */
838 void
839 shorten_branches (rtx first)
841 rtx insn;
842 int max_uid;
843 int i;
844 int max_log;
845 int max_skip;
846 #define MAX_CODE_ALIGN 16
847 rtx seq;
848 int something_changed = 1;
849 char *varying_length;
850 rtx body;
851 int uid;
852 rtx align_tab[MAX_CODE_ALIGN];
854 /* Compute maximum UID and allocate label_align / uid_shuid. */
855 max_uid = get_max_uid ();
857 /* Free uid_shuid before reallocating it. */
858 free (uid_shuid);
860 uid_shuid = XNEWVEC (int, max_uid);
862 if (max_labelno != max_label_num ())
864 int old = max_labelno;
865 int n_labels;
866 int n_old_labels;
868 max_labelno = max_label_num ();
870 n_labels = max_labelno - min_labelno + 1;
871 n_old_labels = old - min_labelno + 1;
873 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
875 /* Range of labels grows monotonically in the function. Failing here
876 means that the initialization of array got lost. */
877 gcc_assert (n_old_labels <= n_labels);
879 memset (label_align + n_old_labels, 0,
880 (n_labels - n_old_labels) * sizeof (struct label_alignment));
883 /* Initialize label_align and set up uid_shuid to be strictly
884 monotonically rising with insn order. */
885 /* We use max_log here to keep track of the maximum alignment we want to
886 impose on the next CODE_LABEL (or the current one if we are processing
887 the CODE_LABEL itself). */
889 max_log = 0;
890 max_skip = 0;
892 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
894 int log;
896 INSN_SHUID (insn) = i++;
897 if (INSN_P (insn))
898 continue;
900 if (LABEL_P (insn))
902 rtx next;
903 bool next_is_jumptable;
905 /* Merge in alignments computed by compute_alignments. */
906 log = LABEL_TO_ALIGNMENT (insn);
907 if (max_log < log)
909 max_log = log;
910 max_skip = LABEL_TO_MAX_SKIP (insn);
913 next = next_nonnote_insn (insn);
914 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
915 if (!next_is_jumptable)
917 log = LABEL_ALIGN (insn);
918 if (max_log < log)
920 max_log = log;
921 max_skip = targetm.asm_out.label_align_max_skip (insn);
924 /* ADDR_VECs only take room if read-only data goes into the text
925 section. */
926 if ((JUMP_TABLES_IN_TEXT_SECTION
927 || readonly_data_section == text_section)
928 && next_is_jumptable)
930 log = ADDR_VEC_ALIGN (next);
931 if (max_log < log)
933 max_log = log;
934 max_skip = targetm.asm_out.label_align_max_skip (insn);
937 LABEL_TO_ALIGNMENT (insn) = max_log;
938 LABEL_TO_MAX_SKIP (insn) = max_skip;
939 max_log = 0;
940 max_skip = 0;
942 else if (BARRIER_P (insn))
944 rtx label;
946 for (label = insn; label && ! INSN_P (label);
947 label = NEXT_INSN (label))
948 if (LABEL_P (label))
950 log = LABEL_ALIGN_AFTER_BARRIER (insn);
951 if (max_log < log)
953 max_log = log;
954 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
956 break;
960 if (!HAVE_ATTR_length)
961 return;
963 /* Allocate the rest of the arrays. */
964 insn_lengths = XNEWVEC (int, max_uid);
965 insn_lengths_max_uid = max_uid;
966 /* Syntax errors can lead to labels being outside of the main insn stream.
967 Initialize insn_addresses, so that we get reproducible results. */
968 INSN_ADDRESSES_ALLOC (max_uid);
970 varying_length = XCNEWVEC (char, max_uid);
972 /* Initialize uid_align. We scan instructions
973 from end to start, and keep in align_tab[n] the last seen insn
974 that does an alignment of at least n+1, i.e. the successor
975 in the alignment chain for an insn that does / has a known
976 alignment of n. */
977 uid_align = XCNEWVEC (rtx, max_uid);
979 for (i = MAX_CODE_ALIGN; --i >= 0;)
980 align_tab[i] = NULL_RTX;
981 seq = get_last_insn ();
982 for (; seq; seq = PREV_INSN (seq))
984 int uid = INSN_UID (seq);
985 int log;
986 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
987 uid_align[uid] = align_tab[0];
988 if (log)
990 /* Found an alignment label. */
991 uid_align[uid] = align_tab[log];
992 for (i = log - 1; i >= 0; i--)
993 align_tab[i] = seq;
997 /* When optimizing, we start assuming minimum length, and keep increasing
998 lengths as we find the need for this, till nothing changes.
999 When not optimizing, we start assuming maximum lengths, and
1000 do a single pass to update the lengths. */
1001 bool increasing = optimize != 0;
1003 #ifdef CASE_VECTOR_SHORTEN_MODE
1004 if (optimize)
1006 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1007 label fields. */
1009 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1010 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1011 int rel;
1013 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1015 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1016 int len, i, min, max, insn_shuid;
1017 int min_align;
1018 addr_diff_vec_flags flags;
1020 if (!JUMP_P (insn)
1021 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1022 continue;
1023 pat = PATTERN (insn);
1024 len = XVECLEN (pat, 1);
1025 gcc_assert (len > 0);
1026 min_align = MAX_CODE_ALIGN;
1027 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1029 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1030 int shuid = INSN_SHUID (lab);
1031 if (shuid < min)
1033 min = shuid;
1034 min_lab = lab;
1036 if (shuid > max)
1038 max = shuid;
1039 max_lab = lab;
1041 if (min_align > LABEL_TO_ALIGNMENT (lab))
1042 min_align = LABEL_TO_ALIGNMENT (lab);
1044 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1045 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1046 insn_shuid = INSN_SHUID (insn);
1047 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1048 memset (&flags, 0, sizeof (flags));
1049 flags.min_align = min_align;
1050 flags.base_after_vec = rel > insn_shuid;
1051 flags.min_after_vec = min > insn_shuid;
1052 flags.max_after_vec = max > insn_shuid;
1053 flags.min_after_base = min > rel;
1054 flags.max_after_base = max > rel;
1055 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1057 if (increasing)
1058 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1061 #endif /* CASE_VECTOR_SHORTEN_MODE */
1063 /* Compute initial lengths, addresses, and varying flags for each insn. */
1064 int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1066 for (insn_current_address = 0, insn = first;
1067 insn != 0;
1068 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1070 uid = INSN_UID (insn);
1072 insn_lengths[uid] = 0;
1074 if (LABEL_P (insn))
1076 int log = LABEL_TO_ALIGNMENT (insn);
1077 if (log)
1079 int align = 1 << log;
1080 int new_address = (insn_current_address + align - 1) & -align;
1081 insn_lengths[uid] = new_address - insn_current_address;
1085 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1087 if (NOTE_P (insn) || BARRIER_P (insn)
1088 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1089 continue;
1090 if (INSN_DELETED_P (insn))
1091 continue;
1093 body = PATTERN (insn);
1094 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1096 /* This only takes room if read-only data goes into the text
1097 section. */
1098 if (JUMP_TABLES_IN_TEXT_SECTION
1099 || readonly_data_section == text_section)
1100 insn_lengths[uid] = (XVECLEN (body,
1101 GET_CODE (body) == ADDR_DIFF_VEC)
1102 * GET_MODE_SIZE (GET_MODE (body)));
1103 /* Alignment is handled by ADDR_VEC_ALIGN. */
1105 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1106 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1107 else if (GET_CODE (body) == SEQUENCE)
1109 int i;
1110 int const_delay_slots;
1111 #ifdef DELAY_SLOTS
1112 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1113 #else
1114 const_delay_slots = 0;
1115 #endif
1116 int (*inner_length_fun) (rtx)
1117 = const_delay_slots ? length_fun : insn_default_length;
1118 /* Inside a delay slot sequence, we do not do any branch shortening
1119 if the shortening could change the number of delay slots
1120 of the branch. */
1121 for (i = 0; i < XVECLEN (body, 0); i++)
1123 rtx inner_insn = XVECEXP (body, 0, i);
1124 int inner_uid = INSN_UID (inner_insn);
1125 int inner_length;
1127 if (GET_CODE (body) == ASM_INPUT
1128 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1129 inner_length = (asm_insn_count (PATTERN (inner_insn))
1130 * insn_default_length (inner_insn));
1131 else
1132 inner_length = inner_length_fun (inner_insn);
1134 insn_lengths[inner_uid] = inner_length;
1135 if (const_delay_slots)
1137 if ((varying_length[inner_uid]
1138 = insn_variable_length_p (inner_insn)) != 0)
1139 varying_length[uid] = 1;
1140 INSN_ADDRESSES (inner_uid) = (insn_current_address
1141 + insn_lengths[uid]);
1143 else
1144 varying_length[inner_uid] = 0;
1145 insn_lengths[uid] += inner_length;
1148 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1150 insn_lengths[uid] = length_fun (insn);
1151 varying_length[uid] = insn_variable_length_p (insn);
1154 /* If needed, do any adjustment. */
1155 #ifdef ADJUST_INSN_LENGTH
1156 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1157 if (insn_lengths[uid] < 0)
1158 fatal_insn ("negative insn length", insn);
1159 #endif
1162 /* Now loop over all the insns finding varying length insns. For each,
1163 get the current insn length. If it has changed, reflect the change.
1164 When nothing changes for a full pass, we are done. */
1166 while (something_changed)
1168 something_changed = 0;
1169 insn_current_align = MAX_CODE_ALIGN - 1;
1170 for (insn_current_address = 0, insn = first;
1171 insn != 0;
1172 insn = NEXT_INSN (insn))
1174 int new_length;
1175 #ifdef ADJUST_INSN_LENGTH
1176 int tmp_length;
1177 #endif
1178 int length_align;
1180 uid = INSN_UID (insn);
1182 if (LABEL_P (insn))
1184 int log = LABEL_TO_ALIGNMENT (insn);
1186 #ifdef CASE_VECTOR_SHORTEN_MODE
1187 /* If the mode of a following jump table was changed, we
1188 may need to update the alignment of this label. */
1189 rtx next;
1190 bool next_is_jumptable;
1192 next = next_nonnote_insn (insn);
1193 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1194 if ((JUMP_TABLES_IN_TEXT_SECTION
1195 || readonly_data_section == text_section)
1196 && next_is_jumptable)
1198 int newlog = ADDR_VEC_ALIGN (next);
1199 if (newlog != log)
1201 log = newlog;
1202 LABEL_TO_ALIGNMENT (insn) = log;
1203 something_changed = 1;
1206 #endif
1208 if (log > insn_current_align)
1210 int align = 1 << log;
1211 int new_address= (insn_current_address + align - 1) & -align;
1212 insn_lengths[uid] = new_address - insn_current_address;
1213 insn_current_align = log;
1214 insn_current_address = new_address;
1216 else
1217 insn_lengths[uid] = 0;
1218 INSN_ADDRESSES (uid) = insn_current_address;
1219 continue;
1222 length_align = INSN_LENGTH_ALIGNMENT (insn);
1223 if (length_align < insn_current_align)
1224 insn_current_align = length_align;
1226 insn_last_address = INSN_ADDRESSES (uid);
1227 INSN_ADDRESSES (uid) = insn_current_address;
1229 #ifdef CASE_VECTOR_SHORTEN_MODE
1230 if (optimize && JUMP_P (insn)
1231 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1233 rtx body = PATTERN (insn);
1234 int old_length = insn_lengths[uid];
1235 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1236 rtx min_lab = XEXP (XEXP (body, 2), 0);
1237 rtx max_lab = XEXP (XEXP (body, 3), 0);
1238 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1239 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1240 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1241 rtx prev;
1242 int rel_align = 0;
1243 addr_diff_vec_flags flags;
1244 enum machine_mode vec_mode;
1246 /* Avoid automatic aggregate initialization. */
1247 flags = ADDR_DIFF_VEC_FLAGS (body);
1249 /* Try to find a known alignment for rel_lab. */
1250 for (prev = rel_lab;
1251 prev
1252 && ! insn_lengths[INSN_UID (prev)]
1253 && ! (varying_length[INSN_UID (prev)] & 1);
1254 prev = PREV_INSN (prev))
1255 if (varying_length[INSN_UID (prev)] & 2)
1257 rel_align = LABEL_TO_ALIGNMENT (prev);
1258 break;
1261 /* See the comment on addr_diff_vec_flags in rtl.h for the
1262 meaning of the flags values. base: REL_LAB vec: INSN */
 1263 /* Anything after INSN still has addresses from the last
1264 pass; adjust these so that they reflect our current
1265 estimate for this pass. */
1266 if (flags.base_after_vec)
1267 rel_addr += insn_current_address - insn_last_address;
1268 if (flags.min_after_vec)
1269 min_addr += insn_current_address - insn_last_address;
1270 if (flags.max_after_vec)
1271 max_addr += insn_current_address - insn_last_address;
1272 /* We want to know the worst case, i.e. lowest possible value
1273 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1274 its offset is positive, and we have to be wary of code shrink;
 1275 otherwise, it is negative, and we have to be wary of code
1276 size increase. */
1277 if (flags.min_after_base)
1279 /* If INSN is between REL_LAB and MIN_LAB, the size
1280 changes we are about to make can change the alignment
1281 within the observed offset, therefore we have to break
1282 it up into two parts that are independent. */
1283 if (! flags.base_after_vec && flags.min_after_vec)
1285 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1286 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1288 else
1289 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1291 else
1293 if (flags.base_after_vec && ! flags.min_after_vec)
1295 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1296 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1298 else
1299 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
 1301 /* Likewise, determine the highest possible value
1302 for the offset of MAX_LAB. */
1303 if (flags.max_after_base)
1305 if (! flags.base_after_vec && flags.max_after_vec)
1307 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1308 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1310 else
1311 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1313 else
1315 if (flags.base_after_vec && ! flags.max_after_vec)
1317 max_addr += align_fuzz (max_lab, insn, 0, 0);
1318 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1320 else
1321 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1323 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1324 max_addr - rel_addr, body);
1325 if (!increasing
1326 || (GET_MODE_SIZE (vec_mode)
1327 >= GET_MODE_SIZE (GET_MODE (body))))
1328 PUT_MODE (body, vec_mode);
1329 if (JUMP_TABLES_IN_TEXT_SECTION
1330 || readonly_data_section == text_section)
1332 insn_lengths[uid]
1333 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1334 insn_current_address += insn_lengths[uid];
1335 if (insn_lengths[uid] != old_length)
1336 something_changed = 1;
1339 continue;
1341 #endif /* CASE_VECTOR_SHORTEN_MODE */
1343 if (! (varying_length[uid]))
1345 if (NONJUMP_INSN_P (insn)
1346 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1348 int i;
1350 body = PATTERN (insn);
1351 for (i = 0; i < XVECLEN (body, 0); i++)
1353 rtx inner_insn = XVECEXP (body, 0, i);
1354 int inner_uid = INSN_UID (inner_insn);
1356 INSN_ADDRESSES (inner_uid) = insn_current_address;
1358 insn_current_address += insn_lengths[inner_uid];
1361 else
1362 insn_current_address += insn_lengths[uid];
1364 continue;
1367 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1369 int i;
1371 body = PATTERN (insn);
1372 new_length = 0;
1373 for (i = 0; i < XVECLEN (body, 0); i++)
1375 rtx inner_insn = XVECEXP (body, 0, i);
1376 int inner_uid = INSN_UID (inner_insn);
1377 int inner_length;
1379 INSN_ADDRESSES (inner_uid) = insn_current_address;
1381 /* insn_current_length returns 0 for insns with a
1382 non-varying length. */
1383 if (! varying_length[inner_uid])
1384 inner_length = insn_lengths[inner_uid];
1385 else
1386 inner_length = insn_current_length (inner_insn);
1388 if (inner_length != insn_lengths[inner_uid])
1390 if (!increasing || inner_length > insn_lengths[inner_uid])
1392 insn_lengths[inner_uid] = inner_length;
1393 something_changed = 1;
1395 else
1396 inner_length = insn_lengths[inner_uid];
1398 insn_current_address += inner_length;
1399 new_length += inner_length;
1402 else
1404 new_length = insn_current_length (insn);
1405 insn_current_address += new_length;
1408 #ifdef ADJUST_INSN_LENGTH
1409 /* If needed, do any adjustment. */
1410 tmp_length = new_length;
1411 ADJUST_INSN_LENGTH (insn, new_length);
1412 insn_current_address += (new_length - tmp_length);
1413 #endif
1415 if (new_length != insn_lengths[uid]
1416 && (!increasing || new_length > insn_lengths[uid]))
1418 insn_lengths[uid] = new_length;
1419 something_changed = 1;
1421 else
1422 insn_current_address += insn_lengths[uid] - new_length;
1424 /* For a non-optimizing compile, do only a single pass. */
1425 if (!increasing)
1426 break;
1429 free (varying_length);
1432 /* Given the body of an INSN known to be generated by an ASM statement, return
1433 the number of machine instructions likely to be generated for this insn.
1434 This is used to compute its length. */
1436 static int
1437 asm_insn_count (rtx body)
1439 const char *templ;
1441 if (GET_CODE (body) == ASM_INPUT)
1442 templ = XSTR (body, 0);
1443 else
1444 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1446 return asm_str_count (templ);
1449 /* Return the number of machine instructions likely to be generated for the
1450 inline-asm template. */
1452 asm_str_count (const char *templ)
1454 int count = 1;
1456 if (!*templ)
1457 return 0;
1459 for (; *templ; templ++)
1460 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1461 || *templ == '\n')
1462 count++;
1464 return count;
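/* For example, asm_str_count ("mov %1, %0\n\tadd %2, %0") returns 2, and
   with the default IS_ASM_LOGICAL_LINE_SEPARATOR the template
   "mov %1, %0; add %2, %0" also counts as 2 instructions.  */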
1467 /* ??? This is probably the wrong place for these. */
1468 /* Structure recording the mapping from source file and directory
1469 names at compile time to those to be embedded in debug
1470 information. */
1471 typedef struct debug_prefix_map
1473 const char *old_prefix;
1474 const char *new_prefix;
1475 size_t old_len;
1476 size_t new_len;
1477 struct debug_prefix_map *next;
1478 } debug_prefix_map;
1480 /* Linked list of such structures. */
1481 debug_prefix_map *debug_prefix_maps;
1484 /* Record a debug file prefix mapping. ARG is the argument to
1485 -fdebug-prefix-map and must be of the form OLD=NEW. */
1487 void
1488 add_debug_prefix_map (const char *arg)
1490 debug_prefix_map *map;
1491 const char *p;
1493 p = strchr (arg, '=');
1494 if (!p)
1496 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1497 return;
1499 map = XNEW (debug_prefix_map);
1500 map->old_prefix = xstrndup (arg, p - arg);
1501 map->old_len = p - arg;
1502 p++;
1503 map->new_prefix = xstrdup (p);
1504 map->new_len = strlen (p);
1505 map->next = debug_prefix_maps;
1506 debug_prefix_maps = map;
1509 /* Perform user-specified mapping of debug filename prefixes. Return
1510 the new name corresponding to FILENAME. */
1512 const char *
1513 remap_debug_filename (const char *filename)
1515 debug_prefix_map *map;
1516 char *s;
1517 const char *name;
1518 size_t name_len;
1520 for (map = debug_prefix_maps; map; map = map->next)
1521 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1522 break;
1523 if (!map)
1524 return filename;
1525 name = filename + map->old_len;
1526 name_len = strlen (name) + 1;
1527 s = (char *) alloca (name_len + map->new_len);
1528 memcpy (s, map->new_prefix, map->new_len);
1529 memcpy (s + map->new_len, name, name_len);
1530 return ggc_strdup (s);
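/* Example: after -fdebug-prefix-map=/tmp/build=/usr/src, a map entry with
   old_prefix "/tmp/build" (old_len 10) and new_prefix "/usr/src" rewrites
   "/tmp/build/gcc/final.c" to "/usr/src/gcc/final.c" in the debug info.  */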
1533 /* Return true if DWARF2 debug info can be emitted for DECL. */
1535 static bool
1536 dwarf2_debug_info_emitted_p (tree decl)
1538 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1539 return false;
1541 if (DECL_IGNORED_P (decl))
1542 return false;
1544 return true;
1547 /* Return scope resulting from combination of S1 and S2. */
1548 static tree
1549 choose_inner_scope (tree s1, tree s2)
1551 if (!s1)
1552 return s2;
1553 if (!s2)
1554 return s1;
1555 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1556 return s1;
1557 return s2;
1560 /* Emit lexical block notes needed to change scope from S1 to S2. */
1562 static void
1563 change_scope (rtx orig_insn, tree s1, tree s2)
1565 rtx insn = orig_insn;
1566 tree com = NULL_TREE;
1567 tree ts1 = s1, ts2 = s2;
1568 tree s;
1570 while (ts1 != ts2)
1572 gcc_assert (ts1 && ts2);
1573 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1574 ts1 = BLOCK_SUPERCONTEXT (ts1);
1575 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1576 ts2 = BLOCK_SUPERCONTEXT (ts2);
1577 else
1579 ts1 = BLOCK_SUPERCONTEXT (ts1);
1580 ts2 = BLOCK_SUPERCONTEXT (ts2);
1583 com = ts1;
1585 /* Close scopes. */
1586 s = s1;
1587 while (s != com)
1589 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1590 NOTE_BLOCK (note) = s;
1591 s = BLOCK_SUPERCONTEXT (s);
1594 /* Open scopes. */
1595 s = s2;
1596 while (s != com)
1598 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1599 NOTE_BLOCK (insn) = s;
1600 s = BLOCK_SUPERCONTEXT (s);
1604 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1605 on the scope tree and the newly reordered instructions. */
1607 static void
1608 reemit_insn_block_notes (void)
1610 tree cur_block = DECL_INITIAL (cfun->decl);
1611 rtx insn, note;
1613 insn = get_insns ();
1614 if (!active_insn_p (insn))
1615 insn = next_active_insn (insn);
1616 for (; insn; insn = next_active_insn (insn))
1618 tree this_block;
1620 /* Avoid putting scope notes between jump table and its label. */
1621 if (JUMP_TABLE_DATA_P (insn))
1622 continue;
1624 this_block = insn_scope (insn);
1625 /* For sequences compute scope resulting from merging all scopes
1626 of instructions nested inside. */
1627 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
1629 int i;
1630 rtx body = PATTERN (insn);
1632 this_block = NULL;
1633 for (i = 0; i < XVECLEN (body, 0); i++)
1634 this_block = choose_inner_scope (this_block,
1635 insn_scope (XVECEXP (body, 0, i)));
1637 if (! this_block)
1639 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1640 continue;
1641 else
1642 this_block = DECL_INITIAL (cfun->decl);
1645 if (this_block != cur_block)
1647 change_scope (insn, cur_block, this_block);
1648 cur_block = this_block;
1652 /* change_scope emits before the insn, not after. */
1653 note = emit_note (NOTE_INSN_DELETED);
1654 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1655 delete_insn (note);
1657 reorder_blocks ();
1660 /* Output assembler code for the start of a function,
1661 and initialize some of the variables in this file
1662 for the new function. The label for the function and associated
1663 assembler pseudo-ops have already been output in `assemble_start_function'.
1665 FIRST is the first insn of the rtl for the function being compiled.
1666 FILE is the file to write assembler code to.
1667 OPTIMIZE_P is nonzero if we should eliminate redundant
1668 test and compare insns. */
1670 void
1671 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1672 int optimize_p ATTRIBUTE_UNUSED)
1674 block_depth = 0;
1676 this_is_asm_operands = 0;
1678 last_filename = LOCATION_FILE (prologue_location);
1679 last_linenum = LOCATION_LINE (prologue_location);
1680 last_discriminator = discriminator = 0;
1682 high_block_linenum = high_function_linenum = last_linenum;
1684 if (!DECL_IGNORED_P (current_function_decl))
1685 debug_hooks->begin_prologue (last_linenum, last_filename);
1687 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1688 dwarf2out_begin_prologue (0, NULL);
1690 #ifdef LEAF_REG_REMAP
1691 if (crtl->uses_only_leaf_regs)
1692 leaf_renumber_regs (first);
1693 #endif
1695 /* The Sun386i and perhaps other machines don't work right
1696 if the profiling code comes after the prologue. */
1697 if (targetm.profile_before_prologue () && crtl->profile)
1698 profile_function (file);
1700 /* If debugging, assign block numbers to all of the blocks in this
1701 function. */
1702 if (write_symbols)
1704 reemit_insn_block_notes ();
1705 number_blocks (current_function_decl);
1706 /* We never actually put out begin/end notes for the top-level
1707 block in the function. But, conceptually, that block is
1708 always needed. */
1709 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1712 if (warn_frame_larger_than
1713 && get_frame_size () > frame_larger_than_size)
1715 /* Issue a warning */
1716 warning (OPT_Wframe_larger_than_,
1717 "the frame size of %wd bytes is larger than %wd bytes",
1718 get_frame_size (), frame_larger_than_size);
1721 /* First output the function prologue: code to set up the stack frame. */
1722 targetm.asm_out.function_prologue (file, get_frame_size ());
1724 /* If the machine represents the prologue as RTL, the profiling code must
1725 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1726 #ifdef HAVE_prologue
1727 if (! HAVE_prologue)
1728 #endif
1729 profile_after_prologue (file);
1732 static void
1733 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1735 if (!targetm.profile_before_prologue () && crtl->profile)
1736 profile_function (file);
1739 static void
1740 profile_function (FILE *file ATTRIBUTE_UNUSED)
1742 #ifndef NO_PROFILE_COUNTERS
1743 # define NO_PROFILE_COUNTERS 0
1744 #endif
1745 #ifdef ASM_OUTPUT_REG_PUSH
1746 rtx sval = NULL, chain = NULL;
1748 if (cfun->returns_struct)
1749 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1750 true);
1751 if (cfun->static_chain_decl)
1752 chain = targetm.calls.static_chain (current_function_decl, true);
1753 #endif /* ASM_OUTPUT_REG_PUSH */
1755 if (! NO_PROFILE_COUNTERS)
1757 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1758 switch_to_section (data_section);
1759 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1760 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1761 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1764 switch_to_section (current_function_section ());
1766 #ifdef ASM_OUTPUT_REG_PUSH
1767 if (sval && REG_P (sval))
1768 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1769 if (chain && REG_P (chain))
1770 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1771 #endif
1773 FUNCTION_PROFILER (file, current_function_funcdef_no);
1775 #ifdef ASM_OUTPUT_REG_PUSH
1776 if (chain && REG_P (chain))
1777 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1778 if (sval && REG_P (sval))
1779 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1780 #endif
1783 /* Output assembler code for the end of a function.
1784 For clarity, args are same as those of `final_start_function'
1785 even though not all of them are needed. */
1787 void
1788 final_end_function (void)
1790 app_disable ();
1792 if (!DECL_IGNORED_P (current_function_decl))
1793 debug_hooks->end_function (high_function_linenum);
1795 /* Finally, output the function epilogue:
1796 code to restore the stack frame and return to the caller. */
1797 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1799 /* And debug output. */
1800 if (!DECL_IGNORED_P (current_function_decl))
1801 debug_hooks->end_epilogue (last_linenum, last_filename);
1803 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1804 && dwarf2out_do_frame ())
1805 dwarf2out_end_epilogue (last_linenum, last_filename);
1809 /* Dumper helper for basic block information. FILE is the assembly
1810 output file, and INSN is the instruction being emitted. */
1812 static void
1813 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1814 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1816 basic_block bb;
1818 if (!flag_debug_asm)
1819 return;
1821 if (INSN_UID (insn) < bb_map_size
1822 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1824 edge e;
1825 edge_iterator ei;
1827 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1828 if (bb->frequency)
1829 fprintf (file, " freq:%d", bb->frequency);
1830 if (bb->count)
1831 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1832 bb->count);
1833 fprintf (file, " seq:%d", (*bb_seqn)++);
1834 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1835 FOR_EACH_EDGE (e, ei, bb->preds)
1837 dump_edge_info (file, e, TDF_DETAILS, 0);
1839 fprintf (file, "\n");
1841 if (INSN_UID (insn) < bb_map_size
1842 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1844 edge e;
1845 edge_iterator ei;
1847 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1848 FOR_EACH_EDGE (e, ei, bb->succs)
1850 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1852 fprintf (file, "\n");
1856 /* Output assembler code for some insns: all or part of a function.
1857 For description of args, see `final_start_function', above. */
1859 void
1860 final (rtx first, FILE *file, int optimize_p)
1862 rtx insn, next;
1863 int seen = 0;
1865 /* Used for -dA dump. */
1866 basic_block *start_to_bb = NULL;
1867 basic_block *end_to_bb = NULL;
1868 int bb_map_size = 0;
1869 int bb_seqn = 0;
1871 last_ignored_compare = 0;
1873 #ifdef HAVE_cc0
1874 for (insn = first; insn; insn = NEXT_INSN (insn))
1876 /* If CC tracking across branches is enabled, record the insn which
1877 jumps to each branch only reached from one place. */
1878 if (optimize_p && JUMP_P (insn))
1880 rtx lab = JUMP_LABEL (insn);
1881 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1883 LABEL_REFS (lab) = insn;
1887 #endif
1889 init_recog ();
1891 CC_STATUS_INIT;
1893 if (flag_debug_asm)
1895 basic_block bb;
1897 bb_map_size = get_max_uid () + 1;
1898 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1899 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1901 /* There is no cfg for a thunk. */
1902 if (!cfun->is_thunk)
1903 FOR_EACH_BB_REVERSE (bb)
1905 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1906 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1910 /* Output the insns. */
1911 for (insn = first; insn;)
1913 if (HAVE_ATTR_length)
1915 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1917 /* This can be triggered by bugs elsewhere in the compiler if
1918 new insns are created after init_insn_lengths is called. */
1919 gcc_assert (NOTE_P (insn));
1920 insn_current_address = -1;
1922 else
1923 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1926 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1927 bb_map_size, &bb_seqn);
1928 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1931 if (flag_debug_asm)
1933 free (start_to_bb);
1934 free (end_to_bb);
1937 /* Remove CFI notes, to avoid compare-debug failures. */
1938 for (insn = first; insn; insn = next)
1940 next = NEXT_INSN (insn);
1941 if (NOTE_P (insn)
1942 && (NOTE_KIND (insn) == NOTE_INSN_CFI
1943 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
1944 delete_insn (insn);
1948 const char *
1949 get_insn_template (int code, rtx insn)
1951 switch (insn_data[code].output_format)
1953 case INSN_OUTPUT_FORMAT_SINGLE:
1954 return insn_data[code].output.single;
1955 case INSN_OUTPUT_FORMAT_MULTI:
1956 return insn_data[code].output.multi[which_alternative];
1957 case INSN_OUTPUT_FORMAT_FUNCTION:
1958 gcc_assert (insn);
1959 return (*insn_data[code].output.function) (recog_data.operand, insn);
1961 default:
1962 gcc_unreachable ();
1966 /* Emit the appropriate declaration for an alternate-entry-point
1967 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1968 LABEL_KIND != LABEL_NORMAL.
1970 The case fall-through in this function is intentional. */
1971 static void
1972 output_alternate_entry_point (FILE *file, rtx insn)
1974 const char *name = LABEL_NAME (insn);
1976 switch (LABEL_KIND (insn))
1978 case LABEL_WEAK_ENTRY:
1979 #ifdef ASM_WEAKEN_LABEL
1980 ASM_WEAKEN_LABEL (file, name);
1981 #endif
1982 case LABEL_GLOBAL_ENTRY:
1983 targetm.asm_out.globalize_label (file, name);
1984 case LABEL_STATIC_ENTRY:
1985 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1986 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1987 #endif
1988 ASM_OUTPUT_LABEL (file, name);
1989 break;
1991 case LABEL_NORMAL:
1992 default:
1993 gcc_unreachable ();
1997 /* Given a CALL_INSN, find and return the nested CALL. */
1998 static rtx
1999 call_from_call_insn (rtx insn)
2001 rtx x;
2002 gcc_assert (CALL_P (insn));
2003 x = PATTERN (insn);
2005 while (GET_CODE (x) != CALL)
2007 switch (GET_CODE (x))
2009 default:
2010 gcc_unreachable ();
2011 case COND_EXEC:
2012 x = COND_EXEC_CODE (x);
2013 break;
2014 case PARALLEL:
2015 x = XVECEXP (x, 0, 0);
2016 break;
2017 case SET:
2018 x = XEXP (x, 1);
2019 break;
2022 return x;
2025 /* The final scan for one insn, INSN.
2026 Args are same as in `final', except that INSN
2027 is the insn being scanned.
2028 Value returned is the next insn to be scanned.
2030 NOPEEPHOLES is the flag to disallow peephole processing (currently
2031 used only within delayed branch sequence output).
2033 SEEN is used to track the end of the prologue, for emitting
2034 debug information. We force the emission of a line note after
2035 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2036 at the beginning of the second basic block, whichever comes
2037 first. */
2040 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2041 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2043 #ifdef HAVE_cc0
2044 rtx set;
2045 #endif
2046 rtx next;
2048 insn_counter++;
2050 /* Ignore deleted insns. These can occur when we split insns (due to a
2051 template of "#") while not optimizing. */
2052 if (INSN_DELETED_P (insn))
2053 return NEXT_INSN (insn);
2055 switch (GET_CODE (insn))
2057 case NOTE:
2058 switch (NOTE_KIND (insn))
2060 case NOTE_INSN_DELETED:
2061 break;
2063 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2064 in_cold_section_p = !in_cold_section_p;
2066 if (dwarf2out_do_frame ())
2067 dwarf2out_switch_text_section ();
2068 else if (!DECL_IGNORED_P (current_function_decl))
2069 debug_hooks->switch_text_section ();
2071 switch_to_section (current_function_section ());
2072 targetm.asm_out.function_switched_text_sections (asm_out_file,
2073 current_function_decl,
2074 in_cold_section_p);
2075 break;
2077 case NOTE_INSN_BASIC_BLOCK:
2078 if (targetm.asm_out.unwind_emit)
2079 targetm.asm_out.unwind_emit (asm_out_file, insn);
2081 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
2083 *seen |= SEEN_EMITTED;
2084 force_source_line = true;
2086 else
2087 *seen |= SEEN_BB;
2089 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2091 break;
2093 case NOTE_INSN_EH_REGION_BEG:
2094 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2095 NOTE_EH_HANDLER (insn));
2096 break;
2098 case NOTE_INSN_EH_REGION_END:
2099 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2100 NOTE_EH_HANDLER (insn));
2101 break;
2103 case NOTE_INSN_PROLOGUE_END:
2104 targetm.asm_out.function_end_prologue (file);
2105 profile_after_prologue (file);
2107 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2109 *seen |= SEEN_EMITTED;
2110 force_source_line = true;
2112 else
2113 *seen |= SEEN_NOTE;
2115 break;
2117 case NOTE_INSN_EPILOGUE_BEG:
2118 if (!DECL_IGNORED_P (current_function_decl))
2119 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2120 targetm.asm_out.function_begin_epilogue (file);
2121 break;
2123 case NOTE_INSN_CFI:
2124 dwarf2out_emit_cfi (NOTE_CFI (insn));
2125 break;
2127 case NOTE_INSN_CFI_LABEL:
2128 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2129 NOTE_LABEL_NUMBER (insn));
2130 break;
2132 case NOTE_INSN_FUNCTION_BEG:
2133 app_disable ();
2134 if (!DECL_IGNORED_P (current_function_decl))
2135 debug_hooks->end_prologue (last_linenum, last_filename);
2137 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2139 *seen |= SEEN_EMITTED;
2140 force_source_line = true;
2142 else
2143 *seen |= SEEN_NOTE;
2145 break;
2147 case NOTE_INSN_BLOCK_BEG:
2148 if (debug_info_level == DINFO_LEVEL_NORMAL
2149 || debug_info_level == DINFO_LEVEL_VERBOSE
2150 || write_symbols == DWARF2_DEBUG
2151 || write_symbols == VMS_AND_DWARF2_DEBUG
2152 || write_symbols == VMS_DEBUG)
2154 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2156 app_disable ();
2157 ++block_depth;
2158 high_block_linenum = last_linenum;
2160 /* Output debugging info about the symbol-block beginning. */
2161 if (!DECL_IGNORED_P (current_function_decl))
2162 debug_hooks->begin_block (last_linenum, n);
2164 /* Mark this block as output. */
2165 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2167 if (write_symbols == DBX_DEBUG
2168 || write_symbols == SDB_DEBUG)
2170 location_t *locus_ptr
2171 = block_nonartificial_location (NOTE_BLOCK (insn));
2173 if (locus_ptr != NULL)
2175 override_filename = LOCATION_FILE (*locus_ptr);
2176 override_linenum = LOCATION_LINE (*locus_ptr);
2179 break;
2181 case NOTE_INSN_BLOCK_END:
2182 if (debug_info_level == DINFO_LEVEL_NORMAL
2183 || debug_info_level == DINFO_LEVEL_VERBOSE
2184 || write_symbols == DWARF2_DEBUG
2185 || write_symbols == VMS_AND_DWARF2_DEBUG
2186 || write_symbols == VMS_DEBUG)
2188 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2190 app_disable ();
2192 /* End of a symbol-block. */
2193 --block_depth;
2194 gcc_assert (block_depth >= 0);
2196 if (!DECL_IGNORED_P (current_function_decl))
2197 debug_hooks->end_block (high_block_linenum, n);
2199 if (write_symbols == DBX_DEBUG
2200 || write_symbols == SDB_DEBUG)
2202 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2203 location_t *locus_ptr
2204 = block_nonartificial_location (outer_block);
2206 if (locus_ptr != NULL)
2208 override_filename = LOCATION_FILE (*locus_ptr);
2209 override_linenum = LOCATION_LINE (*locus_ptr);
2211 else
2213 override_filename = NULL;
2214 override_linenum = 0;
2217 break;
2219 case NOTE_INSN_DELETED_LABEL:
2220 /* Emit the label. We may have deleted the CODE_LABEL because
2221 the label could be proved to be unreachable, though still
2222 referenced (in the form of having its address taken). */
2223 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2224 break;
2226 case NOTE_INSN_DELETED_DEBUG_LABEL:
2227 /* Similarly, but we need to use a different namespace for it. */
2228 if (CODE_LABEL_NUMBER (insn) != -1)
2229 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2230 break;
2232 case NOTE_INSN_VAR_LOCATION:
2233 case NOTE_INSN_CALL_ARG_LOCATION:
2234 if (!DECL_IGNORED_P (current_function_decl))
2235 debug_hooks->var_location (insn);
2236 break;
2238 default:
2239 gcc_unreachable ();
2240 break;
2242 break;
2244 case BARRIER:
2245 break;
2247 case CODE_LABEL:
2248 /* The target port might emit labels in the output function for
2249 some insn, e.g. sh.c output_branchy_insn. */
2250 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2252 int align = LABEL_TO_ALIGNMENT (insn);
2253 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2254 int max_skip = LABEL_TO_MAX_SKIP (insn);
2255 #endif
2257 if (align && NEXT_INSN (insn))
2259 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2260 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2261 #else
2262 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2263 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2264 #else
2265 ASM_OUTPUT_ALIGN (file, align);
2266 #endif
2267 #endif
2270 CC_STATUS_INIT;
2272 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2273 debug_hooks->label (insn);
2275 app_disable ();
2277 next = next_nonnote_insn (insn);
2278 /* If this label is followed by a jump-table, make sure we put
2279 the label in the read-only section. Also possibly write the
2280 label and jump table together. */
2281 if (next != 0 && JUMP_TABLE_DATA_P (next))
2283 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2284 /* In this case, the case vector is being moved by the
2285 target, so don't output the label at all. Leave that
2286 to the back end macros. */
2287 #else
2288 if (! JUMP_TABLES_IN_TEXT_SECTION)
2290 int log_align;
2292 switch_to_section (targetm.asm_out.function_rodata_section
2293 (current_function_decl));
2295 #ifdef ADDR_VEC_ALIGN
2296 log_align = ADDR_VEC_ALIGN (next);
2297 #else
2298 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2299 #endif
2300 ASM_OUTPUT_ALIGN (file, log_align);
2302 else
2303 switch_to_section (current_function_section ());
2305 #ifdef ASM_OUTPUT_CASE_LABEL
2306 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2307 next);
2308 #else
2309 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2310 #endif
2311 #endif
2312 break;
2314 if (LABEL_ALT_ENTRY_P (insn))
2315 output_alternate_entry_point (file, insn);
2316 else
2317 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2318 break;
2320 default:
2322 rtx body = PATTERN (insn);
2323 int insn_code_number;
2324 const char *templ;
2325 bool is_stmt;
2327 /* Reset this early so it is correct for ASM statements. */
2328 current_insn_predicate = NULL_RTX;
2330 /* An INSN, JUMP_INSN or CALL_INSN.
2331 First check for special kinds that recog doesn't recognize. */
2333 if (GET_CODE (body) == USE /* These are just declarations. */
2334 || GET_CODE (body) == CLOBBER)
2335 break;
2337 #ifdef HAVE_cc0
2339 /* If there is a REG_CC_SETTER note on this insn, it means that
2340 the setting of the condition code was done in the delay slot
2341 of the insn that branched here. So recover the cc status
2342 from the insn that set it. */
2344 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2345 if (note)
2347 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2348 cc_prev_status = cc_status;
2351 #endif
2353 /* Detect insns that are really jump-tables
2354 and output them as such. */
2356 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2358 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2359 int vlen, idx;
2360 #endif
2362 if (! JUMP_TABLES_IN_TEXT_SECTION)
2363 switch_to_section (targetm.asm_out.function_rodata_section
2364 (current_function_decl));
2365 else
2366 switch_to_section (current_function_section ());
2368 app_disable ();
2370 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2371 if (GET_CODE (body) == ADDR_VEC)
2373 #ifdef ASM_OUTPUT_ADDR_VEC
2374 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2375 #else
2376 gcc_unreachable ();
2377 #endif
2379 else
2381 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2382 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2383 #else
2384 gcc_unreachable ();
2385 #endif
2387 #else
2388 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2389 for (idx = 0; idx < vlen; idx++)
2391 if (GET_CODE (body) == ADDR_VEC)
2393 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2394 ASM_OUTPUT_ADDR_VEC_ELT
2395 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2396 #else
2397 gcc_unreachable ();
2398 #endif
2400 else
2402 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2403 ASM_OUTPUT_ADDR_DIFF_ELT
2404 (file,
2405 body,
2406 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2407 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2408 #else
2409 gcc_unreachable ();
2410 #endif
2413 #ifdef ASM_OUTPUT_CASE_END
2414 ASM_OUTPUT_CASE_END (file,
2415 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2416 insn);
2417 #endif
2418 #endif
2420 switch_to_section (current_function_section ());
2422 break;
2424 /* Output this line note if it is the first or the last line
2425 note in a row. */
2426 if (!DECL_IGNORED_P (current_function_decl)
2427 && notice_source_line (insn, &is_stmt))
2428 (*debug_hooks->source_line) (last_linenum, last_filename,
2429 last_discriminator, is_stmt);
2431 if (GET_CODE (body) == ASM_INPUT)
2433 const char *string = XSTR (body, 0);
2435 /* There's no telling what that did to the condition codes. */
2436 CC_STATUS_INIT;
2438 if (string[0])
2440 expanded_location loc;
2442 app_enable ();
2443 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2444 if (*loc.file && loc.line)
2445 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2446 ASM_COMMENT_START, loc.line, loc.file);
2447 fprintf (asm_out_file, "\t%s\n", string);
2448 #if HAVE_AS_LINE_ZERO
2449 if (*loc.file && loc.line)
2450 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2451 #endif
2453 break;
2456 /* Detect `asm' construct with operands. */
2457 if (asm_noperands (body) >= 0)
2459 unsigned int noperands = asm_noperands (body);
2460 rtx *ops = XALLOCAVEC (rtx, noperands);
2461 const char *string;
2462 location_t loc;
2463 expanded_location expanded;
2465 /* There's no telling what that did to the condition codes. */
2466 CC_STATUS_INIT;
2468 /* Get out the operand values. */
2469 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2470 /* Inhibit dying on what would otherwise be compiler bugs. */
2471 insn_noperands = noperands;
2472 this_is_asm_operands = insn;
2473 expanded = expand_location (loc);
2475 #ifdef FINAL_PRESCAN_INSN
2476 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2477 #endif
2479 /* Output the insn using them. */
2480 if (string[0])
2482 app_enable ();
2483 if (expanded.file && expanded.line)
2484 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2485 ASM_COMMENT_START, expanded.line, expanded.file);
2486 output_asm_insn (string, ops);
2487 #if HAVE_AS_LINE_ZERO
2488 if (expanded.file && expanded.line)
2489 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2490 #endif
2493 if (targetm.asm_out.final_postscan_insn)
2494 targetm.asm_out.final_postscan_insn (file, insn, ops,
2495 insn_noperands);
2497 this_is_asm_operands = 0;
2498 break;
2501 app_disable ();
2503 if (GET_CODE (body) == SEQUENCE)
2505 /* A delayed-branch sequence */
2506 int i;
2508 final_sequence = body;
2510 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2511 force the restoration of a comparison that was previously
2512 thought unnecessary. If that happens, cancel this sequence
2513 and cause that insn to be restored. */
2515 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2516 if (next != XVECEXP (body, 0, 1))
2518 final_sequence = 0;
2519 return next;
2522 for (i = 1; i < XVECLEN (body, 0); i++)
2524 rtx insn = XVECEXP (body, 0, i);
2525 rtx next = NEXT_INSN (insn);
2526 /* We loop in case any instruction in a delay slot gets
2527 split. */
2529 insn = final_scan_insn (insn, file, 0, 1, seen);
2530 while (insn != next);
2532 #ifdef DBR_OUTPUT_SEQEND
2533 DBR_OUTPUT_SEQEND (file);
2534 #endif
2535 final_sequence = 0;
2537 /* If the insn requiring the delay slot was a CALL_INSN, the
2538 insns in the delay slot are actually executed before the
2539 called function. Hence we don't preserve any CC-setting
2540 actions in these insns and the CC must be marked as being
2541 clobbered by the function. */
2542 if (CALL_P (XVECEXP (body, 0, 0)))
2544 CC_STATUS_INIT;
2546 break;
2549 /* We have a real machine instruction as rtl. */
2551 body = PATTERN (insn);
2553 #ifdef HAVE_cc0
2554 set = single_set (insn);
2556 /* Check for redundant test and compare instructions
2557 (when the condition codes are already set up as desired).
2558 This is done only when optimizing; if not optimizing,
2559 it should be possible for the user to alter a variable
2560 with the debugger in between statements
2561 and the next statement should reexamine the variable
2562 to compute the condition codes. */
2564 if (optimize_p)
2566 if (set
2567 && GET_CODE (SET_DEST (set)) == CC0
2568 && insn != last_ignored_compare)
2570 rtx src1, src2;
2571 if (GET_CODE (SET_SRC (set)) == SUBREG)
2572 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2574 src1 = SET_SRC (set);
2575 src2 = NULL_RTX;
2576 if (GET_CODE (SET_SRC (set)) == COMPARE)
2578 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2579 XEXP (SET_SRC (set), 0)
2580 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2581 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2582 XEXP (SET_SRC (set), 1)
2583 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2584 if (XEXP (SET_SRC (set), 1)
2585 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2586 src2 = XEXP (SET_SRC (set), 0);
2588 if ((cc_status.value1 != 0
2589 && rtx_equal_p (src1, cc_status.value1))
2590 || (cc_status.value2 != 0
2591 && rtx_equal_p (src1, cc_status.value2))
2592 || (src2 != 0 && cc_status.value1 != 0
2593 && rtx_equal_p (src2, cc_status.value1))
2594 || (src2 != 0 && cc_status.value2 != 0
2595 && rtx_equal_p (src2, cc_status.value2)))
2597 /* Don't delete insn if it has an addressing side-effect. */
2598 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2599 /* or if anything in it is volatile. */
2600 && ! volatile_refs_p (PATTERN (insn)))
2602 /* We don't really delete the insn; just ignore it. */
2603 last_ignored_compare = insn;
2604 break;
2610 /* If this is a conditional branch, maybe modify it
2611 if the cc's are in a nonstandard state
2612 so that it accomplishes the same thing that it would
2613 do straightforwardly if the cc's were set up normally. */
2615 if (cc_status.flags != 0
2616 && JUMP_P (insn)
2617 && GET_CODE (body) == SET
2618 && SET_DEST (body) == pc_rtx
2619 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2620 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2621 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2623 /* This function may alter the contents of its argument
2624 and clear some of the cc_status.flags bits.
2625 It may also return 1 meaning condition now always true
2626 or -1 meaning condition now always false
2627 or 2 meaning condition nontrivial but altered. */
2628 int result = alter_cond (XEXP (SET_SRC (body), 0));
2629 /* If condition now has fixed value, replace the IF_THEN_ELSE
2630 with its then-operand or its else-operand. */
2631 if (result == 1)
2632 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2633 if (result == -1)
2634 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2636 /* The jump is now either unconditional or a no-op.
2637 If it has become a no-op, don't try to output it.
2638 (It would not be recognized.) */
2639 if (SET_SRC (body) == pc_rtx)
2641 delete_insn (insn);
2642 break;
2644 else if (ANY_RETURN_P (SET_SRC (body)))
2645 /* Replace (set (pc) (return)) with (return). */
2646 PATTERN (insn) = body = SET_SRC (body);
2648 /* Rerecognize the instruction if it has changed. */
2649 if (result != 0)
2650 INSN_CODE (insn) = -1;
2653 /* If this is a conditional trap, maybe modify it if the cc's
2654 are in a nonstandard state so that it accomplishes the same
2655 thing that it would do straightforwardly if the cc's were
2656 set up normally. */
2657 if (cc_status.flags != 0
2658 && NONJUMP_INSN_P (insn)
2659 && GET_CODE (body) == TRAP_IF
2660 && COMPARISON_P (TRAP_CONDITION (body))
2661 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2663 /* This function may alter the contents of its argument
2664 and clear some of the cc_status.flags bits.
2665 It may also return 1 meaning condition now always true
2666 or -1 meaning condition now always false
2667 or 2 meaning condition nontrivial but altered. */
2668 int result = alter_cond (TRAP_CONDITION (body));
2670 /* If TRAP_CONDITION has become always false, delete the
2671 instruction. */
2672 if (result == -1)
2674 delete_insn (insn);
2675 break;
2678 /* If TRAP_CONDITION has become always true, replace
2679 TRAP_CONDITION with const_true_rtx. */
2680 if (result == 1)
2681 TRAP_CONDITION (body) = const_true_rtx;
2683 /* Rerecognize the instruction if it has changed. */
2684 if (result != 0)
2685 INSN_CODE (insn) = -1;
2688 /* Make same adjustments to instructions that examine the
2689 condition codes without jumping and instructions that
2690 handle conditional moves (if this machine has either one). */
2692 if (cc_status.flags != 0
2693 && set != 0)
2695 rtx cond_rtx, then_rtx, else_rtx;
2697 if (!JUMP_P (insn)
2698 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2700 cond_rtx = XEXP (SET_SRC (set), 0);
2701 then_rtx = XEXP (SET_SRC (set), 1);
2702 else_rtx = XEXP (SET_SRC (set), 2);
2704 else
2706 cond_rtx = SET_SRC (set);
2707 then_rtx = const_true_rtx;
2708 else_rtx = const0_rtx;
2711 if (COMPARISON_P (cond_rtx)
2712 && XEXP (cond_rtx, 0) == cc0_rtx)
2714 int result;
2715 result = alter_cond (cond_rtx);
2716 if (result == 1)
2717 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2718 else if (result == -1)
2719 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2720 else if (result == 2)
2721 INSN_CODE (insn) = -1;
2722 if (SET_DEST (set) == SET_SRC (set))
2723 delete_insn (insn);
2727 #endif
2729 #ifdef HAVE_peephole
2730 /* Do machine-specific peephole optimizations if desired. */
2732 if (optimize_p && !flag_no_peephole && !nopeepholes)
2734 rtx next = peephole (insn);
2735 /* When peepholing, if there were notes within the peephole,
2736 emit them before the peephole. */
2737 if (next != 0 && next != NEXT_INSN (insn))
2739 rtx note, prev = PREV_INSN (insn);
2741 for (note = NEXT_INSN (insn); note != next;
2742 note = NEXT_INSN (note))
2743 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2745 /* Put the notes in the proper position for a later
2746 rescan. For example, the SH target can do this
2747 when generating a far jump in a delayed branch
2748 sequence. */
2749 note = NEXT_INSN (insn);
2750 PREV_INSN (note) = prev;
2751 NEXT_INSN (prev) = note;
2752 NEXT_INSN (PREV_INSN (next)) = insn;
2753 PREV_INSN (insn) = PREV_INSN (next);
2754 NEXT_INSN (insn) = next;
2755 PREV_INSN (next) = insn;
2758 /* PEEPHOLE might have changed this. */
2759 body = PATTERN (insn);
2761 #endif
2763 /* Try to recognize the instruction.
2764 If successful, verify that the operands satisfy the
2765 constraints for the instruction. Crash if they don't,
2766 since `reload' should have changed them so that they do. */
2768 insn_code_number = recog_memoized (insn);
2769 cleanup_subreg_operands (insn);
2771 /* Dump the insn in the assembly for debugging (-dAP).
2772 If the final dump is requested as slim RTL, dump slim
2773 RTL to the assembly file also. */
2774 if (flag_dump_rtl_in_asm)
2776 print_rtx_head = ASM_COMMENT_START;
2777 if (! (dump_flags & TDF_SLIM))
2778 print_rtl_single (asm_out_file, insn);
2779 else
2780 dump_insn_slim (asm_out_file, insn);
2781 print_rtx_head = "";
2784 if (! constrain_operands_cached (1))
2785 fatal_insn_not_found (insn);
2787 /* Some target machines need to prescan each insn before
2788 it is output. */
2790 #ifdef FINAL_PRESCAN_INSN
2791 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2792 #endif
2794 if (targetm.have_conditional_execution ()
2795 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2796 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2798 #ifdef HAVE_cc0
2799 cc_prev_status = cc_status;
2801 /* Update `cc_status' for this instruction.
2802 The instruction's output routine may change it further.
2803 If the output routine for a jump insn needs to depend
2804 on the cc status, it should look at cc_prev_status. */
2806 NOTICE_UPDATE_CC (body, insn);
2807 #endif
2809 current_output_insn = debug_insn = insn;
2811 /* Find the proper template for this insn. */
2812 templ = get_insn_template (insn_code_number, insn);
2814 /* If the C code returns 0, it means that it is a jump insn
2815 which follows a deleted test insn, and that test insn
2816 needs to be reinserted. */
2817 if (templ == 0)
2819 rtx prev;
2821 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2823 /* We have already processed the notes between the setter and
2824 the user. Make sure we don't process them again, this is
2825 particularly important if one of the notes is a block
2826 scope note or an EH note. */
2827 for (prev = insn;
2828 prev != last_ignored_compare;
2829 prev = PREV_INSN (prev))
2831 if (NOTE_P (prev))
2832 delete_insn (prev); /* Use delete_note. */
2835 return prev;
2838 /* If the template is the string "#", it means that this insn must
2839 be split. */
2840 if (templ[0] == '#' && templ[1] == '\0')
2842 rtx new_rtx = try_split (body, insn, 0);
2844 /* If we didn't split the insn, go away. */
2845 if (new_rtx == insn && PATTERN (new_rtx) == body)
2846 fatal_insn ("could not split insn", insn);
2848 /* If we have a length attribute, this instruction should have
2849 been split in shorten_branches, to ensure that we would have
2850 valid length info for the splitees. */
2851 gcc_assert (!HAVE_ATTR_length);
2853 return new_rtx;
2856 /* ??? This will put the directives in the wrong place if
2857 get_insn_template outputs assembly directly. However, calling it
2858 before get_insn_template breaks if the insn is split. */
2859 if (targetm.asm_out.unwind_emit_before_insn
2860 && targetm.asm_out.unwind_emit)
2861 targetm.asm_out.unwind_emit (asm_out_file, insn);
2863 if (CALL_P (insn))
2865 rtx x = call_from_call_insn (insn);
2866 x = XEXP (x, 0);
2867 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2869 tree t;
2870 x = XEXP (x, 0);
2871 t = SYMBOL_REF_DECL (x);
2872 if (t)
2873 assemble_external (t);
2875 if (!DECL_IGNORED_P (current_function_decl))
2876 debug_hooks->var_location (insn);
2879 /* Output assembler code from the template. */
2880 output_asm_insn (templ, recog_data.operand);
2882 /* Some target machines need to postscan each insn after
2883 it is output. */
2884 if (targetm.asm_out.final_postscan_insn)
2885 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2886 recog_data.n_operands);
2888 if (!targetm.asm_out.unwind_emit_before_insn
2889 && targetm.asm_out.unwind_emit)
2890 targetm.asm_out.unwind_emit (asm_out_file, insn);
2892 current_output_insn = debug_insn = 0;
2895 return NEXT_INSN (insn);
2898 /* Return whether a source line note needs to be emitted before INSN.
2899 Sets IS_STMT to TRUE if the line should be marked as a possible
2900 breakpoint location. */
2902 static bool
2903 notice_source_line (rtx insn, bool *is_stmt)
2905 const char *filename;
2906 int linenum;
2908 if (override_filename)
2910 filename = override_filename;
2911 linenum = override_linenum;
2913 else
2915 filename = insn_file (insn);
2916 linenum = insn_line (insn);
2919 if (filename == NULL)
2920 return false;
2922 if (force_source_line
2923 || filename != last_filename
2924 || last_linenum != linenum)
2926 force_source_line = false;
2927 last_filename = filename;
2928 last_linenum = linenum;
2929 last_discriminator = discriminator;
2930 *is_stmt = true;
2931 high_block_linenum = MAX (last_linenum, high_block_linenum);
2932 high_function_linenum = MAX (last_linenum, high_function_linenum);
2933 return true;
2936 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2938 /* If the discriminator changed, but the line number did not,
2939 output the line table entry with is_stmt false so the
2940 debugger does not treat this as a breakpoint location. */
2941 last_discriminator = discriminator;
2942 *is_stmt = false;
2943 return true;
2946 return false;
2949 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2950 directly to the desired hard register. */
2952 void
2953 cleanup_subreg_operands (rtx insn)
2955 int i;
2956 bool changed = false;
2957 extract_insn_cached (insn);
2958 for (i = 0; i < recog_data.n_operands; i++)
2960 /* The following test cannot use recog_data.operand when testing
2961 for a SUBREG: the underlying object might have been changed
2962 already if we are inside a match_operator expression that
2963 matches the else clause. Instead we test the underlying
2964 expression directly. */
2965 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2967 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
2968 changed = true;
2970 else if (GET_CODE (recog_data.operand[i]) == PLUS
2971 || GET_CODE (recog_data.operand[i]) == MULT
2972 || MEM_P (recog_data.operand[i]))
2973 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2976 for (i = 0; i < recog_data.n_dups; i++)
2978 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2980 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
2981 changed = true;
2983 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2984 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2985 || MEM_P (*recog_data.dup_loc[i]))
2986 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2988 if (changed)
2989 df_insn_rescan (insn);
2992 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
2993 the thing it is a subreg of. Do it anyway if FINAL_P. */
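/* For instance, (subreg:SI (mem:DI ...) 4) is rewritten as an SImode MEM at
   an adjusted address, and a (subreg:SI (reg:DI ...)) of a hard register
   becomes a direct reference to the appropriate hard register.  (Illustrative
   only; the offset adjustment depends on endianness, as handled below.)  */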
2996 alter_subreg (rtx *xp, bool final_p)
2998 rtx x = *xp;
2999 rtx y = SUBREG_REG (x);
3001 /* simplify_subreg does not remove subreg from volatile references.
3002 We are required to. */
3003 if (MEM_P (y))
3005 int offset = SUBREG_BYTE (x);
3007 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3008 contains 0 instead of the proper offset. See simplify_subreg. */
3009 if (offset == 0
3010 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3012 int difference = GET_MODE_SIZE (GET_MODE (y))
3013 - GET_MODE_SIZE (GET_MODE (x));
3014 if (WORDS_BIG_ENDIAN)
3015 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3016 if (BYTES_BIG_ENDIAN)
3017 offset += difference % UNITS_PER_WORD;
3020 if (final_p)
3021 *xp = adjust_address (y, GET_MODE (x), offset);
3022 else
3023 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3025 else
3027 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3028 SUBREG_BYTE (x));
3030 if (new_rtx != 0)
3031 *xp = new_rtx;
3032 else if (final_p && REG_P (y))
3034 /* Simplify_subreg can't handle some REG cases, but we have to. */
3035 unsigned int regno;
3036 HOST_WIDE_INT offset;
3038 regno = subreg_regno (x);
3039 if (subreg_lowpart_p (x))
3040 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3041 else
3042 offset = SUBREG_BYTE (x);
3043 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3047 return *xp;
3050 /* Do alter_subreg on all the SUBREGs contained in X. */
3052 static rtx
3053 walk_alter_subreg (rtx *xp, bool *changed)
3055 rtx x = *xp;
3056 switch (GET_CODE (x))
3058 case PLUS:
3059 case MULT:
3060 case AND:
3061 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3062 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3063 break;
3065 case MEM:
3066 case ZERO_EXTEND:
3067 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3068 break;
3070 case SUBREG:
3071 *changed = true;
3072 return alter_subreg (xp, true);
3074 default:
3075 break;
3078 return *xp;
3081 #ifdef HAVE_cc0
3083 /* Given BODY, the body of a jump instruction, alter the jump condition
3084 as required by the bits that are set in cc_status.flags.
3085 Not all of the bits there can be handled at this level in all cases.
3087 The value is normally 0.
3088 1 means that the condition has become always true.
3089 -1 means that the condition has become always false.
3090 2 means that COND has been altered. */
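/* As an illustration: with only CC_REVERSED set, (gt (cc0) (const_int 0))
   is rewritten in place to (lt (cc0) (const_int 0)) and 2 is returned;
   with CC_NOT_NEGATIVE set, a GE comparison becomes always true and 1 is
   returned.  */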
3092 static int
3093 alter_cond (rtx cond)
3095 int value = 0;
3097 if (cc_status.flags & CC_REVERSED)
3099 value = 2;
3100 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3103 if (cc_status.flags & CC_INVERTED)
3105 value = 2;
3106 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3109 if (cc_status.flags & CC_NOT_POSITIVE)
3110 switch (GET_CODE (cond))
3112 case LE:
3113 case LEU:
3114 case GEU:
3115 /* Jump becomes unconditional. */
3116 return 1;
3118 case GT:
3119 case GTU:
3120 case LTU:
3121 /* Jump becomes no-op. */
3122 return -1;
3124 case GE:
3125 PUT_CODE (cond, EQ);
3126 value = 2;
3127 break;
3129 case LT:
3130 PUT_CODE (cond, NE);
3131 value = 2;
3132 break;
3134 default:
3135 break;
3138 if (cc_status.flags & CC_NOT_NEGATIVE)
3139 switch (GET_CODE (cond))
3141 case GE:
3142 case GEU:
3143 /* Jump becomes unconditional. */
3144 return 1;
3146 case LT:
3147 case LTU:
3148 /* Jump becomes no-op. */
3149 return -1;
3151 case LE:
3152 case LEU:
3153 PUT_CODE (cond, EQ);
3154 value = 2;
3155 break;
3157 case GT:
3158 case GTU:
3159 PUT_CODE (cond, NE);
3160 value = 2;
3161 break;
3163 default:
3164 break;
3167 if (cc_status.flags & CC_NO_OVERFLOW)
3168 switch (GET_CODE (cond))
3170 case GEU:
3171 /* Jump becomes unconditional. */
3172 return 1;
3174 case LEU:
3175 PUT_CODE (cond, EQ);
3176 value = 2;
3177 break;
3179 case GTU:
3180 PUT_CODE (cond, NE);
3181 value = 2;
3182 break;
3184 case LTU:
3185 /* Jump becomes no-op. */
3186 return -1;
3188 default:
3189 break;
3192 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3193 switch (GET_CODE (cond))
3195 default:
3196 gcc_unreachable ();
3198 case NE:
3199 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3200 value = 2;
3201 break;
3203 case EQ:
3204 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3205 value = 2;
3206 break;
3209 if (cc_status.flags & CC_NOT_SIGNED)
3210 /* The flags are valid if signed condition operators are converted
3211 to unsigned. */
3212 switch (GET_CODE (cond))
3214 case LE:
3215 PUT_CODE (cond, LEU);
3216 value = 2;
3217 break;
3219 case LT:
3220 PUT_CODE (cond, LTU);
3221 value = 2;
3222 break;
3224 case GT:
3225 PUT_CODE (cond, GTU);
3226 value = 2;
3227 break;
3229 case GE:
3230 PUT_CODE (cond, GEU);
3231 value = 2;
3232 break;
3234 default:
3235 break;
3238 return value;
3240 #endif
3242 /* Report inconsistency between the assembler template and the operands.
3243 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3245 void
3246 output_operand_lossage (const char *cmsgid, ...)
3248 char *fmt_string;
3249 char *new_message;
3250 const char *pfx_str;
3251 va_list ap;
3253 va_start (ap, cmsgid);
3255 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3256 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3257 vasprintf (&new_message, fmt_string, ap);
3259 if (this_is_asm_operands)
3260 error_for_asm (this_is_asm_operands, "%s", new_message);
3261 else
3262 internal_error ("%s", new_message);
3264 free (fmt_string);
3265 free (new_message);
3266 va_end (ap);
3269 /* Output of assembler code from a template, and its subroutines. */
3271 /* Annotate the assembly with a comment describing the pattern and
3272 alternative used. */
3274 static void
3275 output_asm_name (void)
3277 if (debug_insn)
3279 int num = INSN_CODE (debug_insn);
3280 fprintf (asm_out_file, "\t%s %d\t%s",
3281 ASM_COMMENT_START, INSN_UID (debug_insn),
3282 insn_data[num].name);
3283 if (insn_data[num].n_alternatives > 1)
3284 fprintf (asm_out_file, "/%d", which_alternative + 1);
3286 if (HAVE_ATTR_length)
3287 fprintf (asm_out_file, "\t[length = %d]",
3288 get_attr_length (debug_insn));
3290 /* Clear this so only the first assembler insn
3291 of any rtl insn will get the special comment for -dp. */
3292 debug_insn = 0;
3296 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3297 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3298 corresponds to the address of the object, and to 0 if it corresponds to the object itself. */
3300 static tree
3301 get_mem_expr_from_op (rtx op, int *paddressp)
3303 tree expr;
3304 int inner_addressp;
3306 *paddressp = 0;
3308 if (REG_P (op))
3309 return REG_EXPR (op);
3310 else if (!MEM_P (op))
3311 return 0;
3313 if (MEM_EXPR (op) != 0)
3314 return MEM_EXPR (op);
3316 /* Otherwise we have an address, so indicate it and look at the address. */
3317 *paddressp = 1;
3318 op = XEXP (op, 0);
3320 /* First check if we have a decl for the address, then look at the right side
3321 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3322 But don't allow the address itself to be indirect. */
3323 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3324 return expr;
3325 else if (GET_CODE (op) == PLUS
3326 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3327 return expr;
3329 while (UNARY_P (op)
3330 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3331 op = XEXP (op, 0);
3333 expr = get_mem_expr_from_op (op, &inner_addressp);
3334 return inner_addressp ? 0 : expr;
3337 /* Output operand names for assembler instructions. OPERANDS is the
3338 operand vector, OPORDER is the order to write the operands, and NOPS
3339 is the number of operands to write. */
3341 static void
3342 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3344 int wrote = 0;
3345 int i;
3347 for (i = 0; i < nops; i++)
3349 int addressp;
3350 rtx op = operands[oporder[i]];
3351 tree expr = get_mem_expr_from_op (op, &addressp);
3353 fprintf (asm_out_file, "%c%s",
3354 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3355 wrote = 1;
3356 if (expr)
3358 fprintf (asm_out_file, "%s",
3359 addressp ? "*" : "");
3360 print_mem_expr (asm_out_file, expr);
3361 wrote = 1;
3363 else if (REG_P (op) && ORIGINAL_REGNO (op)
3364 && ORIGINAL_REGNO (op) != REGNO (op))
3365 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3369 #ifdef ASSEMBLER_DIALECT
3370 /* Helper function to parse assembler dialects in the asm string.
3371 This is called from output_asm_insn and asm_fprintf. */
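/* For example, an assembler-dialect construct written as "{att|intel}" in a
   template emits "att" when dialect_number is 0 and "intel" when it is 1;
   the braces and '|' themselves are consumed here.  (Illustrative syntax.)  */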
3372 static const char *
3373 do_assembler_dialects (const char *p, int *dialect)
3375 char c = *(p - 1);
3377 switch (c)
3379 case '{':
3381 int i;
3383 if (*dialect)
3384 output_operand_lossage ("nested assembly dialect alternatives");
3385 else
3386 *dialect = 1;
3388 /* If we want the first dialect, do nothing. Otherwise, skip
3389 DIALECT_NUMBER strings, each ending with '|'. */
3390 for (i = 0; i < dialect_number; i++)
3392 while (*p && *p != '}' && *p++ != '|')
3394 if (*p == '}')
3395 break;
3398 if (*p == '\0')
3399 output_operand_lossage ("unterminated assembly dialect alternative");
3401 break;
3403 case '|':
3404 if (*dialect)
3406 /* Skip to close brace. */
3409 if (*p == '\0')
3411 output_operand_lossage ("unterminated assembly dialect alternative");
3412 break;
3415 while (*p++ != '}');
3416 *dialect = 0;
3418 else
3419 putc (c, asm_out_file);
3420 break;
3422 case '}':
3423 if (! *dialect)
3424 putc (c, asm_out_file);
3425 *dialect = 0;
3426 break;
3427 default:
3428 gcc_unreachable ();
3431 return p;
3433 #endif
3435 /* Output text from TEMPLATE to the assembler output file,
3436 obeying %-directions to substitute operands taken from
3437 the vector OPERANDS.
3439 %N (for N a digit) means print operand N in usual manner.
3440 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3441 and print the label name with no punctuation.
3442 %cN means require operand N to be a constant
3443 and print the constant expression with no punctuation.
3444 %aN means expect operand N to be a memory address
3445 (not a memory reference!) and print a reference
3446 to that address.
3447 %nN means expect operand N to be a constant
3448 and print a constant expression for minus the value
3449 of the operand, with no other punctuation. */
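/* A purely illustrative template (not from any real machine description):
   "add %1,%0\n\tjmp %l2" prints operands 1 and 0 in the usual manner and
   then prints operand 2 as a bare label name.  */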
3451 void
3452 output_asm_insn (const char *templ, rtx *operands)
3454 const char *p;
3455 int c;
3456 #ifdef ASSEMBLER_DIALECT
3457 int dialect = 0;
3458 #endif
3459 int oporder[MAX_RECOG_OPERANDS];
3460 char opoutput[MAX_RECOG_OPERANDS];
3461 int ops = 0;
3463 /* An insn may return a null string template
3464 in a case where no assembler code is needed. */
3465 if (*templ == 0)
3466 return;
3468 memset (opoutput, 0, sizeof opoutput);
3469 p = templ;
3470 putc ('\t', asm_out_file);
3472 #ifdef ASM_OUTPUT_OPCODE
3473 ASM_OUTPUT_OPCODE (asm_out_file, p);
3474 #endif
3476 while ((c = *p++))
3477 switch (c)
3479 case '\n':
3480 if (flag_verbose_asm)
3481 output_asm_operand_names (operands, oporder, ops);
3482 if (flag_print_asm_name)
3483 output_asm_name ();
3485 ops = 0;
3486 memset (opoutput, 0, sizeof opoutput);
3488 putc (c, asm_out_file);
3489 #ifdef ASM_OUTPUT_OPCODE
3490 while ((c = *p) == '\t')
3492 putc (c, asm_out_file);
3493 p++;
3495 ASM_OUTPUT_OPCODE (asm_out_file, p);
3496 #endif
3497 break;
3499 #ifdef ASSEMBLER_DIALECT
3500 case '{':
3501 case '}':
3502 case '|':
3503 p = do_assembler_dialects (p, &dialect);
3504 break;
3505 #endif
3507 case '%':
3508 /* %% outputs a single %. */
3509 if (*p == '%')
3511 p++;
3512 putc (c, asm_out_file);
3514 /* %= outputs a number which is unique to each insn in the entire
3515 compilation. This is useful for making local labels that are
3516 referred to more than once in a given insn. */
3517 else if (*p == '=')
3519 p++;
3520 fprintf (asm_out_file, "%d", insn_counter);
3522 /* % followed by a letter and some digits
3523 outputs an operand in a special way depending on the letter.
3524 Letters `acln' are implemented directly.
3525 Other letters are passed to `output_operand' so that
3526 the TARGET_PRINT_OPERAND hook can define them. */
3527 else if (ISALPHA (*p))
3529 int letter = *p++;
3530 unsigned long opnum;
3531 char *endptr;
3533 opnum = strtoul (p, &endptr, 10);
3535 if (endptr == p)
3536 output_operand_lossage ("operand number missing "
3537 "after %%-letter");
3538 else if (this_is_asm_operands && opnum >= insn_noperands)
3539 output_operand_lossage ("operand number out of range");
3540 else if (letter == 'l')
3541 output_asm_label (operands[opnum]);
3542 else if (letter == 'a')
3543 output_address (operands[opnum]);
3544 else if (letter == 'c')
3546 if (CONSTANT_ADDRESS_P (operands[opnum]))
3547 output_addr_const (asm_out_file, operands[opnum]);
3548 else
3549 output_operand (operands[opnum], 'c');
3551 else if (letter == 'n')
3553 if (CONST_INT_P (operands[opnum]))
3554 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3555 - INTVAL (operands[opnum]));
3556 else
3558 putc ('-', asm_out_file);
3559 output_addr_const (asm_out_file, operands[opnum]);
3562 else
3563 output_operand (operands[opnum], letter);
3565 if (!opoutput[opnum])
3566 oporder[ops++] = opnum;
3567 opoutput[opnum] = 1;
3569 p = endptr;
3570 c = *p;
3572 /* % followed by a digit outputs an operand the default way. */
3573 else if (ISDIGIT (*p))
3575 unsigned long opnum;
3576 char *endptr;
3578 opnum = strtoul (p, &endptr, 10);
3579 if (this_is_asm_operands && opnum >= insn_noperands)
3580 output_operand_lossage ("operand number out of range");
3581 else
3582 output_operand (operands[opnum], 0);
3584 if (!opoutput[opnum])
3585 oporder[ops++] = opnum;
3586 opoutput[opnum] = 1;
3588 p = endptr;
3589 c = *p;
3591 /* % followed by punctuation: output something for that
3592 punctuation character alone, with no operand. The
3593 TARGET_PRINT_OPERAND hook decides what is actually done. */
3594 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3595 output_operand (NULL_RTX, *p++);
3596 else
3597 output_operand_lossage ("invalid %%-code");
3598 break;
3600 default:
3601 putc (c, asm_out_file);
3604 /* Write out the variable names for operands, if we know them. */
3605 if (flag_verbose_asm)
3606 output_asm_operand_names (operands, oporder, ops);
3607 if (flag_print_asm_name)
3608 output_asm_name ();
3610 putc ('\n', asm_out_file);
3613 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3615 void
3616 output_asm_label (rtx x)
3618 char buf[256];
3620 if (GET_CODE (x) == LABEL_REF)
3621 x = XEXP (x, 0);
3622 if (LABEL_P (x)
3623 || (NOTE_P (x)
3624 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3625 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3626 else
3627 output_operand_lossage ("'%%l' operand isn't a label");
3629 assemble_name (asm_out_file, buf);
3632 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3633 output_operand. Marks SYMBOL_REFs as referenced through use of
3634 assemble_external. */
3636 static int
3637 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3639 rtx x = *xp;
3641 /* If we have a used symbol, we may have to emit assembly
3642 annotations corresponding to whether the symbol is external, weak
3643 or has non-default visibility. */
3644 if (GET_CODE (x) == SYMBOL_REF)
3646 tree t;
3648 t = SYMBOL_REF_DECL (x);
3649 if (t)
3650 assemble_external (t);
3652 return -1;
3655 return 0;
3658 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3660 void
3661 mark_symbol_refs_as_used (rtx x)
3663 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3666 /* Print operand X using machine-dependent assembler syntax.
3667 CODE is a non-digit that preceded the operand-number in the % spec,
3668 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3669 between the % and the digits.
3670 When CODE is a non-letter, X is 0.
3672 The meanings of the letters are machine-dependent and controlled
3673 by TARGET_PRINT_OPERAND. */
3675 void
3676 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3678 if (x && GET_CODE (x) == SUBREG)
3679 x = alter_subreg (&x, true);
3681 /* X must not be a pseudo reg. */
3682 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3684 targetm.asm_out.print_operand (asm_out_file, x, code);
3686 if (x == NULL_RTX)
3687 return;
3689 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3692 /* Print a memory reference operand for address X using
3693 machine-dependent assembler syntax. */
3695 void
3696 output_address (rtx x)
3698 bool changed = false;
3699 walk_alter_subreg (&x, &changed);
3700 targetm.asm_out.print_operand_address (asm_out_file, x);
3703 /* Print an integer constant expression in assembler syntax.
3704 Addition and subtraction are the only arithmetic
3705 that may appear in these expressions. */
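/* For example, (const (plus (symbol_ref "foo") (const_int 4))) is printed as
   "foo+4" on typical targets; the exact symbol spelling may be changed by
   ASM_OUTPUT_SYMBOL_REF.  */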
3707 void
3708 output_addr_const (FILE *file, rtx x)
3710 char buf[256];
3712 restart:
3713 switch (GET_CODE (x))
3715 case PC:
3716 putc ('.', file);
3717 break;
3719 case SYMBOL_REF:
3720 if (SYMBOL_REF_DECL (x))
3721 assemble_external (SYMBOL_REF_DECL (x));
3722 #ifdef ASM_OUTPUT_SYMBOL_REF
3723 ASM_OUTPUT_SYMBOL_REF (file, x);
3724 #else
3725 assemble_name (file, XSTR (x, 0));
3726 #endif
3727 break;
3729 case LABEL_REF:
3730 x = XEXP (x, 0);
3731 /* Fall through. */
3732 case CODE_LABEL:
3733 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3734 #ifdef ASM_OUTPUT_LABEL_REF
3735 ASM_OUTPUT_LABEL_REF (file, buf);
3736 #else
3737 assemble_name (file, buf);
3738 #endif
3739 break;
3741 case CONST_INT:
3742 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3743 break;
3745 case CONST:
3746 /* This used to output parentheses around the expression,
3747 but that does not work on the 386 (either ATT or BSD assembler). */
3748 output_addr_const (file, XEXP (x, 0));
3749 break;
3751 case CONST_DOUBLE:
3752 if (GET_MODE (x) == VOIDmode)
3754 /* We can use %d if the number is one word and positive. */
3755 if (CONST_DOUBLE_HIGH (x))
3756 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3757 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3758 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3759 else if (CONST_DOUBLE_LOW (x) < 0)
3760 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3761 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3762 else
3763 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3765 else
3766 /* We can't handle floating point constants;
3767 PRINT_OPERAND must handle them. */
3768 output_operand_lossage ("floating constant misused");
3769 break;
3771 case CONST_FIXED:
3772 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3773 break;
3775 case PLUS:
3776 /* Some assemblers need integer constants to appear last (e.g. masm). */
3777 if (CONST_INT_P (XEXP (x, 0)))
3779 output_addr_const (file, XEXP (x, 1));
3780 if (INTVAL (XEXP (x, 0)) >= 0)
3781 fprintf (file, "+");
3782 output_addr_const (file, XEXP (x, 0));
3784 else
3786 output_addr_const (file, XEXP (x, 0));
3787 if (!CONST_INT_P (XEXP (x, 1))
3788 || INTVAL (XEXP (x, 1)) >= 0)
3789 fprintf (file, "+");
3790 output_addr_const (file, XEXP (x, 1));
3792 break;
3794 case MINUS:
3795 /* Avoid outputting things like x-x or x+5-x,
3796 since some assemblers can't handle that. */
3797 x = simplify_subtraction (x);
3798 if (GET_CODE (x) != MINUS)
3799 goto restart;
3801 output_addr_const (file, XEXP (x, 0));
3802 fprintf (file, "-");
3803 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3804 || GET_CODE (XEXP (x, 1)) == PC
3805 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3806 output_addr_const (file, XEXP (x, 1));
3807 else
3809 fputs (targetm.asm_out.open_paren, file);
3810 output_addr_const (file, XEXP (x, 1));
3811 fputs (targetm.asm_out.close_paren, file);
3813 break;
3815 case ZERO_EXTEND:
3816 case SIGN_EXTEND:
3817 case SUBREG:
3818 case TRUNCATE:
3819 output_addr_const (file, XEXP (x, 0));
3820 break;
3822 default:
3823 if (targetm.asm_out.output_addr_const_extra (file, x))
3824 break;
3826 output_operand_lossage ("invalid expression as operand");
3830 /* Output a quoted string. */
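/* With the default implementation below, an input such as say "hi" is
   written as "say \"hi\"", and non-printable characters are emitted as
   three-digit octal escapes.  Targets may override all of this with
   OUTPUT_QUOTED_STRING.  */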
3832 void
3833 output_quoted_string (FILE *asm_file, const char *string)
3835 #ifdef OUTPUT_QUOTED_STRING
3836 OUTPUT_QUOTED_STRING (asm_file, string);
3837 #else
3838 char c;
3840 putc ('\"', asm_file);
3841 while ((c = *string++) != 0)
3843 if (ISPRINT (c))
3845 if (c == '\"' || c == '\\')
3846 putc ('\\', asm_file);
3847 putc (c, asm_file);
3849 else
3850 fprintf (asm_file, "\\%03o", (unsigned char) c);
3852 putc ('\"', asm_file);
3853 #endif
3856 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
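/* E.g. fprint_whex (f, 0xdead) writes "0xdead"; a zero value is written as a
   single "0" without the "0x" prefix, as the code below shows.  */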
3858 void
3859 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3861 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3862 if (value == 0)
3863 putc ('0', f);
3864 else
3866 char *p = buf + sizeof (buf);
3868 *--p = "0123456789abcdef"[value % 16];
3869 while ((value /= 16) != 0);
3870 *--p = 'x';
3871 *--p = '0';
3872 fwrite (p, 1, buf + sizeof (buf) - p, f);
3876 /* Internal function that prints an unsigned long in decimal in reverse.
3877 The output string IS NOT null-terminated. */
3879 static int
3880 sprint_ul_rev (char *s, unsigned long value)
3882 int i = 0;
3885 s[i] = "0123456789"[value % 10];
3886 value /= 10;
3887 i++;
3888 /* alternate version, without modulo */
3889 /* oldval = value; */
3890 /* value /= 10; */
3891 /* s[i] = "0123456789" [oldval - 10*value]; */
3892 /* i++ */
3894 while (value != 0);
3895 return i;
3898 /* Write an unsigned long as decimal to a file, fast. */
3900 void
3901 fprint_ul (FILE *f, unsigned long value)
3903 /* python says: len(str(2**64)) == 20 */
3904 char s[20];
3905 int i;
3907 i = sprint_ul_rev (s, value);
3909 /* It's probably too small to bother with string reversal and fputs. */
3912 i--;
3913 putc (s[i], f);
3915 while (i != 0);
3918 /* Write an unsigned long as decimal to a string, fast.
3919 s must be wide enough to not overflow, at least 21 chars.
3920 Returns the length of the string (without terminating '\0'). */
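/* E.g. sprint_ul (buf, 1234UL) stores "1234" in BUF and returns 4.  */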
3923 sprint_ul (char *s, unsigned long value)
3925 int len;
3926 char tmp_c;
3927 int i;
3928 int j;
3930 len = sprint_ul_rev (s, value);
3931 s[len] = '\0';
3933 /* Reverse the string. */
3934 i = 0;
3935 j = len - 1;
3936 while (i < j)
3938 tmp_c = s[i];
3939 s[i] = s[j];
3940 s[j] = tmp_c;
3941 i++; j--;
3944 return len;
3947 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3948 %R prints the value of REGISTER_PREFIX.
3949 %L prints the value of LOCAL_LABEL_PREFIX.
3950 %U prints the value of USER_LABEL_PREFIX.
3951 %I prints the value of IMMEDIATE_PREFIX.
3952 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3953 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3955 We handle alternate assembler dialects here, just like output_asm_insn. */
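/* An illustrative call (prefixes are target-defined; this assumes a target
   whose REGISTER_PREFIX is "%" and whose reg_names entry is "ax"):
     asm_fprintf (file, "\tpush\t%R%s\n", reg_names[regno]);
   which emits "push %ax".  */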
3957 void
3958 asm_fprintf (FILE *file, const char *p, ...)
3960 char buf[10];
3961 char *q, c;
3962 #ifdef ASSEMBLER_DIALECT
3963 int dialect = 0;
3964 #endif
3965 va_list argptr;
3967 va_start (argptr, p);
3969 buf[0] = '%';
3971 while ((c = *p++))
3972 switch (c)
3974 #ifdef ASSEMBLER_DIALECT
3975 case '{':
3976 case '}':
3977 case '|':
3978 p = do_assembler_dialects (p, &dialect);
3979 break;
3980 #endif
3982 case '%':
3983 c = *p++;
3984 q = &buf[1];
3985 while (strchr ("-+ #0", c))
3987 *q++ = c;
3988 c = *p++;
3990 while (ISDIGIT (c) || c == '.')
3992 *q++ = c;
3993 c = *p++;
3995 switch (c)
3997 case '%':
3998 putc ('%', file);
3999 break;
4001 case 'd': case 'i': case 'u':
4002 case 'x': case 'X': case 'o':
4003 case 'c':
4004 *q++ = c;
4005 *q = 0;
4006 fprintf (file, buf, va_arg (argptr, int));
4007 break;
4009 case 'w':
4010 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4011 'o' cases, but we do not check for those cases. It
4012 means that the value is a HOST_WIDE_INT, which may be
4013 either `long' or `long long'. */
4014 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4015 q += strlen (HOST_WIDE_INT_PRINT);
4016 *q++ = *p++;
4017 *q = 0;
4018 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4019 break;
4021 case 'l':
4022 *q++ = c;
4023 #ifdef HAVE_LONG_LONG
4024 if (*p == 'l')
4026 *q++ = *p++;
4027 *q++ = *p++;
4028 *q = 0;
4029 fprintf (file, buf, va_arg (argptr, long long));
4031 else
4032 #endif
4034 *q++ = *p++;
4035 *q = 0;
4036 fprintf (file, buf, va_arg (argptr, long));
4039 break;
4041 case 's':
4042 *q++ = c;
4043 *q = 0;
4044 fprintf (file, buf, va_arg (argptr, char *));
4045 break;
4047 case 'O':
4048 #ifdef ASM_OUTPUT_OPCODE
4049 ASM_OUTPUT_OPCODE (asm_out_file, p);
4050 #endif
4051 break;
4053 case 'R':
4054 #ifdef REGISTER_PREFIX
4055 fprintf (file, "%s", REGISTER_PREFIX);
4056 #endif
4057 break;
4059 case 'I':
4060 #ifdef IMMEDIATE_PREFIX
4061 fprintf (file, "%s", IMMEDIATE_PREFIX);
4062 #endif
4063 break;
4065 case 'L':
4066 #ifdef LOCAL_LABEL_PREFIX
4067 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4068 #endif
4069 break;
4071 case 'U':
4072 fputs (user_label_prefix, file);
4073 break;
4075 #ifdef ASM_FPRINTF_EXTENSIONS
4076 /* Uppercase letters are reserved for general use by asm_fprintf
4077 and so are not available to target specific code. To prevent
4078 the ASM_FPRINTF_EXTENSIONS macro from using them, they are
4079 handled here. As they get turned into real extensions
4080 to asm_fprintf they should be removed from this list. */
4081 case 'A': case 'B': case 'C': case 'D': case 'E':
4082 case 'F': case 'G': case 'H': case 'J': case 'K':
4083 case 'M': case 'N': case 'P': case 'Q': case 'S':
4084 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4085 break;
4087 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4088 #endif
4089 default:
4090 gcc_unreachable ();
4092 break;
4094 default:
4095 putc (c, file);
4097 va_end (argptr);
4100 /* Return nonzero if this function has no function calls. */
4103 leaf_function_p (void)
4105 rtx insn;
4107 if (crtl->profile || profile_arc_flag)
4108 return 0;
4110 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4112 if (CALL_P (insn)
4113 && ! SIBLING_CALL_P (insn))
4114 return 0;
4115 if (NONJUMP_INSN_P (insn)
4116 && GET_CODE (PATTERN (insn)) == SEQUENCE
4117 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4118 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4119 return 0;
4122 return 1;
4125 /* Return 1 if branch is a forward branch.
4126 Uses insn_shuid array, so it works only in the final pass. May be used by
4127 output templates to add customary branch prediction hints.
4130 final_forward_branch_p (rtx insn)
4132 int insn_id, label_id;
4134 gcc_assert (uid_shuid);
4135 insn_id = INSN_SHUID (insn);
4136 label_id = INSN_SHUID (JUMP_LABEL (insn));
4137 /* We've hit some insns that do not have id information available. */
4138 gcc_assert (insn_id && label_id);
4139 return insn_id < label_id;
4142 /* On some machines, a function with no call insns
4143 can run faster if it doesn't create its own register window.
4144 When output, the leaf function should use only the "output"
4145 registers. Ordinarily, the function would be compiled to use
4146 the "input" registers to find its arguments; it is a candidate
4147 for leaf treatment if it uses only the "input" registers.
4148 Leaf function treatment means renumbering so the function
4149 uses the "output" registers instead. */
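/* SPARC is the classic example (mentioned here purely as an illustration):
   LEAF_REG_REMAP maps the incoming %i registers onto the %o registers so a
   leaf function can avoid allocating its own register window.  */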
4151 #ifdef LEAF_REGISTERS
4153 /* Return 1 if this function uses only the registers that can be
4154 safely renumbered. */
4157 only_leaf_regs_used (void)
4159 int i;
4160 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4162 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4163 if ((df_regs_ever_live_p (i) || global_regs[i])
4164 && ! permitted_reg_in_leaf_functions[i])
4165 return 0;
4167 if (crtl->uses_pic_offset_table
4168 && pic_offset_table_rtx != 0
4169 && REG_P (pic_offset_table_rtx)
4170 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4171 return 0;
4173 return 1;
4176 /* Scan all instructions and renumber all registers into those
4177 available in leaf functions. */
4179 static void
4180 leaf_renumber_regs (rtx first)
4182 rtx insn;
4184 /* Renumber only the actual patterns.
4185 The reg-notes can contain frame pointer refs,
4186 and renumbering them could crash and is not needed. */
4187 for (insn = first; insn; insn = NEXT_INSN (insn))
4188 if (INSN_P (insn))
4189 leaf_renumber_regs_insn (PATTERN (insn));
4192 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4193 available in leaf functions. */
4195 void
4196 leaf_renumber_regs_insn (rtx in_rtx)
4198 int i, j;
4199 const char *format_ptr;
4201 if (in_rtx == 0)
4202 return;
4204 /* Renumber all input-registers into output-registers. */
4208 if (REG_P (in_rtx))
4210 int newreg;
4212 /* Don't renumber the same reg twice. */
4213 if (in_rtx->used)
4214 return;
4216 newreg = REGNO (in_rtx);
4217 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4218 to reach here as part of a REG_NOTE. */
4219 if (newreg >= FIRST_PSEUDO_REGISTER)
4221 in_rtx->used = 1;
4222 return;
4224 newreg = LEAF_REG_REMAP (newreg);
4225 gcc_assert (newreg >= 0);
4226 df_set_regs_ever_live (REGNO (in_rtx), false);
4227 df_set_regs_ever_live (newreg, true);
4228 SET_REGNO (in_rtx, newreg);
4229 in_rtx->used = 1;
4232 if (INSN_P (in_rtx))
4234 /* Inside a SEQUENCE, we find insns.
4235 Renumber just the patterns of these insns,
4236 just as we do for the top-level insns. */
4237 leaf_renumber_regs_insn (PATTERN (in_rtx));
4238 return;
4241 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4243 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4244 switch (*format_ptr++)
4246 case 'e':
4247 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4248 break;
4250 case 'E':
4251 if (NULL != XVEC (in_rtx, i))
4253 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4254 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4256 break;
4258 case 'S':
4259 case 's':
4260 case '0':
4261 case 'i':
4262 case 'w':
4263 case 'n':
4264 case 'u':
4265 break;
4267 default:
4268 gcc_unreachable ();
4271 #endif
4273 /* Turn the RTL into assembly. */
4274 static unsigned int
4275 rest_of_handle_final (void)
4277 rtx x;
4278 const char *fnname;
4280 /* Get the function's name, as described by its RTL. This may be
4281 different from the DECL_NAME name used in the source file. */
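  /* For instance (illustrative only): given
       int foo (void) __asm__ ("entry_point");
     DECL_RTL is a MEM of a SYMBOL_REF named "entry_point", so FNNAME below
     becomes "entry_point" even though DECL_NAME is "foo"; target conventions
     such as a leading underscore show up here as well.  */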
4283 x = DECL_RTL (current_function_decl);
4284 gcc_assert (MEM_P (x));
4285 x = XEXP (x, 0);
4286 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4287 fnname = XSTR (x, 0);
4289 assemble_start_function (current_function_decl, fnname);
4290 final_start_function (get_insns (), asm_out_file, optimize);
4291 final (get_insns (), asm_out_file, optimize);
4292 final_end_function ();
4294 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4295      directive that closes the procedure descriptor.  The same applies to x64 SEH.
4296 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4297 output_function_exception_table (fnname);
4299 assemble_end_function (current_function_decl, fnname);
4301 user_defined_section_attribute = false;
4303 /* Free up reg info memory. */
4304 free_reg_info ();
4306 if (! quiet_flag)
4307 fflush (asm_out_file);
4309 /* Write DBX symbols if requested. */
4311 /* Note that for those inline functions where we don't initially
4312 know for certain that we will be generating an out-of-line copy,
4313 the first invocation of this routine (rest_of_compilation) will
4314 skip over this code by doing a `goto exit_rest_of_compilation;'.
4315 Later on, wrapup_global_declarations will (indirectly) call
4316 rest_of_compilation again for those inline functions that need
4317 to have out-of-line copies generated. During that call, we
4318 *will* be routed past here. */
4320 timevar_push (TV_SYMOUT);
4321 if (!DECL_IGNORED_P (current_function_decl))
4322 debug_hooks->function_decl (current_function_decl);
4323 timevar_pop (TV_SYMOUT);
4325 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4326 DECL_INITIAL (current_function_decl) = error_mark_node;
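  /* If the function is a static constructor or destructor, ask the target to
     record it, together with its priority, in whatever mechanism the target
     uses (typically an entry in the .init_array/.ctors or .fini_array/.dtors
     section, or a symbol that collect2 picks up).  */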
4328 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4329 && targetm.have_ctors_dtors)
4330 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4331 decl_init_priority_lookup
4332 (current_function_decl));
4333 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4334 && targetm.have_ctors_dtors)
4335 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4336 decl_fini_priority_lookup
4337 (current_function_decl));
4338 return 0;
4341 struct rtl_opt_pass pass_final =
4344 RTL_PASS,
4345 "final", /* name */
4346 OPTGROUP_NONE, /* optinfo_flags */
4347 NULL, /* gate */
4348 rest_of_handle_final, /* execute */
4349 NULL, /* sub */
4350 NULL, /* next */
4351 0, /* static_pass_number */
4352 TV_FINAL, /* tv_id */
4353 0, /* properties_required */
4354 0, /* properties_provided */
4355 0, /* properties_destroyed */
4356 0, /* todo_flags_start */
4357 TODO_ggc_collect /* todo_flags_finish */
4362 static unsigned int
4363 rest_of_handle_shorten_branches (void)
4365 /* Shorten branches. */
4366 shorten_branches (get_insns ());
4367 return 0;
4370 struct rtl_opt_pass pass_shorten_branches =
4373 RTL_PASS,
4374 "shorten", /* name */
4375 OPTGROUP_NONE, /* optinfo_flags */
4376 NULL, /* gate */
4377 rest_of_handle_shorten_branches, /* execute */
4378 NULL, /* sub */
4379 NULL, /* next */
4380 0, /* static_pass_number */
4381 TV_SHORTEN_BRANCH, /* tv_id */
4382 0, /* properties_required */
4383 0, /* properties_provided */
4384 0, /* properties_destroyed */
4385 0, /* todo_flags_start */
4386 0 /* todo_flags_finish */
4391 static unsigned int
4392 rest_of_clean_state (void)
4394 rtx insn, next;
4395 FILE *final_output = NULL;
4396 int save_unnumbered = flag_dump_unnumbered;
4397 int save_noaddr = flag_dump_noaddr;
4399 if (flag_dump_final_insns)
4401 final_output = fopen (flag_dump_final_insns, "a");
4402 if (!final_output)
4404 error ("could not open final insn dump file %qs: %m",
4405 flag_dump_final_insns);
4406 flag_dump_final_insns = NULL;
4408 else
4410 flag_dump_noaddr = flag_dump_unnumbered = 1;
4411 if (flag_compare_debug_opt || flag_compare_debug)
4412 dump_flags |= TDF_NOUID;
4413 dump_function_header (final_output, current_function_decl,
4414 dump_flags);
4415 final_insns_dump_p = true;
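	  /* Canonicalize the insn UIDs so the dump is stable: labels keep
	     their label number, everything else is printed with UID 0.
	     This lets the -fcompare-debug machinery compare the dumps of
	     the two compilations textually.  */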
4417 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4418 if (LABEL_P (insn))
4419 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4420 else
4422 if (NOTE_P (insn))
4423 set_block_for_insn (insn, NULL);
4424 INSN_UID (insn) = 0;
4429 /* It is very important to decompose the RTL instruction chain here:
4430 debug information keeps pointing into CODE_LABEL insns inside the function
4431      body.  If these still pointed to the other insns, we would end up preserving
4432      the whole RTL chain and its attached detailed debug info in memory.  */
4433 for (insn = get_insns (); insn; insn = next)
4435 next = NEXT_INSN (insn);
4436 NEXT_INSN (insn) = NULL;
4437 PREV_INSN (insn) = NULL;
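      /* Notes that exist only in debug-enabled compilations are not printed;
	 including them would make the -fcompare-debug dumps differ spuriously.  */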
4439 if (final_output
4440 && (!NOTE_P (insn) ||
4441 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4442 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4443 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4444 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4445 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4446 print_rtl_single (final_output, insn);
4449 if (final_output)
4451 flag_dump_noaddr = save_noaddr;
4452 flag_dump_unnumbered = save_unnumbered;
4453 final_insns_dump_p = false;
4455 if (fclose (final_output))
4457 error ("could not close final insn dump file %qs: %m",
4458 flag_dump_final_insns);
4459 flag_dump_final_insns = NULL;
4463 /* In case the function was not output,
4464 don't leave any temporary anonymous types
4465 queued up for sdb output. */
4466 #ifdef SDB_DEBUGGING_INFO
4467 if (write_symbols == SDB_DEBUG)
4468 sdbout_types (NULL_TREE);
4469 #endif
4471 flag_rerun_cse_after_global_opts = 0;
4472 reload_completed = 0;
4473 epilogue_completed = 0;
4474 #ifdef STACK_REGS
4475 regstack_completed = 0;
4476 #endif
4478 /* Clear out the insn_length contents now that they are no
4479 longer valid. */
4480 init_insn_lengths ();
4482 /* Show no temporary slots allocated. */
4483 init_temp_slots ();
4485 free_bb_for_insn ();
4487 delete_tree_ssa ();
4489   /* We can reduce the stack alignment at the call site only when we are sure
4490      that the function body just produced will actually be used in the final
4491 executable. */
4492 if (decl_binds_to_current_def_p (current_function_decl))
4494 unsigned int pref = crtl->preferred_stack_boundary;
4495 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4496 pref = crtl->stack_alignment_needed;
4497 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4498 = pref;
4501 /* Make sure volatile mem refs aren't considered valid operands for
4502 arithmetic insns. We must call this here if this is a nested inline
4503 function, since the above code leaves us in the init_recog state,
4504 and the function context push/pop code does not save/restore volatile_ok.
4506 ??? Maybe it isn't necessary for expand_start_function to call this
4507 anymore if we do it here? */
4509 init_recog_no_volatile ();
4511 /* We're done with this function. Free up memory if we can. */
4512 free_after_parsing (cfun);
4513 free_after_compilation (cfun);
4514 return 0;
4517 struct rtl_opt_pass pass_clean_state =
4520 RTL_PASS,
4521 "*clean_state", /* name */
4522 OPTGROUP_NONE, /* optinfo_flags */
4523 NULL, /* gate */
4524 rest_of_clean_state, /* execute */
4525 NULL, /* sub */
4526 NULL, /* next */
4527 0, /* static_pass_number */
4528 TV_FINAL, /* tv_id */
4529 0, /* properties_required */
4530 0, /* properties_provided */
4531 PROP_rtl, /* properties_destroyed */
4532 0, /* todo_flags_start */
4533 0 /* todo_flags_finish */