1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This is the final pass of the compiler.
24 It looks at the rtl code for a function and outputs assembler code.
26 Call `final_start_function' to output the assembler code for function entry,
27 `final' to output assembler code for some RTL code,
28 `final_end_function' to output assembler code for function exit.
29 If a function is compiled in several pieces, each piece is
30 output separately with `final'.
32 Some optimizations are also done at this level.
33 Move instructions that were made unnecessary by good register allocation
34 are detected and omitted from the output. (Though most of these
35 are removed by the last jump pass.)
37 Instructions to set the condition codes are omitted when it can be
38 seen that the condition codes already had the desired values.
40 In some cases it is sufficient if the inherited condition codes
41 have related values, but this may require the following insn
42 (the one that tests the condition codes) to be modified.
44 The code for the function prologue and epilogue is generated
45 directly in assembler by the target functions function_prologue and
46 function_epilogue. Those instructions never exist as rtl. */
48 #include "config.h"
49 #include "system.h"
50 #include "coretypes.h"
51 #include "tm.h"
53 #include "tree.h"
54 #include "rtl.h"
55 #include "tm_p.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
59 #include "recog.h"
60 #include "conditions.h"
61 #include "flags.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "rtl-error.h"
67 #include "toplev.h" /* exact_log2, floor_log2 */
68 #include "reload.h"
69 #include "intl.h"
70 #include "basic-block.h"
71 #include "target.h"
72 #include "targhooks.h"
73 #include "debug.h"
74 #include "expr.h"
75 #include "tree-pass.h"
76 #include "tree-flow.h"
77 #include "cgraph.h"
78 #include "coverage.h"
79 #include "df.h"
80 #include "vecprim.h"
81 #include "ggc.h"
82 #include "cfgloop.h"
83 #include "params.h"
84 #include "tree-pretty-print.h" /* for dump_function_header */
86 #ifdef XCOFF_DEBUGGING_INFO
87 #include "xcoffout.h" /* Needed for external data
88 declarations for e.g. AIX 4.x. */
89 #endif
91 #include "dwarf2out.h"
93 #ifdef DBX_DEBUGGING_INFO
94 #include "dbxout.h"
95 #endif
97 #ifdef SDB_DEBUGGING_INFO
98 #include "sdbout.h"
99 #endif
101 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
102 So define a null default for it to save conditionalization later. */
103 #ifndef CC_STATUS_INIT
104 #define CC_STATUS_INIT
105 #endif
107 /* Is the given character a logical line separator for the assembler? */
108 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
109 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
110 #endif
112 #ifndef JUMP_TABLES_IN_TEXT_SECTION
113 #define JUMP_TABLES_IN_TEXT_SECTION 0
114 #endif
116 /* Bitflags used by final_scan_insn. */
117 #define SEEN_BB 1
118 #define SEEN_NOTE 2
119 #define SEEN_EMITTED 4
121 /* Last insn processed by final_scan_insn. */
122 static rtx debug_insn;
123 rtx current_output_insn;
125 /* Line number of last NOTE. */
126 static int last_linenum;
128 /* Last discriminator written to assembly. */
129 static int last_discriminator;
131 /* Discriminator of current block. */
132 static int discriminator;
134 /* Highest line number in current block. */
135 static int high_block_linenum;
137 /* Likewise for function. */
138 static int high_function_linenum;
140 /* Filename of last NOTE. */
141 static const char *last_filename;
143 /* Override filename and line number. */
144 static const char *override_filename;
145 static int override_linenum;
147 /* Whether to force emission of a line note before the next insn. */
148 static bool force_source_line = false;
150 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
152 /* Nonzero while outputting an `asm' with operands.
153 This means that inconsistencies are the user's fault, so don't die.
154 The precise value is the insn being output, to pass to error_for_asm. */
155 rtx this_is_asm_operands;
157 /* Number of operands of this insn, for an `asm' with operands. */
158 static unsigned int insn_noperands;
160 /* Compare optimization flag. */
162 static rtx last_ignored_compare = 0;
164 /* Assign a unique number to each insn that is output.
165 This can be used to generate unique local labels. */
167 static int insn_counter = 0;
169 #ifdef HAVE_cc0
170 /* This variable contains machine-dependent flags (defined in tm.h)
171 set and examined by output routines
172 that describe how to interpret the condition codes properly. */
174 CC_STATUS cc_status;
176 /* During output of an insn, this contains a copy of cc_status
177 from before the insn. */
179 CC_STATUS cc_prev_status;
180 #endif
182 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
184 static int block_depth;
186 /* Nonzero if have enabled APP processing of our assembler output. */
188 static int app_on;
190 /* If we are outputting an insn sequence, this contains the sequence rtx.
191 Zero otherwise. */
193 rtx final_sequence;
195 #ifdef ASSEMBLER_DIALECT
197 /* Number of the assembler dialect to use, starting at 0. */
198 static int dialect_number;
199 #endif
201 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
202 rtx current_insn_predicate;
204 /* True if printing into -fdump-final-insns= dump. */
205 bool final_insns_dump_p;
207 #ifdef HAVE_ATTR_length
208 static int asm_insn_count (rtx);
209 #endif
210 static void profile_function (FILE *);
211 static void profile_after_prologue (FILE *);
212 static bool notice_source_line (rtx, bool *);
213 static rtx walk_alter_subreg (rtx *, bool *);
214 static void output_asm_name (void);
215 static void output_alternate_entry_point (FILE *, rtx);
216 static tree get_mem_expr_from_op (rtx, int *);
217 static void output_asm_operand_names (rtx *, int *, int);
218 #ifdef LEAF_REGISTERS
219 static void leaf_renumber_regs (rtx);
220 #endif
221 #ifdef HAVE_cc0
222 static int alter_cond (rtx);
223 #endif
224 #ifndef ADDR_VEC_ALIGN
225 static int final_addr_vec_align (rtx);
226 #endif
227 #ifdef HAVE_ATTR_length
228 static int align_fuzz (rtx, rtx, int, unsigned);
229 #endif
231 /* Initialize data in final at the beginning of a compilation. */
233 void
234 init_final (const char *filename ATTRIBUTE_UNUSED)
236 app_on = 0;
237 final_sequence = 0;
239 #ifdef ASSEMBLER_DIALECT
240 dialect_number = ASSEMBLER_DIALECT;
241 #endif
244 /* Default target function prologue and epilogue assembler output.
246 If not overridden for epilogue code, then the function body itself
247 contains return instructions wherever needed. */
248 void
249 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
250 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
254 void
255 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
256 tree decl ATTRIBUTE_UNUSED,
257 bool new_is_cold ATTRIBUTE_UNUSED)
261 /* Default target hook that outputs nothing to a stream. */
262 void
263 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
267 /* Enable APP processing of subsequent output.
268 Used before the output from an `asm' statement. */
270 void
271 app_enable (void)
273 if (! app_on)
275 fputs (ASM_APP_ON, asm_out_file);
276 app_on = 1;
280 /* Disable APP processing of subsequent output.
281 Called from varasm.c before most kinds of output. */
283 void
284 app_disable (void)
286 if (app_on)
288 fputs (ASM_APP_OFF, asm_out_file);
289 app_on = 0;
293 /* Return the number of slots filled in the current
294 delayed branch sequence (we don't count the insn needing the
295 delay slot). Zero if not in a delayed branch sequence. */
297 #ifdef DELAY_SLOTS
299 dbr_sequence_length (void)
301 if (final_sequence != 0)
302 return XVECLEN (final_sequence, 0) - 1;
303 else
304 return 0;
306 #endif
308 /* The next two pages contain routines used to compute the length of an insn
309 and to shorten branches. */
311 /* Arrays for insn lengths, and addresses. The latter is referenced by
312 `insn_current_length'. */
314 static int *insn_lengths;
316 VEC(int,heap) *insn_addresses_;
318 /* Max uid for which the above arrays are valid. */
319 static int insn_lengths_max_uid;
321 /* Address of insn being processed. Used by `insn_current_length'. */
322 int insn_current_address;
324 /* Address of insn being processed in previous iteration. */
325 int insn_last_address;
327 /* Known invariant alignment of the insn being processed. */
328 int insn_current_align;
330 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
331 gives the next following alignment insn that increases the known
332 alignment, or NULL_RTX if there is no such insn.
333 For any alignment obtained this way, we can again index uid_align with
334 its uid to obtain the next following align that in turn increases the
335 alignment, till we reach NULL_RTX; the sequence obtained this way
336 for each insn we'll call the alignment chain of this insn in the following
337 comments. */
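/* For example (added commentary; the insn names are hypothetical): in the
   insn stream I1, L2, I2, L3, I3, where L2 is a label aligned to 1 << 2,
   L3 is a label aligned to 1 << 3, and no later insn increases the
   alignment, the alignment chain of I1 is L2 followed by L3, the chain of
   I2 and of L2 is just L3, and the chains of L3 and I3 are empty.  */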
339 struct label_alignment
341 short alignment;
342 short max_skip;
345 static rtx *uid_align;
346 static int *uid_shuid;
347 static struct label_alignment *label_align;
349 /* Indicate that branch shortening hasn't yet been done. */
351 void
352 init_insn_lengths (void)
354 if (uid_shuid)
356 free (uid_shuid);
357 uid_shuid = 0;
359 if (insn_lengths)
361 free (insn_lengths);
362 insn_lengths = 0;
363 insn_lengths_max_uid = 0;
365 #ifdef HAVE_ATTR_length
366 INSN_ADDRESSES_FREE ();
367 #endif
368 if (uid_align)
370 free (uid_align);
371 uid_align = 0;
375 /* Obtain the current length of an insn. If branch shortening has been done,
376 get its actual length. Otherwise, use FALLBACK_FN to calculate the
377 length. */
378 static inline int
379 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
380 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
382 #ifdef HAVE_ATTR_length
383 rtx body;
384 int i;
385 int length = 0;
387 if (insn_lengths_max_uid > INSN_UID (insn))
388 return insn_lengths[INSN_UID (insn)];
389 else
390 switch (GET_CODE (insn))
392 case NOTE:
393 case BARRIER:
394 case CODE_LABEL:
395 case DEBUG_INSN:
396 return 0;
398 case CALL_INSN:
399 length = fallback_fn (insn);
400 break;
402 case JUMP_INSN:
403 body = PATTERN (insn);
404 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
406 /* Alignment is machine-dependent and should be handled by
407 ADDR_VEC_ALIGN. */
409 else
410 length = fallback_fn (insn);
411 break;
413 case INSN:
414 body = PATTERN (insn);
415 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
416 return 0;
418 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
419 length = asm_insn_count (body) * fallback_fn (insn);
420 else if (GET_CODE (body) == SEQUENCE)
421 for (i = 0; i < XVECLEN (body, 0); i++)
422 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
423 else
424 length = fallback_fn (insn);
425 break;
427 default:
428 break;
431 #ifdef ADJUST_INSN_LENGTH
432 ADJUST_INSN_LENGTH (insn, length);
433 #endif
434 return length;
435 #else /* not HAVE_ATTR_length */
436 return 0;
437 #define insn_default_length 0
438 #define insn_min_length 0
439 #endif /* not HAVE_ATTR_length */
442 /* Obtain the current length of an insn. If branch shortening has been done,
443 get its actual length. Otherwise, get its maximum length. */
445 get_attr_length (rtx insn)
447 return get_attr_length_1 (insn, insn_default_length);
450 /* Obtain the current length of an insn. If branch shortening has been done,
451 get its actual length. Otherwise, get its minimum length. */
453 get_attr_min_length (rtx insn)
455 return get_attr_length_1 (insn, insn_min_length);
458 /* Code to handle alignment inside shorten_branches. */
460 /* Here is an explanation of how the algorithm in align_fuzz can give
461 proper results:
463 Call a sequence of instructions beginning with alignment point X
464 and continuing until the next alignment point `block X'. When `X'
465 is used in an expression, it means the alignment value of the
466 alignment point.
468 Call the distance between the start of the first insn of block X, and
469 the end of the last insn of block X `IX', for the `inner size of X'.
470 This is clearly the sum of the instruction lengths.
472 Likewise with the next alignment-delimited block following X, which we
473 shall call block Y.
475 Call the distance between the start of the first insn of block X, and
476 the start of the first insn of block Y `OX', for the `outer size of X'.
478 The estimated padding is then OX - IX.
480 OX can be safely estimated as
482 if (X >= Y)
483 OX = round_up(IX, Y)
484 else
485 OX = round_up(IX, X) + Y - X
487 Clearly est(IX) >= real(IX), because that only depends on the
488 instruction lengths, and those being overestimated is a given.
490 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
491 we needn't worry about that when thinking about OX.
493 When X >= Y, the alignment provided by Y adds no uncertainty factor
494 for branch ranges starting before X, so we can just round what we have.
495 But when X < Y, we don't know anything about the, so to speak,
496 `middle bits', so we have to assume the worst when aligning up from an
497 address mod X to one mod Y, which is Y - X. */
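/* Illustrative sketch (added commentary, not part of the original file):
   the OX estimate described above, written out as plain C.  The helper
   names round_up_to and estimate_outer_size are hypothetical and exist
   only for this example; the real computation in align_fuzz works on insn
   addresses and the alignment chain instead.  */

static inline int
round_up_to (int value, int align)
{
  /* ALIGN is assumed to be a power of two.  */
  return (value + align - 1) & -align;
}

static inline int
estimate_outer_size (int ix, int x_align, int y_align)
{
  /* IX is the inner size of block X; X_ALIGN and Y_ALIGN are the byte
     alignments (powers of two) of alignment points X and Y.  */
  if (x_align >= y_align)
    return round_up_to (ix, y_align);
  return round_up_to (ix, x_align) + y_align - x_align;
}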
499 #ifndef LABEL_ALIGN
500 #define LABEL_ALIGN(LABEL) align_labels_log
501 #endif
503 #ifndef LOOP_ALIGN
504 #define LOOP_ALIGN(LABEL) align_loops_log
505 #endif
507 #ifndef LABEL_ALIGN_AFTER_BARRIER
508 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
509 #endif
511 #ifndef JUMP_ALIGN
512 #define JUMP_ALIGN(LABEL) align_jumps_log
513 #endif
516 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
518 return 0;
522 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
524 return align_loops_max_skip;
528 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
530 return align_labels_max_skip;
534 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
536 return align_jumps_max_skip;
539 #ifndef ADDR_VEC_ALIGN
540 static int
541 final_addr_vec_align (rtx addr_vec)
543 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
545 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
546 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
547 return exact_log2 (align);
551 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
552 #endif
554 #ifndef INSN_LENGTH_ALIGNMENT
555 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
556 #endif
558 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
560 static int min_labelno, max_labelno;
562 #define LABEL_TO_ALIGNMENT(LABEL) \
563 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
565 #define LABEL_TO_MAX_SKIP(LABEL) \
566 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
568 /* For the benefit of port-specific code, do this also as a function. */
571 label_to_alignment (rtx label)
573 if (CODE_LABEL_NUMBER (label) <= max_labelno)
574 return LABEL_TO_ALIGNMENT (label);
575 return 0;
579 label_to_max_skip (rtx label)
581 if (CODE_LABEL_NUMBER (label) <= max_labelno)
582 return LABEL_TO_MAX_SKIP (label);
583 return 0;
586 #ifdef HAVE_ATTR_length
587 /* The differences in addresses
588 between a branch and its target might grow or shrink depending on
589 the alignment the start insn of the range (the branch for a forward
590 branch or the label for a backward branch) starts out on; if these
591 differences are used naively, they can even oscillate infinitely.
592 We therefore want to compute a 'worst case' address difference that
593 is independent of the alignment the start insn of the range ends
594 up on, and that is at least as large as the actual difference.
595 The function align_fuzz calculates the amount we have to add to the
596 naively computed difference, by traversing the part of the alignment
597 chain of the start insn of the range that is in front of the end insn
598 of the range, and considering for each alignment the maximum amount
599 that it might contribute to a size increase.
601 For casesi tables, we also want to know worst case minimum amounts of
602 address difference, in case a machine description wants to introduce
603 some common offset that is added to all offsets in a table.
604 For this purpose, align_fuzz with a growth argument of 0 computes the
605 appropriate adjustment. */
607 /* Compute the maximum delta by which the difference of the addresses of
608 START and END might grow / shrink due to a different address for start
609 which changes the size of alignment insns between START and END.
610 KNOWN_ALIGN_LOG is the alignment known for START.
611 GROWTH should be ~0 if the objective is to compute potential code size
612 increase, and 0 if the objective is to compute potential shrink.
613 The return value is undefined for any other value of GROWTH. */
615 static int
616 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
618 int uid = INSN_UID (start);
619 rtx align_label;
620 int known_align = 1 << known_align_log;
621 int end_shuid = INSN_SHUID (end);
622 int fuzz = 0;
624 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
626 int align_addr, new_align;
628 uid = INSN_UID (align_label);
629 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
630 if (uid_shuid[uid] > end_shuid)
631 break;
632 known_align_log = LABEL_TO_ALIGNMENT (align_label);
633 new_align = 1 << known_align_log;
634 if (new_align < known_align)
635 continue;
636 fuzz += (-align_addr ^ growth) & (new_align - known_align);
637 known_align = new_align;
639 return fuzz;
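/* A worked example (added commentary): suppose the only alignment point
   between START and END aligns to 8 bytes, KNOWN_ALIGN_LOG is 1 (START is
   known to be 2-byte aligned), and the address just before that alignment
   point currently happens to be a multiple of 8, so no padding is emitted
   there at the moment.  If START moves by some multiple of 2, up to 6 bytes
   of padding could appear at that point, so align_fuzz returns 6 for
   GROWTH == ~0; the padding cannot shrink below zero, so align_fuzz
   returns 0 for GROWTH == 0.  */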
642 /* Compute a worst-case reference address of a branch so that it
643 can be safely used in the presence of aligned labels. Since the
644 size of the branch itself is unknown, the size of the branch is
645 not included in the range. I.e. for a forward branch, the reference
646 address is the end address of the branch as known from the previous
647 branch shortening pass, minus a value to account for possible size
648 increase due to alignment. For a backward branch, it is the start
649 address of the branch as known from the current pass, plus a value
650 to account for possible size increase due to alignment.
651 NB.: Therefore, the maximum offset allowed for backward branches needs
652 to exclude the branch size. */
655 insn_current_reference_address (rtx branch)
657 rtx dest, seq;
658 int seq_uid;
660 if (! INSN_ADDRESSES_SET_P ())
661 return 0;
663 seq = NEXT_INSN (PREV_INSN (branch));
664 seq_uid = INSN_UID (seq);
665 if (!JUMP_P (branch))
666 /* This can happen for example on the PA; the objective is to know the
667 offset to address something in front of the start of the function.
668 Thus, we can treat it like a backward branch.
669 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
670 any alignment we'd encounter, so we skip the call to align_fuzz. */
671 return insn_current_address;
672 dest = JUMP_LABEL (branch);
674 /* BRANCH has no proper alignment chain set, so use SEQ.
675 BRANCH also has no INSN_SHUID. */
676 if (INSN_SHUID (seq) < INSN_SHUID (dest))
678 /* Forward branch. */
679 return (insn_last_address + insn_lengths[seq_uid]
680 - align_fuzz (seq, dest, length_unit_log, ~0));
682 else
684 /* Backward branch. */
685 return (insn_current_address
686 + align_fuzz (dest, seq, length_unit_log, ~0));
689 #endif /* HAVE_ATTR_length */
691 /* Compute branch alignments based on frequency information in the
692 CFG. */
694 unsigned int
695 compute_alignments (void)
697 int log, max_skip, max_log;
698 basic_block bb;
699 int freq_max = 0;
700 int freq_threshold = 0;
702 if (label_align)
704 free (label_align);
705 label_align = 0;
708 max_labelno = max_label_num ();
709 min_labelno = get_first_label_num ();
710 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
712 /* If not optimizing or optimizing for size, don't assign any alignments. */
713 if (! optimize || optimize_function_for_size_p (cfun))
714 return 0;
716 if (dump_file)
718 dump_reg_info (dump_file);
719 dump_flow_info (dump_file, TDF_DETAILS);
720 flow_loops_dump (dump_file, NULL, 1);
722 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
723 FOR_EACH_BB (bb)
724 if (bb->frequency > freq_max)
725 freq_max = bb->frequency;
726 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
728 if (dump_file)
729 fprintf(dump_file, "freq_max: %i\n",freq_max);
730 FOR_EACH_BB (bb)
732 rtx label = BB_HEAD (bb);
733 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
734 edge e;
735 edge_iterator ei;
737 if (!LABEL_P (label)
738 || optimize_bb_for_size_p (bb))
740 if (dump_file)
741 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
742 bb->index, bb->frequency, bb->loop_father->num,
743 bb_loop_depth (bb));
744 continue;
746 max_log = LABEL_ALIGN (label);
747 max_skip = targetm.asm_out.label_align_max_skip (label);
749 FOR_EACH_EDGE (e, ei, bb->preds)
751 if (e->flags & EDGE_FALLTHRU)
752 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
753 else
754 branch_frequency += EDGE_FREQUENCY (e);
756 if (dump_file)
758 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
759 bb->index, bb->frequency, bb->loop_father->num,
760 bb_loop_depth (bb),
761 fallthru_frequency, branch_frequency);
762 if (!bb->loop_father->inner && bb->loop_father->num)
763 fprintf (dump_file, " inner_loop");
764 if (bb->loop_father->header == bb)
765 fprintf (dump_file, " loop_header");
766 fprintf (dump_file, "\n");
769 /* There are two purposes for aligning a block with no fallthru incoming edge:
770 1) to avoid fetch stalls when the branch destination is near a cache boundary
771 2) to improve cache efficiency in case the previous block is not executed
772 (so it does not need to be in the cache).
774 To catch the first case, we align frequently executed blocks.
775 To catch the second, we align blocks that are executed more frequently
776 than the predecessor when the predecessor is unlikely to be executed
777 at the time the function is called. */
779 if (!has_fallthru
780 && (branch_frequency > freq_threshold
781 || (bb->frequency > bb->prev_bb->frequency * 10
782 && (bb->prev_bb->frequency
783 <= ENTRY_BLOCK_PTR->frequency / 2))))
785 log = JUMP_ALIGN (label);
786 if (dump_file)
787 fprintf(dump_file, " jump alignment added.\n");
788 if (max_log < log)
790 max_log = log;
791 max_skip = targetm.asm_out.jump_align_max_skip (label);
794 /* If the block is frequent and reached mostly by non-fallthru edges,
795 align it. It is most likely the first block of a loop. */
796 if (has_fallthru
797 && optimize_bb_for_speed_p (bb)
798 && branch_frequency + fallthru_frequency > freq_threshold
799 && (branch_frequency
800 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
802 log = LOOP_ALIGN (label);
803 if (dump_file)
804 fprintf(dump_file, " internal loop alignment added.\n");
805 if (max_log < log)
807 max_log = log;
808 max_skip = targetm.asm_out.loop_align_max_skip (label);
811 LABEL_TO_ALIGNMENT (label) = max_log;
812 LABEL_TO_MAX_SKIP (label) = max_skip;
815 loop_optimizer_finalize ();
816 free_dominance_info (CDI_DOMINATORS);
817 return 0;
820 struct rtl_opt_pass pass_compute_alignments =
823 RTL_PASS,
824 "alignments", /* name */
825 NULL, /* gate */
826 compute_alignments, /* execute */
827 NULL, /* sub */
828 NULL, /* next */
829 0, /* static_pass_number */
830 TV_NONE, /* tv_id */
831 0, /* properties_required */
832 0, /* properties_provided */
833 0, /* properties_destroyed */
834 0, /* todo_flags_start */
835 TODO_verify_rtl_sharing
836 | TODO_ggc_collect /* todo_flags_finish */
841 /* Make a pass over all insns and compute their actual lengths by shortening
842 any branches of variable length if possible. */
844 /* shorten_branches might be called multiple times: for example, the SH
845 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
846 In order to do this, it needs proper length information, which it obtains
847 by calling shorten_branches. This cannot be collapsed with
848 shorten_branches itself into a single pass unless we also want to integrate
849 reorg.c, since the branch splitting exposes new instructions with delay
850 slots. */
852 void
853 shorten_branches (rtx first ATTRIBUTE_UNUSED)
855 rtx insn;
856 int max_uid;
857 int i;
858 int max_log;
859 int max_skip;
860 #ifdef HAVE_ATTR_length
861 #define MAX_CODE_ALIGN 16
862 rtx seq;
863 int something_changed = 1;
864 char *varying_length;
865 rtx body;
866 int uid;
867 rtx align_tab[MAX_CODE_ALIGN];
869 #endif
871 /* Compute maximum UID and allocate label_align / uid_shuid. */
872 max_uid = get_max_uid ();
874 /* Free uid_shuid before reallocating it. */
875 free (uid_shuid);
877 uid_shuid = XNEWVEC (int, max_uid);
879 if (max_labelno != max_label_num ())
881 int old = max_labelno;
882 int n_labels;
883 int n_old_labels;
885 max_labelno = max_label_num ();
887 n_labels = max_labelno - min_labelno + 1;
888 n_old_labels = old - min_labelno + 1;
890 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
892 /* The range of labels grows monotonically in the function. Failing here
893 means that the initialization of the array got lost.
894 gcc_assert (n_old_labels <= n_labels);
896 memset (label_align + n_old_labels, 0,
897 (n_labels - n_old_labels) * sizeof (struct label_alignment));
900 /* Initialize label_align and set up uid_shuid to be strictly
901 monotonically rising with insn order. */
902 /* We use max_log here to keep track of the maximum alignment we want to
903 impose on the next CODE_LABEL (or the current one if we are processing
904 the CODE_LABEL itself). */
906 max_log = 0;
907 max_skip = 0;
909 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
911 int log;
913 INSN_SHUID (insn) = i++;
914 if (INSN_P (insn))
915 continue;
917 if (LABEL_P (insn))
919 rtx next;
920 bool next_is_jumptable;
922 /* Merge in alignments computed by compute_alignments. */
923 log = LABEL_TO_ALIGNMENT (insn);
924 if (max_log < log)
926 max_log = log;
927 max_skip = LABEL_TO_MAX_SKIP (insn);
930 next = next_nonnote_insn (insn);
931 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
932 if (!next_is_jumptable)
934 log = LABEL_ALIGN (insn);
935 if (max_log < log)
937 max_log = log;
938 max_skip = targetm.asm_out.label_align_max_skip (insn);
941 /* ADDR_VECs only take room if read-only data goes into the text
942 section. */
943 if ((JUMP_TABLES_IN_TEXT_SECTION
944 || readonly_data_section == text_section)
945 && next_is_jumptable)
947 log = ADDR_VEC_ALIGN (next);
948 if (max_log < log)
950 max_log = log;
951 max_skip = targetm.asm_out.label_align_max_skip (insn);
954 LABEL_TO_ALIGNMENT (insn) = max_log;
955 LABEL_TO_MAX_SKIP (insn) = max_skip;
956 max_log = 0;
957 max_skip = 0;
959 else if (BARRIER_P (insn))
961 rtx label;
963 for (label = insn; label && ! INSN_P (label);
964 label = NEXT_INSN (label))
965 if (LABEL_P (label))
967 log = LABEL_ALIGN_AFTER_BARRIER (insn);
968 if (max_log < log)
970 max_log = log;
971 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
973 break;
977 #ifdef HAVE_ATTR_length
979 /* Allocate the rest of the arrays. */
980 insn_lengths = XNEWVEC (int, max_uid);
981 insn_lengths_max_uid = max_uid;
982 /* Syntax errors can lead to labels being outside of the main insn stream.
983 Initialize insn_addresses, so that we get reproducible results. */
984 INSN_ADDRESSES_ALLOC (max_uid);
986 varying_length = XCNEWVEC (char, max_uid);
988 /* Initialize uid_align. We scan instructions
989 from end to start, and keep in align_tab[n] the last seen insn
990 that does an alignment of at least n+1, i.e. the successor
991 in the alignment chain for an insn that does / has a known
992 alignment of n. */
993 uid_align = XCNEWVEC (rtx, max_uid);
995 for (i = MAX_CODE_ALIGN; --i >= 0;)
996 align_tab[i] = NULL_RTX;
997 seq = get_last_insn ();
998 for (; seq; seq = PREV_INSN (seq))
1000 int uid = INSN_UID (seq);
1001 int log;
1002 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1003 uid_align[uid] = align_tab[0];
1004 if (log)
1006 /* Found an alignment label. */
1007 uid_align[uid] = align_tab[log];
1008 for (i = log - 1; i >= 0; i--)
1009 align_tab[i] = seq;
1013 /* When optimizing, we start assuming minimum length, and keep increasing
1014 lengths as we find the need for this, till nothing changes.
1015 When not optimizing, we start assuming maximum lengths, and
1016 do a single pass to update the lengths. */
1017 bool increasing = optimize != 0;
1019 #ifdef CASE_VECTOR_SHORTEN_MODE
1020 if (optimize)
1022 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1023 label fields. */
1025 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1026 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1027 int rel;
1029 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1031 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1032 int len, i, min, max, insn_shuid;
1033 int min_align;
1034 addr_diff_vec_flags flags;
1036 if (!JUMP_P (insn)
1037 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1038 continue;
1039 pat = PATTERN (insn);
1040 len = XVECLEN (pat, 1);
1041 gcc_assert (len > 0);
1042 min_align = MAX_CODE_ALIGN;
1043 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1045 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1046 int shuid = INSN_SHUID (lab);
1047 if (shuid < min)
1049 min = shuid;
1050 min_lab = lab;
1052 if (shuid > max)
1054 max = shuid;
1055 max_lab = lab;
1057 if (min_align > LABEL_TO_ALIGNMENT (lab))
1058 min_align = LABEL_TO_ALIGNMENT (lab);
1060 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1061 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1062 insn_shuid = INSN_SHUID (insn);
1063 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1064 memset (&flags, 0, sizeof (flags));
1065 flags.min_align = min_align;
1066 flags.base_after_vec = rel > insn_shuid;
1067 flags.min_after_vec = min > insn_shuid;
1068 flags.max_after_vec = max > insn_shuid;
1069 flags.min_after_base = min > rel;
1070 flags.max_after_base = max > rel;
1071 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1073 if (increasing)
1074 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1077 #endif /* CASE_VECTOR_SHORTEN_MODE */
1079 /* Compute initial lengths, addresses, and varying flags for each insn. */
1080 int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1082 for (insn_current_address = 0, insn = first;
1083 insn != 0;
1084 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1086 uid = INSN_UID (insn);
1088 insn_lengths[uid] = 0;
1090 if (LABEL_P (insn))
1092 int log = LABEL_TO_ALIGNMENT (insn);
1093 if (log)
1095 int align = 1 << log;
1096 int new_address = (insn_current_address + align - 1) & -align;
1097 insn_lengths[uid] = new_address - insn_current_address;
1101 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1103 if (NOTE_P (insn) || BARRIER_P (insn)
1104 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1105 continue;
1106 if (INSN_DELETED_P (insn))
1107 continue;
1109 body = PATTERN (insn);
1110 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1112 /* This only takes room if read-only data goes into the text
1113 section. */
1114 if (JUMP_TABLES_IN_TEXT_SECTION
1115 || readonly_data_section == text_section)
1116 insn_lengths[uid] = (XVECLEN (body,
1117 GET_CODE (body) == ADDR_DIFF_VEC)
1118 * GET_MODE_SIZE (GET_MODE (body)));
1119 /* Alignment is handled by ADDR_VEC_ALIGN. */
1121 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1122 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1123 else if (GET_CODE (body) == SEQUENCE)
1125 int i;
1126 int const_delay_slots;
1127 #ifdef DELAY_SLOTS
1128 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1129 #else
1130 const_delay_slots = 0;
1131 #endif
1132 int (*inner_length_fun) (rtx)
1133 = const_delay_slots ? length_fun : insn_default_length;
1134 /* Inside a delay slot sequence, we do not do any branch shortening
1135 if the shortening could change the number of delay slots
1136 of the branch. */
1137 for (i = 0; i < XVECLEN (body, 0); i++)
1139 rtx inner_insn = XVECEXP (body, 0, i);
1140 int inner_uid = INSN_UID (inner_insn);
1141 int inner_length;
1143 if (GET_CODE (body) == ASM_INPUT
1144 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1145 inner_length = (asm_insn_count (PATTERN (inner_insn))
1146 * insn_default_length (inner_insn));
1147 else
1148 inner_length = inner_length_fun (inner_insn);
1150 insn_lengths[inner_uid] = inner_length;
1151 if (const_delay_slots)
1153 if ((varying_length[inner_uid]
1154 = insn_variable_length_p (inner_insn)) != 0)
1155 varying_length[uid] = 1;
1156 INSN_ADDRESSES (inner_uid) = (insn_current_address
1157 + insn_lengths[uid]);
1159 else
1160 varying_length[inner_uid] = 0;
1161 insn_lengths[uid] += inner_length;
1164 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1166 insn_lengths[uid] = length_fun (insn);
1167 varying_length[uid] = insn_variable_length_p (insn);
1170 /* If needed, do any adjustment. */
1171 #ifdef ADJUST_INSN_LENGTH
1172 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1173 if (insn_lengths[uid] < 0)
1174 fatal_insn ("negative insn length", insn);
1175 #endif
1178 /* Now loop over all the insns finding varying length insns. For each,
1179 get the current insn length. If it has changed, reflect the change.
1180 When nothing changes for a full pass, we are done. */
1182 while (something_changed)
1184 something_changed = 0;
1185 insn_current_align = MAX_CODE_ALIGN - 1;
1186 for (insn_current_address = 0, insn = first;
1187 insn != 0;
1188 insn = NEXT_INSN (insn))
1190 int new_length;
1191 #ifdef ADJUST_INSN_LENGTH
1192 int tmp_length;
1193 #endif
1194 int length_align;
1196 uid = INSN_UID (insn);
1198 if (LABEL_P (insn))
1200 int log = LABEL_TO_ALIGNMENT (insn);
1201 if (log > insn_current_align)
1203 int align = 1 << log;
1204 int new_address= (insn_current_address + align - 1) & -align;
1205 insn_lengths[uid] = new_address - insn_current_address;
1206 insn_current_align = log;
1207 insn_current_address = new_address;
1209 else
1210 insn_lengths[uid] = 0;
1211 INSN_ADDRESSES (uid) = insn_current_address;
1212 continue;
1215 length_align = INSN_LENGTH_ALIGNMENT (insn);
1216 if (length_align < insn_current_align)
1217 insn_current_align = length_align;
1219 insn_last_address = INSN_ADDRESSES (uid);
1220 INSN_ADDRESSES (uid) = insn_current_address;
1222 #ifdef CASE_VECTOR_SHORTEN_MODE
1223 if (optimize && JUMP_P (insn)
1224 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1226 rtx body = PATTERN (insn);
1227 int old_length = insn_lengths[uid];
1228 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1229 rtx min_lab = XEXP (XEXP (body, 2), 0);
1230 rtx max_lab = XEXP (XEXP (body, 3), 0);
1231 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1232 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1233 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1234 rtx prev;
1235 int rel_align = 0;
1236 addr_diff_vec_flags flags;
1237 enum machine_mode vec_mode;
1239 /* Avoid automatic aggregate initialization. */
1240 flags = ADDR_DIFF_VEC_FLAGS (body);
1242 /* Try to find a known alignment for rel_lab. */
1243 for (prev = rel_lab;
1244 prev
1245 && ! insn_lengths[INSN_UID (prev)]
1246 && ! (varying_length[INSN_UID (prev)] & 1);
1247 prev = PREV_INSN (prev))
1248 if (varying_length[INSN_UID (prev)] & 2)
1250 rel_align = LABEL_TO_ALIGNMENT (prev);
1251 break;
1254 /* See the comment on addr_diff_vec_flags in rtl.h for the
1255 meaning of the flags values. base: REL_LAB vec: INSN */
1256 /* Anything after INSN still has addresses from the last
1257 pass; adjust these so that they reflect our current
1258 estimate for this pass. */
1259 if (flags.base_after_vec)
1260 rel_addr += insn_current_address - insn_last_address;
1261 if (flags.min_after_vec)
1262 min_addr += insn_current_address - insn_last_address;
1263 if (flags.max_after_vec)
1264 max_addr += insn_current_address - insn_last_address;
1265 /* We want to know the worst case, i.e. lowest possible value
1266 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1267 its offset is positive, and we have to be wary of code shrink;
1268 otherwise, it is negative, and we have to be wary of code
1269 size increase. */
1270 if (flags.min_after_base)
1272 /* If INSN is between REL_LAB and MIN_LAB, the size
1273 changes we are about to make can change the alignment
1274 within the observed offset, therefore we have to break
1275 it up into two parts that are independent. */
1276 if (! flags.base_after_vec && flags.min_after_vec)
1278 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1279 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1281 else
1282 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1284 else
1286 if (flags.base_after_vec && ! flags.min_after_vec)
1288 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1289 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1291 else
1292 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1294 /* Likewise, determine the highest possible value
1295 for the offset of MAX_LAB. */
1296 if (flags.max_after_base)
1298 if (! flags.base_after_vec && flags.max_after_vec)
1300 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1301 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1303 else
1304 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1306 else
1308 if (flags.base_after_vec && ! flags.max_after_vec)
1310 max_addr += align_fuzz (max_lab, insn, 0, 0);
1311 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1313 else
1314 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1316 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1317 max_addr - rel_addr, body);
1318 if (!increasing
1319 || (GET_MODE_SIZE (vec_mode)
1320 >= GET_MODE_SIZE (GET_MODE (body))))
1321 PUT_MODE (body, vec_mode);
1322 if (JUMP_TABLES_IN_TEXT_SECTION
1323 || readonly_data_section == text_section)
1325 insn_lengths[uid]
1326 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1327 insn_current_address += insn_lengths[uid];
1328 if (insn_lengths[uid] != old_length)
1329 something_changed = 1;
1332 continue;
1334 #endif /* CASE_VECTOR_SHORTEN_MODE */
1336 if (! (varying_length[uid]))
1338 if (NONJUMP_INSN_P (insn)
1339 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1341 int i;
1343 body = PATTERN (insn);
1344 for (i = 0; i < XVECLEN (body, 0); i++)
1346 rtx inner_insn = XVECEXP (body, 0, i);
1347 int inner_uid = INSN_UID (inner_insn);
1349 INSN_ADDRESSES (inner_uid) = insn_current_address;
1351 insn_current_address += insn_lengths[inner_uid];
1354 else
1355 insn_current_address += insn_lengths[uid];
1357 continue;
1360 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1362 int i;
1364 body = PATTERN (insn);
1365 new_length = 0;
1366 for (i = 0; i < XVECLEN (body, 0); i++)
1368 rtx inner_insn = XVECEXP (body, 0, i);
1369 int inner_uid = INSN_UID (inner_insn);
1370 int inner_length;
1372 INSN_ADDRESSES (inner_uid) = insn_current_address;
1374 /* insn_current_length returns 0 for insns with a
1375 non-varying length. */
1376 if (! varying_length[inner_uid])
1377 inner_length = insn_lengths[inner_uid];
1378 else
1379 inner_length = insn_current_length (inner_insn);
1381 if (inner_length != insn_lengths[inner_uid])
1383 if (!increasing || inner_length > insn_lengths[inner_uid])
1385 insn_lengths[inner_uid] = inner_length;
1386 something_changed = 1;
1388 else
1389 inner_length = insn_lengths[inner_uid];
1391 insn_current_address += inner_length;
1392 new_length += inner_length;
1395 else
1397 new_length = insn_current_length (insn);
1398 insn_current_address += new_length;
1401 #ifdef ADJUST_INSN_LENGTH
1402 /* If needed, do any adjustment. */
1403 tmp_length = new_length;
1404 ADJUST_INSN_LENGTH (insn, new_length);
1405 insn_current_address += (new_length - tmp_length);
1406 #endif
1408 if (new_length != insn_lengths[uid]
1409 && (!increasing || new_length > insn_lengths[uid]))
1411 insn_lengths[uid] = new_length;
1412 something_changed = 1;
1414 else
1415 insn_current_address += insn_lengths[uid] - new_length;
1417 /* For a non-optimizing compile, do only a single pass. */
1418 if (!increasing)
1419 break;
1422 free (varying_length);
1424 #endif /* HAVE_ATTR_length */
1427 #ifdef HAVE_ATTR_length
1428 /* Given the body of an INSN known to be generated by an ASM statement, return
1429 the number of machine instructions likely to be generated for this insn.
1430 This is used to compute its length. */
1432 static int
1433 asm_insn_count (rtx body)
1435 const char *templ;
1437 if (GET_CODE (body) == ASM_INPUT)
1438 templ = XSTR (body, 0);
1439 else
1440 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1442 return asm_str_count (templ);
1444 #endif
1446 /* Return the number of machine instructions likely to be generated for the
1447 inline-asm template. */
1449 asm_str_count (const char *templ)
1451 int count = 1;
1453 if (!*templ)
1454 return 0;
1456 for (; *templ; templ++)
1457 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1458 || *templ == '\n')
1459 count++;
1461 return count;
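/* Examples (added commentary, assuming the default definition of
   IS_ASM_LOGICAL_LINE_SEPARATOR above, which treats ';' as a separator):
   asm_str_count ("") returns 0, asm_str_count ("nop") returns 1,
   asm_str_count ("mov %1,%0\n\tadd %2,%0") returns 2, and
   asm_str_count ("nop; nop; nop") returns 3.  */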
1464 /* ??? This is probably the wrong place for these. */
1465 /* Structure recording the mapping from source file and directory
1466 names at compile time to those to be embedded in debug
1467 information. */
1468 typedef struct debug_prefix_map
1470 const char *old_prefix;
1471 const char *new_prefix;
1472 size_t old_len;
1473 size_t new_len;
1474 struct debug_prefix_map *next;
1475 } debug_prefix_map;
1477 /* Linked list of such structures. */
1478 debug_prefix_map *debug_prefix_maps;
1481 /* Record a debug file prefix mapping. ARG is the argument to
1482 -fdebug-prefix-map and must be of the form OLD=NEW. */
1484 void
1485 add_debug_prefix_map (const char *arg)
1487 debug_prefix_map *map;
1488 const char *p;
1490 p = strchr (arg, '=');
1491 if (!p)
1493 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1494 return;
1496 map = XNEW (debug_prefix_map);
1497 map->old_prefix = xstrndup (arg, p - arg);
1498 map->old_len = p - arg;
1499 p++;
1500 map->new_prefix = xstrdup (p);
1501 map->new_len = strlen (p);
1502 map->next = debug_prefix_maps;
1503 debug_prefix_maps = map;
1506 /* Perform user-specified mapping of debug filename prefixes. Return
1507 the new name corresponding to FILENAME. */
1509 const char *
1510 remap_debug_filename (const char *filename)
1512 debug_prefix_map *map;
1513 char *s;
1514 const char *name;
1515 size_t name_len;
1517 for (map = debug_prefix_maps; map; map = map->next)
1518 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1519 break;
1520 if (!map)
1521 return filename;
1522 name = filename + map->old_len;
1523 name_len = strlen (name) + 1;
1524 s = (char *) alloca (name_len + map->new_len);
1525 memcpy (s, map->new_prefix, map->new_len);
1526 memcpy (s + map->new_len, name, name_len);
1527 return ggc_strdup (s);
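/* For example (added commentary; the paths shown are hypothetical): after
   -fdebug-prefix-map=/home/alice/src=/usr/src has been processed by
   add_debug_prefix_map, remap_debug_filename ("/home/alice/src/lib/foo.c")
   returns "/usr/src/lib/foo.c"; a filename that does not start with any
   recorded old prefix is returned unchanged.  */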
1530 /* Return true if DWARF2 debug info can be emitted for DECL. */
1532 static bool
1533 dwarf2_debug_info_emitted_p (tree decl)
1535 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1536 return false;
1538 if (DECL_IGNORED_P (decl))
1539 return false;
1541 return true;
1544 /* Return scope resulting from combination of S1 and S2. */
1545 static tree
1546 choose_inner_scope (tree s1, tree s2)
1548 if (!s1)
1549 return s2;
1550 if (!s2)
1551 return s1;
1552 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1553 return s1;
1554 return s2;
1557 /* Emit lexical block notes needed to change scope from S1 to S2. */
1559 static void
1560 change_scope (rtx orig_insn, tree s1, tree s2)
1562 rtx insn = orig_insn;
1563 tree com = NULL_TREE;
1564 tree ts1 = s1, ts2 = s2;
1565 tree s;
1567 while (ts1 != ts2)
1569 gcc_assert (ts1 && ts2);
1570 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1571 ts1 = BLOCK_SUPERCONTEXT (ts1);
1572 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1573 ts2 = BLOCK_SUPERCONTEXT (ts2);
1574 else
1576 ts1 = BLOCK_SUPERCONTEXT (ts1);
1577 ts2 = BLOCK_SUPERCONTEXT (ts2);
1580 com = ts1;
1582 /* Close scopes. */
1583 s = s1;
1584 while (s != com)
1586 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1587 NOTE_BLOCK (note) = s;
1588 s = BLOCK_SUPERCONTEXT (s);
1591 /* Open scopes. */
1592 s = s2;
1593 while (s != com)
1595 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1596 NOTE_BLOCK (insn) = s;
1597 s = BLOCK_SUPERCONTEXT (s);
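/* For example (added commentary; the block names are hypothetical): if the
   scope changes from a block C (nested in B, nested in A) to a block D
   nested directly in A, change_scope emits, immediately before ORIG_INSN
   and in this order, a NOTE_INSN_BLOCK_END for C, a NOTE_INSN_BLOCK_END
   for B, and a NOTE_INSN_BLOCK_BEG for D.  */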
1601 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1602 on the scope tree and the newly reordered instructions. */
1604 static void
1605 reemit_insn_block_notes (void)
1607 tree cur_block = DECL_INITIAL (cfun->decl);
1608 rtx insn, note;
1610 insn = get_insns ();
1611 if (!active_insn_p (insn))
1612 insn = next_active_insn (insn);
1613 for (; insn; insn = next_active_insn (insn))
1615 tree this_block;
1617 /* Avoid putting scope notes between jump table and its label. */
1618 if (JUMP_TABLE_DATA_P (insn))
1619 continue;
1621 this_block = insn_scope (insn);
1622 /* For sequences compute scope resulting from merging all scopes
1623 of instructions nested inside. */
1624 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
1626 int i;
1627 rtx body = PATTERN (insn);
1629 this_block = NULL;
1630 for (i = 0; i < XVECLEN (body, 0); i++)
1631 this_block = choose_inner_scope (this_block,
1632 insn_scope (XVECEXP (body, 0, i)));
1634 if (! this_block)
1635 this_block = DECL_INITIAL (cfun->decl);
1637 if (this_block != cur_block)
1639 change_scope (insn, cur_block, this_block);
1640 cur_block = this_block;
1644 /* change_scope emits before the insn, not after. */
1645 note = emit_note (NOTE_INSN_DELETED);
1646 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1647 delete_insn (note);
1649 reorder_blocks ();
1652 /* Output assembler code for the start of a function,
1653 and initialize some of the variables in this file
1654 for the new function. The label for the function and associated
1655 assembler pseudo-ops have already been output in `assemble_start_function'.
1657 FIRST is the first insn of the rtl for the function being compiled.
1658 FILE is the file to write assembler code to.
1659 OPTIMIZE_P is nonzero if we should eliminate redundant
1660 test and compare insns. */
1662 void
1663 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1664 int optimize_p ATTRIBUTE_UNUSED)
1666 block_depth = 0;
1668 this_is_asm_operands = 0;
1670 last_filename = LOCATION_FILE (prologue_location);
1671 last_linenum = LOCATION_LINE (prologue_location);
1672 last_discriminator = discriminator = 0;
1674 high_block_linenum = high_function_linenum = last_linenum;
1676 if (!DECL_IGNORED_P (current_function_decl))
1677 debug_hooks->begin_prologue (last_linenum, last_filename);
1679 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1680 dwarf2out_begin_prologue (0, NULL);
1682 #ifdef LEAF_REG_REMAP
1683 if (crtl->uses_only_leaf_regs)
1684 leaf_renumber_regs (first);
1685 #endif
1687 /* The Sun386i and perhaps other machines don't work right
1688 if the profiling code comes after the prologue. */
1689 if (targetm.profile_before_prologue () && crtl->profile)
1690 profile_function (file);
1692 /* If debugging, assign block numbers to all of the blocks in this
1693 function. */
1694 if (write_symbols)
1696 reemit_insn_block_notes ();
1697 number_blocks (current_function_decl);
1698 /* We never actually put out begin/end notes for the top-level
1699 block in the function. But, conceptually, that block is
1700 always needed. */
1701 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1704 if (warn_frame_larger_than
1705 && get_frame_size () > frame_larger_than_size)
1707 /* Issue a warning */
1708 warning (OPT_Wframe_larger_than_,
1709 "the frame size of %wd bytes is larger than %wd bytes",
1710 get_frame_size (), frame_larger_than_size);
1713 /* First output the function prologue: code to set up the stack frame. */
1714 targetm.asm_out.function_prologue (file, get_frame_size ());
1716 /* If the machine represents the prologue as RTL, the profiling code must
1717 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1718 #ifdef HAVE_prologue
1719 if (! HAVE_prologue)
1720 #endif
1721 profile_after_prologue (file);
1724 static void
1725 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1727 if (!targetm.profile_before_prologue () && crtl->profile)
1728 profile_function (file);
1731 static void
1732 profile_function (FILE *file ATTRIBUTE_UNUSED)
1734 #ifndef NO_PROFILE_COUNTERS
1735 # define NO_PROFILE_COUNTERS 0
1736 #endif
1737 #ifdef ASM_OUTPUT_REG_PUSH
1738 rtx sval = NULL, chain = NULL;
1740 if (cfun->returns_struct)
1741 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1742 true);
1743 if (cfun->static_chain_decl)
1744 chain = targetm.calls.static_chain (current_function_decl, true);
1745 #endif /* ASM_OUTPUT_REG_PUSH */
1747 if (! NO_PROFILE_COUNTERS)
1749 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1750 switch_to_section (data_section);
1751 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1752 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1753 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1756 switch_to_section (current_function_section ());
1758 #ifdef ASM_OUTPUT_REG_PUSH
1759 if (sval && REG_P (sval))
1760 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1761 if (chain && REG_P (chain))
1762 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1763 #endif
1765 FUNCTION_PROFILER (file, current_function_funcdef_no);
1767 #ifdef ASM_OUTPUT_REG_PUSH
1768 if (chain && REG_P (chain))
1769 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1770 if (sval && REG_P (sval))
1771 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1772 #endif
1775 /* Output assembler code for the end of a function.
1776 For clarity, args are same as those of `final_start_function'
1777 even though not all of them are needed. */
1779 void
1780 final_end_function (void)
1782 app_disable ();
1784 if (!DECL_IGNORED_P (current_function_decl))
1785 debug_hooks->end_function (high_function_linenum);
1787 /* Finally, output the function epilogue:
1788 code to restore the stack frame and return to the caller. */
1789 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1791 /* And debug output. */
1792 if (!DECL_IGNORED_P (current_function_decl))
1793 debug_hooks->end_epilogue (last_linenum, last_filename);
1795 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1796 && dwarf2out_do_frame ())
1797 dwarf2out_end_epilogue (last_linenum, last_filename);
1801 /* Dumper helper for basic block information. FILE is the assembly
1802 output file, and INSN is the instruction being emitted. */
1804 static void
1805 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1806 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1808 basic_block bb;
1810 if (!flag_debug_asm)
1811 return;
1813 if (INSN_UID (insn) < bb_map_size
1814 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1816 edge e;
1817 edge_iterator ei;
1819 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1820 if (bb->frequency)
1821 fprintf (file, " freq:%d", bb->frequency);
1822 if (bb->count)
1823 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1824 bb->count);
1825 fprintf (file, " seq:%d", (*bb_seqn)++);
1826 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1827 FOR_EACH_EDGE (e, ei, bb->preds)
1829 dump_edge_info (file, e, TDF_DETAILS, 0);
1831 fprintf (file, "\n");
1833 if (INSN_UID (insn) < bb_map_size
1834 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1836 edge e;
1837 edge_iterator ei;
1839 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1840 FOR_EACH_EDGE (e, ei, bb->succs)
1842 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1844 fprintf (file, "\n");
1848 /* Output assembler code for some insns: all or part of a function.
1849 For description of args, see `final_start_function', above. */
1851 void
1852 final (rtx first, FILE *file, int optimize_p)
1854 rtx insn, next;
1855 int seen = 0;
1857 /* Used for -dA dump. */
1858 basic_block *start_to_bb = NULL;
1859 basic_block *end_to_bb = NULL;
1860 int bb_map_size = 0;
1861 int bb_seqn = 0;
1863 last_ignored_compare = 0;
1865 #ifdef HAVE_cc0
1866 for (insn = first; insn; insn = NEXT_INSN (insn))
1868 /* If CC tracking across branches is enabled, record the insn that
1869 jumps to each label that is reached from only one place. */
1870 if (optimize_p && JUMP_P (insn))
1872 rtx lab = JUMP_LABEL (insn);
1873 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1875 LABEL_REFS (lab) = insn;
1879 #endif
1881 init_recog ();
1883 CC_STATUS_INIT;
1885 if (flag_debug_asm)
1887 basic_block bb;
1889 bb_map_size = get_max_uid () + 1;
1890 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1891 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1893 /* There is no cfg for a thunk. */
1894 if (!cfun->is_thunk)
1895 FOR_EACH_BB_REVERSE (bb)
1897 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1898 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1902 /* Output the insns. */
1903 for (insn = first; insn;)
1905 #ifdef HAVE_ATTR_length
1906 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1908 /* This can be triggered by bugs elsewhere in the compiler if
1909 new insns are created after init_insn_lengths is called. */
1910 gcc_assert (NOTE_P (insn));
1911 insn_current_address = -1;
1913 else
1914 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1915 #endif /* HAVE_ATTR_length */
1917 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1918 bb_map_size, &bb_seqn);
1919 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1922 if (flag_debug_asm)
1924 free (start_to_bb);
1925 free (end_to_bb);
1928 /* Remove CFI notes, to avoid compare-debug failures. */
1929 for (insn = first; insn; insn = next)
1931 next = NEXT_INSN (insn);
1932 if (NOTE_P (insn)
1933 && (NOTE_KIND (insn) == NOTE_INSN_CFI
1934 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
1935 delete_insn (insn);
1939 const char *
1940 get_insn_template (int code, rtx insn)
1942 switch (insn_data[code].output_format)
1944 case INSN_OUTPUT_FORMAT_SINGLE:
1945 return insn_data[code].output.single;
1946 case INSN_OUTPUT_FORMAT_MULTI:
1947 return insn_data[code].output.multi[which_alternative];
1948 case INSN_OUTPUT_FORMAT_FUNCTION:
1949 gcc_assert (insn);
1950 return (*insn_data[code].output.function) (recog_data.operand, insn);
1952 default:
1953 gcc_unreachable ();
1957 /* Emit the appropriate declaration for an alternate-entry-point
1958 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1959 LABEL_KIND != LABEL_NORMAL.
1961 The case fall-through in this function is intentional. */
1962 static void
1963 output_alternate_entry_point (FILE *file, rtx insn)
1965 const char *name = LABEL_NAME (insn);
1967 switch (LABEL_KIND (insn))
1969 case LABEL_WEAK_ENTRY:
1970 #ifdef ASM_WEAKEN_LABEL
1971 ASM_WEAKEN_LABEL (file, name);
1972 #endif
1973 case LABEL_GLOBAL_ENTRY:
1974 targetm.asm_out.globalize_label (file, name);
1975 case LABEL_STATIC_ENTRY:
1976 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1977 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1978 #endif
1979 ASM_OUTPUT_LABEL (file, name);
1980 break;
1982 case LABEL_NORMAL:
1983 default:
1984 gcc_unreachable ();
1988 /* Given a CALL_INSN, find and return the nested CALL. */
1989 static rtx
1990 call_from_call_insn (rtx insn)
1992 rtx x;
1993 gcc_assert (CALL_P (insn));
1994 x = PATTERN (insn);
1996 while (GET_CODE (x) != CALL)
1998 switch (GET_CODE (x))
2000 default:
2001 gcc_unreachable ();
2002 case COND_EXEC:
2003 x = COND_EXEC_CODE (x);
2004 break;
2005 case PARALLEL:
2006 x = XVECEXP (x, 0, 0);
2007 break;
2008 case SET:
2009 x = XEXP (x, 1);
2010 break;
2013 return x;
2016 /* The final scan for one insn, INSN.
2017 Args are same as in `final', except that INSN
2018 is the insn being scanned.
2019 Value returned is the next insn to be scanned.
2021 NOPEEPHOLES is the flag to disallow peephole processing (currently
2022 used within delayed-branch sequence output).
2024 SEEN is used to track the end of the prologue, for emitting
2025 debug information. We force the emission of a line note after
2026 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2027 at the beginning of the second basic block, whichever comes
2028 first. */
2031 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2032 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2034 #ifdef HAVE_cc0
2035 rtx set;
2036 #endif
2037 rtx next;
2039 insn_counter++;
2041 /* Ignore deleted insns. These can occur when we split insns (due to a
2042 template of "#") while not optimizing. */
2043 if (INSN_DELETED_P (insn))
2044 return NEXT_INSN (insn);
2046 switch (GET_CODE (insn))
2048 case NOTE:
2049 switch (NOTE_KIND (insn))
2051 case NOTE_INSN_DELETED:
2052 break;
2054 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2055 in_cold_section_p = !in_cold_section_p;
2057 if (dwarf2out_do_frame ())
2058 dwarf2out_switch_text_section ();
2059 else if (!DECL_IGNORED_P (current_function_decl))
2060 debug_hooks->switch_text_section ();
2062 switch_to_section (current_function_section ());
2063 targetm.asm_out.function_switched_text_sections (asm_out_file,
2064 current_function_decl,
2065 in_cold_section_p);
2066 break;
2068 case NOTE_INSN_BASIC_BLOCK:
2069 if (targetm.asm_out.unwind_emit)
2070 targetm.asm_out.unwind_emit (asm_out_file, insn);
2072 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
2074 *seen |= SEEN_EMITTED;
2075 force_source_line = true;
2077 else
2078 *seen |= SEEN_BB;
2080 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2082 break;
2084 case NOTE_INSN_EH_REGION_BEG:
2085 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2086 NOTE_EH_HANDLER (insn));
2087 break;
2089 case NOTE_INSN_EH_REGION_END:
2090 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2091 NOTE_EH_HANDLER (insn));
2092 break;
2094 case NOTE_INSN_PROLOGUE_END:
2095 targetm.asm_out.function_end_prologue (file);
2096 profile_after_prologue (file);
2098 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2100 *seen |= SEEN_EMITTED;
2101 force_source_line = true;
2103 else
2104 *seen |= SEEN_NOTE;
2106 break;
2108 case NOTE_INSN_EPILOGUE_BEG:
2109 if (!DECL_IGNORED_P (current_function_decl))
2110 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2111 targetm.asm_out.function_begin_epilogue (file);
2112 break;
2114 case NOTE_INSN_CFI:
2115 dwarf2out_emit_cfi (NOTE_CFI (insn));
2116 break;
2118 case NOTE_INSN_CFI_LABEL:
2119 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2120 NOTE_LABEL_NUMBER (insn));
2121 break;
2123 case NOTE_INSN_FUNCTION_BEG:
2124 app_disable ();
2125 if (!DECL_IGNORED_P (current_function_decl))
2126 debug_hooks->end_prologue (last_linenum, last_filename);
2128 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2130 *seen |= SEEN_EMITTED;
2131 force_source_line = true;
2133 else
2134 *seen |= SEEN_NOTE;
2136 break;
2138 case NOTE_INSN_BLOCK_BEG:
2139 if (debug_info_level == DINFO_LEVEL_NORMAL
2140 || debug_info_level == DINFO_LEVEL_VERBOSE
2141 || write_symbols == DWARF2_DEBUG
2142 || write_symbols == VMS_AND_DWARF2_DEBUG
2143 || write_symbols == VMS_DEBUG)
2145 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2147 app_disable ();
2148 ++block_depth;
2149 high_block_linenum = last_linenum;
2151 /* Output debugging info about the symbol-block beginning. */
2152 if (!DECL_IGNORED_P (current_function_decl))
2153 debug_hooks->begin_block (last_linenum, n);
2155 /* Mark this block as output. */
2156 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2158 if (write_symbols == DBX_DEBUG
2159 || write_symbols == SDB_DEBUG)
2161 location_t *locus_ptr
2162 = block_nonartificial_location (NOTE_BLOCK (insn));
2164 if (locus_ptr != NULL)
2166 override_filename = LOCATION_FILE (*locus_ptr);
2167 override_linenum = LOCATION_LINE (*locus_ptr);
2170 break;
2172 case NOTE_INSN_BLOCK_END:
2173 if (debug_info_level == DINFO_LEVEL_NORMAL
2174 || debug_info_level == DINFO_LEVEL_VERBOSE
2175 || write_symbols == DWARF2_DEBUG
2176 || write_symbols == VMS_AND_DWARF2_DEBUG
2177 || write_symbols == VMS_DEBUG)
2179 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2181 app_disable ();
2183 /* End of a symbol-block. */
2184 --block_depth;
2185 gcc_assert (block_depth >= 0);
2187 if (!DECL_IGNORED_P (current_function_decl))
2188 debug_hooks->end_block (high_block_linenum, n);
2190 if (write_symbols == DBX_DEBUG
2191 || write_symbols == SDB_DEBUG)
2193 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2194 location_t *locus_ptr
2195 = block_nonartificial_location (outer_block);
2197 if (locus_ptr != NULL)
2199 override_filename = LOCATION_FILE (*locus_ptr);
2200 override_linenum = LOCATION_LINE (*locus_ptr);
2202 else
2204 override_filename = NULL;
2205 override_linenum = 0;
2208 break;
2210 case NOTE_INSN_DELETED_LABEL:
2211 /* Emit the label. We may have deleted the CODE_LABEL because
2212 the label could be proved to be unreachable, though still
2213 referenced (in the form of having its address taken). */
2214 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2215 break;
2217 case NOTE_INSN_DELETED_DEBUG_LABEL:
2218 /* Similarly, but we need to use a different namespace for it. */
2219 if (CODE_LABEL_NUMBER (insn) != -1)
2220 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2221 break;
2223 case NOTE_INSN_VAR_LOCATION:
2224 case NOTE_INSN_CALL_ARG_LOCATION:
2225 if (!DECL_IGNORED_P (current_function_decl))
2226 debug_hooks->var_location (insn);
2227 break;
2229 default:
2230 gcc_unreachable ();
2231 break;
2233 break;
2235 case BARRIER:
2236 break;
2238 case CODE_LABEL:
2239 /* The target port might emit labels in the output function for
2240 some insn, e.g. sh.c output_branchy_insn. */
2241 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2243 int align = LABEL_TO_ALIGNMENT (insn);
2244 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2245 int max_skip = LABEL_TO_MAX_SKIP (insn);
2246 #endif
2248 if (align && NEXT_INSN (insn))
2250 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2251 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2252 #else
2253 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2254 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2255 #else
2256 ASM_OUTPUT_ALIGN (file, align);
2257 #endif
2258 #endif
2261 CC_STATUS_INIT;
2263 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2264 debug_hooks->label (insn);
2266 app_disable ();
2268 next = next_nonnote_insn (insn);
2269 /* If this label is followed by a jump-table, make sure we put
2270 the label in the read-only section. Also possibly write the
2271 label and jump table together. */
2272 if (next != 0 && JUMP_TABLE_DATA_P (next))
2274 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2275 /* In this case, the case vector is being moved by the
2276 target, so don't output the label at all. Leave that
2277 to the back end macros. */
2278 #else
2279 if (! JUMP_TABLES_IN_TEXT_SECTION)
2281 int log_align;
2283 switch_to_section (targetm.asm_out.function_rodata_section
2284 (current_function_decl));
2286 #ifdef ADDR_VEC_ALIGN
2287 log_align = ADDR_VEC_ALIGN (next);
2288 #else
2289 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2290 #endif
2291 ASM_OUTPUT_ALIGN (file, log_align);
2293 else
2294 switch_to_section (current_function_section ());
2296 #ifdef ASM_OUTPUT_CASE_LABEL
2297 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2298 next);
2299 #else
2300 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2301 #endif
2302 #endif
2303 break;
2305 if (LABEL_ALT_ENTRY_P (insn))
2306 output_alternate_entry_point (file, insn);
2307 else
2308 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2309 break;
2311 default:
2313 rtx body = PATTERN (insn);
2314 int insn_code_number;
2315 const char *templ;
2316 bool is_stmt;
2318 /* Reset this early so it is correct for ASM statements. */
2319 current_insn_predicate = NULL_RTX;
2321 /* An INSN, JUMP_INSN or CALL_INSN.
2322 First check for special kinds that recog doesn't recognize. */
2324 if (GET_CODE (body) == USE /* These are just declarations. */
2325 || GET_CODE (body) == CLOBBER)
2326 break;
2328 #ifdef HAVE_cc0
2330 /* If there is a REG_CC_SETTER note on this insn, it means that
2331 the setting of the condition code was done in the delay slot
2332 of the insn that branched here. So recover the cc status
2333 from the insn that set it. */
2335 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2336 if (note)
2338 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2339 cc_prev_status = cc_status;
2342 #endif
2344 /* Detect insns that are really jump-tables
2345 and output them as such. */
2347 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2349 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2350 int vlen, idx;
2351 #endif
2353 if (! JUMP_TABLES_IN_TEXT_SECTION)
2354 switch_to_section (targetm.asm_out.function_rodata_section
2355 (current_function_decl));
2356 else
2357 switch_to_section (current_function_section ());
2359 app_disable ();
2361 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2362 if (GET_CODE (body) == ADDR_VEC)
2364 #ifdef ASM_OUTPUT_ADDR_VEC
2365 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2366 #else
2367 gcc_unreachable ();
2368 #endif
2370 else
2372 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2373 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2374 #else
2375 gcc_unreachable ();
2376 #endif
2378 #else
2379 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2380 for (idx = 0; idx < vlen; idx++)
2382 if (GET_CODE (body) == ADDR_VEC)
2384 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2385 ASM_OUTPUT_ADDR_VEC_ELT
2386 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2387 #else
2388 gcc_unreachable ();
2389 #endif
2391 else
2393 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2394 ASM_OUTPUT_ADDR_DIFF_ELT
2395 (file,
2396 body,
2397 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2398 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2399 #else
2400 gcc_unreachable ();
2401 #endif
2404 #ifdef ASM_OUTPUT_CASE_END
2405 ASM_OUTPUT_CASE_END (file,
2406 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2407 insn);
2408 #endif
2409 #endif
2411 switch_to_section (current_function_section ());
2413 break;
2415 /* Output this line note if it is the first or the last line
2416 note in a row. */
2417 if (!DECL_IGNORED_P (current_function_decl)
2418 && notice_source_line (insn, &is_stmt))
2419 (*debug_hooks->source_line) (last_linenum, last_filename,
2420 last_discriminator, is_stmt);
2422 if (GET_CODE (body) == ASM_INPUT)
2424 const char *string = XSTR (body, 0);
2426 /* There's no telling what that did to the condition codes. */
2427 CC_STATUS_INIT;
2429 if (string[0])
2431 expanded_location loc;
2433 app_enable ();
2434 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2435 if (*loc.file && loc.line)
2436 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2437 ASM_COMMENT_START, loc.line, loc.file);
2438 fprintf (asm_out_file, "\t%s\n", string);
2439 #if HAVE_AS_LINE_ZERO
2440 if (*loc.file && loc.line)
2441 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2442 #endif
2444 break;
2447 /* Detect `asm' construct with operands. */
2448 if (asm_noperands (body) >= 0)
2450 unsigned int noperands = asm_noperands (body);
2451 rtx *ops = XALLOCAVEC (rtx, noperands);
2452 const char *string;
2453 location_t loc;
2454 expanded_location expanded;
2456 /* There's no telling what that did to the condition codes. */
2457 CC_STATUS_INIT;
2459 /* Get out the operand values. */
2460 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2461 /* Inhibit dying on what would otherwise be compiler bugs. */
2462 insn_noperands = noperands;
2463 this_is_asm_operands = insn;
2464 expanded = expand_location (loc);
2466 #ifdef FINAL_PRESCAN_INSN
2467 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2468 #endif
2470 /* Output the insn using them. */
2471 if (string[0])
2473 app_enable ();
2474 if (expanded.file && expanded.line)
2475 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2476 ASM_COMMENT_START, expanded.line, expanded.file);
2477 output_asm_insn (string, ops);
2478 #if HAVE_AS_LINE_ZERO
2479 if (expanded.file && expanded.line)
2480 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2481 #endif
2484 if (targetm.asm_out.final_postscan_insn)
2485 targetm.asm_out.final_postscan_insn (file, insn, ops,
2486 insn_noperands);
2488 this_is_asm_operands = 0;
2489 break;
2492 app_disable ();
2494 if (GET_CODE (body) == SEQUENCE)
2496 /* A delayed-branch sequence */
2497 int i;
2499 final_sequence = body;
2501 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2502 force the restoration of a comparison that was previously
2503 thought unnecessary. If that happens, cancel this sequence
2504 and cause that insn to be restored. */
2506 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2507 if (next != XVECEXP (body, 0, 1))
2509 final_sequence = 0;
2510 return next;
2513 for (i = 1; i < XVECLEN (body, 0); i++)
2515 rtx insn = XVECEXP (body, 0, i);
2516 rtx next = NEXT_INSN (insn);
2517 /* We loop in case any instruction in a delay slot gets
2518 split. */
2520 insn = final_scan_insn (insn, file, 0, 1, seen);
2521 while (insn != next);
2523 #ifdef DBR_OUTPUT_SEQEND
2524 DBR_OUTPUT_SEQEND (file);
2525 #endif
2526 final_sequence = 0;
2528 /* If the insn requiring the delay slot was a CALL_INSN, the
2529 insns in the delay slot are actually executed before the
2530 called function. Hence we don't preserve any CC-setting
2531 actions in these insns and the CC must be marked as being
2532 clobbered by the function. */
2533 if (CALL_P (XVECEXP (body, 0, 0)))
2535 CC_STATUS_INIT;
2537 break;
2540 /* We have a real machine instruction as rtl. */
2542 body = PATTERN (insn);
2544 #ifdef HAVE_cc0
2545 set = single_set (insn);
2547 /* Check for redundant test and compare instructions
2548 (when the condition codes are already set up as desired).
2549 This is done only when optimizing; if not optimizing,
2550 it should be possible for the user to alter a variable
2551 with the debugger in between statements
2552 and the next statement should reexamine the variable
2553 to compute the condition codes. */
2555 if (optimize_p)
2557 if (set
2558 && GET_CODE (SET_DEST (set)) == CC0
2559 && insn != last_ignored_compare)
2561 rtx src1, src2;
2562 if (GET_CODE (SET_SRC (set)) == SUBREG)
2563 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2565 src1 = SET_SRC (set);
2566 src2 = NULL_RTX;
2567 if (GET_CODE (SET_SRC (set)) == COMPARE)
2569 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2570 XEXP (SET_SRC (set), 0)
2571 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2572 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2573 XEXP (SET_SRC (set), 1)
2574 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2575 if (XEXP (SET_SRC (set), 1)
2576 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2577 src2 = XEXP (SET_SRC (set), 0);
2579 if ((cc_status.value1 != 0
2580 && rtx_equal_p (src1, cc_status.value1))
2581 || (cc_status.value2 != 0
2582 && rtx_equal_p (src1, cc_status.value2))
2583 || (src2 != 0 && cc_status.value1 != 0
2584 && rtx_equal_p (src2, cc_status.value1))
2585 || (src2 != 0 && cc_status.value2 != 0
2586 && rtx_equal_p (src2, cc_status.value2)))
2588 /* Don't delete insn if it has an addressing side-effect. */
2589 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2590 /* or if anything in it is volatile. */
2591 && ! volatile_refs_p (PATTERN (insn)))
2593 /* We don't really delete the insn; just ignore it. */
2594 last_ignored_compare = insn;
2595 break;
2601 /* If this is a conditional branch, maybe modify it
2602 if the cc's are in a nonstandard state
2603 so that it accomplishes the same thing that it would
2604 do straightforwardly if the cc's were set up normally. */
2606 if (cc_status.flags != 0
2607 && JUMP_P (insn)
2608 && GET_CODE (body) == SET
2609 && SET_DEST (body) == pc_rtx
2610 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2611 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2612 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2614 /* This function may alter the contents of its argument
2615 and clear some of the cc_status.flags bits.
2616 It may also return 1 meaning condition now always true
2617 or -1 meaning condition now always false
2618 or 2 meaning condition nontrivial but altered. */
2619 int result = alter_cond (XEXP (SET_SRC (body), 0));
2620 /* If condition now has fixed value, replace the IF_THEN_ELSE
2621 with its then-operand or its else-operand. */
2622 if (result == 1)
2623 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2624 if (result == -1)
2625 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2627 /* The jump is now either unconditional or a no-op.
2628 If it has become a no-op, don't try to output it.
2629 (It would not be recognized.) */
2630 if (SET_SRC (body) == pc_rtx)
2632 delete_insn (insn);
2633 break;
2635 else if (ANY_RETURN_P (SET_SRC (body)))
2636 /* Replace (set (pc) (return)) with (return). */
2637 PATTERN (insn) = body = SET_SRC (body);
2639 /* Rerecognize the instruction if it has changed. */
2640 if (result != 0)
2641 INSN_CODE (insn) = -1;
2644 /* If this is a conditional trap, maybe modify it if the cc's
2645 are in a nonstandard state so that it accomplishes the same
2646 thing that it would do straightforwardly if the cc's were
2647 set up normally. */
2648 if (cc_status.flags != 0
2649 && NONJUMP_INSN_P (insn)
2650 && GET_CODE (body) == TRAP_IF
2651 && COMPARISON_P (TRAP_CONDITION (body))
2652 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2654 /* This function may alter the contents of its argument
2655 and clear some of the cc_status.flags bits.
2656 It may also return 1 meaning condition now always true
2657 or -1 meaning condition now always false
2658 or 2 meaning condition nontrivial but altered. */
2659 int result = alter_cond (TRAP_CONDITION (body));
2661 /* If TRAP_CONDITION has become always false, delete the
2662 instruction. */
2663 if (result == -1)
2665 delete_insn (insn);
2666 break;
2669 /* If TRAP_CONDITION has become always true, replace
2670 TRAP_CONDITION with const_true_rtx. */
2671 if (result == 1)
2672 TRAP_CONDITION (body) = const_true_rtx;
2674 /* Rerecognize the instruction if it has changed. */
2675 if (result != 0)
2676 INSN_CODE (insn) = -1;
2679 /* Make same adjustments to instructions that examine the
2680 condition codes without jumping and instructions that
2681 handle conditional moves (if this machine has either one). */
2683 if (cc_status.flags != 0
2684 && set != 0)
2686 rtx cond_rtx, then_rtx, else_rtx;
2688 if (!JUMP_P (insn)
2689 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2691 cond_rtx = XEXP (SET_SRC (set), 0);
2692 then_rtx = XEXP (SET_SRC (set), 1);
2693 else_rtx = XEXP (SET_SRC (set), 2);
2695 else
2697 cond_rtx = SET_SRC (set);
2698 then_rtx = const_true_rtx;
2699 else_rtx = const0_rtx;
2702 switch (GET_CODE (cond_rtx))
2704 case GTU:
2705 case GT:
2706 case LTU:
2707 case LT:
2708 case GEU:
2709 case GE:
2710 case LEU:
2711 case LE:
2712 case EQ:
2713 case NE:
2715 int result;
2716 if (XEXP (cond_rtx, 0) != cc0_rtx)
2717 break;
2718 result = alter_cond (cond_rtx);
2719 if (result == 1)
2720 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2721 else if (result == -1)
2722 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2723 else if (result == 2)
2724 INSN_CODE (insn) = -1;
2725 if (SET_DEST (set) == SET_SRC (set))
2726 delete_insn (insn);
2728 break;
2730 default:
2731 break;
2735 #endif
2737 #ifdef HAVE_peephole
2738 /* Do machine-specific peephole optimizations if desired. */
2740 if (optimize_p && !flag_no_peephole && !nopeepholes)
2742 rtx next = peephole (insn);
2743 /* When peepholing, if there were notes within the peephole,
2744 emit them before the peephole. */
2745 if (next != 0 && next != NEXT_INSN (insn))
2747 rtx note, prev = PREV_INSN (insn);
2749 for (note = NEXT_INSN (insn); note != next;
2750 note = NEXT_INSN (note))
2751 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2753 /* Put the notes in the proper position for a later
2754 rescan. For example, the SH target can do this
2755 when generating a far jump in a delayed branch
2756 sequence. */
2757 note = NEXT_INSN (insn);
2758 PREV_INSN (note) = prev;
2759 NEXT_INSN (prev) = note;
2760 NEXT_INSN (PREV_INSN (next)) = insn;
2761 PREV_INSN (insn) = PREV_INSN (next);
2762 NEXT_INSN (insn) = next;
2763 PREV_INSN (next) = insn;
2766 /* PEEPHOLE might have changed this. */
2767 body = PATTERN (insn);
2769 #endif
2771 /* Try to recognize the instruction.
2772 If successful, verify that the operands satisfy the
2773 constraints for the instruction. Crash if they don't,
2774 since `reload' should have changed them so that they do. */
2776 insn_code_number = recog_memoized (insn);
2777 cleanup_subreg_operands (insn);
2779 /* Dump the insn in the assembly for debugging (-dAP).
2780 If the final dump is requested as slim RTL, dump slim
2781 RTL to the assembly file also. */
2782 if (flag_dump_rtl_in_asm)
2784 print_rtx_head = ASM_COMMENT_START;
2785 if (! (dump_flags & TDF_SLIM))
2786 print_rtl_single (asm_out_file, insn);
2787 else
2788 dump_insn_slim (asm_out_file, insn);
2789 print_rtx_head = "";
2792 if (! constrain_operands_cached (1))
2793 fatal_insn_not_found (insn);
2795 /* Some target machines need to prescan each insn before
2796 it is output. */
2798 #ifdef FINAL_PRESCAN_INSN
2799 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2800 #endif
2802 if (targetm.have_conditional_execution ()
2803 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2804 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2806 #ifdef HAVE_cc0
2807 cc_prev_status = cc_status;
2809 /* Update `cc_status' for this instruction.
2810 The instruction's output routine may change it further.
2811 If the output routine for a jump insn needs to depend
2812 on the cc status, it should look at cc_prev_status. */
2814 NOTICE_UPDATE_CC (body, insn);
2815 #endif
2817 current_output_insn = debug_insn = insn;
2819 /* Find the proper template for this insn. */
2820 templ = get_insn_template (insn_code_number, insn);
2822 /* If the C code returns 0, it means that it is a jump insn
2823 which follows a deleted test insn, and that test insn
2824 needs to be reinserted. */
2825 if (templ == 0)
2827 rtx prev;
2829 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2831 /* We have already processed the notes between the setter and
2832 the user. Make sure we don't process them again; this is
2833 particularly important if one of the notes is a block
2834 scope note or an EH note. */
2835 for (prev = insn;
2836 prev != last_ignored_compare;
2837 prev = PREV_INSN (prev))
2839 if (NOTE_P (prev))
2840 delete_insn (prev); /* Use delete_note. */
2843 return prev;
2846 /* If the template is the string "#", it means that this insn must
2847 be split. */
2848 if (templ[0] == '#' && templ[1] == '\0')
2850 rtx new_rtx = try_split (body, insn, 0);
2852 /* If we didn't split the insn, go away. */
2853 if (new_rtx == insn && PATTERN (new_rtx) == body)
2854 fatal_insn ("could not split insn", insn);
2856 #ifdef HAVE_ATTR_length
2857 /* This instruction should have been split in shorten_branches,
2858 to ensure that we would have valid length info for the
2859 splitees. */
2860 gcc_unreachable ();
2861 #endif
2863 return new_rtx;
2866 /* ??? This will put the directives in the wrong place if
2867 get_insn_template outputs assembly directly. However, calling it
2868 before get_insn_template breaks if the insn is split. */
2869 if (targetm.asm_out.unwind_emit_before_insn
2870 && targetm.asm_out.unwind_emit)
2871 targetm.asm_out.unwind_emit (asm_out_file, insn);
2873 if (CALL_P (insn))
2875 rtx x = call_from_call_insn (insn);
2876 x = XEXP (x, 0);
2877 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2879 tree t;
2880 x = XEXP (x, 0);
2881 t = SYMBOL_REF_DECL (x);
2882 if (t)
2883 assemble_external (t);
2885 if (!DECL_IGNORED_P (current_function_decl))
2886 debug_hooks->var_location (insn);
2889 /* Output assembler code from the template. */
2890 output_asm_insn (templ, recog_data.operand);
2892 /* Some target machines need to postscan each insn after
2893 it is output. */
2894 if (targetm.asm_out.final_postscan_insn)
2895 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2896 recog_data.n_operands);
2898 if (!targetm.asm_out.unwind_emit_before_insn
2899 && targetm.asm_out.unwind_emit)
2900 targetm.asm_out.unwind_emit (asm_out_file, insn);
2902 current_output_insn = debug_insn = 0;
2905 return NEXT_INSN (insn);
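/* Editorial addition, not part of GCC: the *seen bookkeeping in the NOTE
   cases above forces one extra source-line note at whichever comes first --
   the end-of-prologue note, the function-begin note, or the start of the
   second basic block.  Below is a self-contained rendering of that
   "fire once, on the second trigger of a given kind" pattern; the demo_*
   names are hypothetical.  */
#if 0
#include <stdbool.h>

#define DEMO_SEEN_NOTE    0x1
#define DEMO_SEEN_BB      0x2
#define DEMO_SEEN_EMITTED 0x4

/* Returns true exactly once: the first time a trigger of kind KIND
   (DEMO_SEEN_NOTE or DEMO_SEEN_BB) is seen for the second time and
   nothing has been forced yet.  */
static bool
demo_force_line_note (int *seen, int kind)
{
  if ((*seen & (DEMO_SEEN_EMITTED | kind)) == kind)
    {
      *seen |= DEMO_SEEN_EMITTED;
      return true;              /* the caller would set force_source_line here */
    }
  *seen |= kind;
  return false;
}
#endif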
2908 /* Return whether a source line note needs to be emitted before INSN.
2909 Sets IS_STMT to TRUE if the line should be marked as a possible
2910 breakpoint location. */
2912 static bool
2913 notice_source_line (rtx insn, bool *is_stmt)
2915 const char *filename;
2916 int linenum;
2918 if (override_filename)
2920 filename = override_filename;
2921 linenum = override_linenum;
2923 else
2925 filename = insn_file (insn);
2926 linenum = insn_line (insn);
2929 if (filename == NULL)
2930 return false;
2932 if (force_source_line
2933 || filename != last_filename
2934 || last_linenum != linenum)
2936 force_source_line = false;
2937 last_filename = filename;
2938 last_linenum = linenum;
2939 last_discriminator = discriminator;
2940 *is_stmt = true;
2941 high_block_linenum = MAX (last_linenum, high_block_linenum);
2942 high_function_linenum = MAX (last_linenum, high_function_linenum);
2943 return true;
2946 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2948 /* If the discriminator changed, but the line number did not,
2949 output the line table entry with is_stmt false so the
2950 debugger does not treat this as a breakpoint location. */
2951 last_discriminator = discriminator;
2952 *is_stmt = false;
2953 return true;
2956 return false;
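/* Editorial addition, not part of GCC: a self-contained sketch of the
   deduplication that notice_source_line performs -- emit a line-table
   entry only when the file or line changes (or a note is forced), and
   mark a discriminator-only change as a non-statement entry.  The demo_*
   names and the strcmp-based file comparison are assumptions of this
   sketch only.  */
#if 0
#include <stdbool.h>
#include <string.h>

static const char *demo_last_file;
static int demo_last_line;
static int demo_last_disc;
static bool demo_force;

/* Decide whether a new line-table entry is needed for (FILE, LINE, DISC);
   *IS_STMT mirrors the is_stmt handling above.  */
static bool
demo_notice_line (const char *file, int line, int disc, bool *is_stmt)
{
  if (file == NULL)
    return false;
  if (demo_force || demo_last_file == NULL
      || strcmp (file, demo_last_file) != 0 || line != demo_last_line)
    {
      demo_force = false;
      demo_last_file = file;
      demo_last_line = line;
      demo_last_disc = disc;
      *is_stmt = true;          /* a real statement boundary */
      return true;
    }
  if (disc != demo_last_disc)
    {
      demo_last_disc = disc;
      *is_stmt = false;         /* same line, new discriminator: not a breakpoint spot */
      return true;
    }
  return false;
}
#endif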
2959 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2960 directly to the desired hard register. */
2962 void
2963 cleanup_subreg_operands (rtx insn)
2965 int i;
2966 bool changed = false;
2967 extract_insn_cached (insn);
2968 for (i = 0; i < recog_data.n_operands; i++)
2970 /* The following test cannot use recog_data.operand when testing
2971 for a SUBREG: the underlying object might have been changed
2972 already if we are inside a match_operator expression that
2973 matches the else clause. Instead we test the underlying
2974 expression directly. */
2975 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2977 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
2978 changed = true;
2980 else if (GET_CODE (recog_data.operand[i]) == PLUS
2981 || GET_CODE (recog_data.operand[i]) == MULT
2982 || MEM_P (recog_data.operand[i]))
2983 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2986 for (i = 0; i < recog_data.n_dups; i++)
2988 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2990 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
2991 changed = true;
2993 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2994 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2995 || MEM_P (*recog_data.dup_loc[i]))
2996 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2998 if (changed)
2999 df_insn_rescan (insn);
3002 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3003 the thing it is a subreg of. Do it anyway if FINAL_P. */
3006 alter_subreg (rtx *xp, bool final_p)
3008 rtx x = *xp;
3009 rtx y = SUBREG_REG (x);
3011 /* simplify_subreg does not remove subreg from volatile references.
3012 We are required to. */
3013 if (MEM_P (y))
3015 int offset = SUBREG_BYTE (x);
3017 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3018 contains 0 instead of the proper offset. See simplify_subreg. */
3019 if (offset == 0
3020 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3022 int difference = GET_MODE_SIZE (GET_MODE (y))
3023 - GET_MODE_SIZE (GET_MODE (x));
3024 if (WORDS_BIG_ENDIAN)
3025 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3026 if (BYTES_BIG_ENDIAN)
3027 offset += difference % UNITS_PER_WORD;
3030 if (final_p)
3031 *xp = adjust_address (y, GET_MODE (x), offset);
3032 else
3033 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3035 else
3037 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3038 SUBREG_BYTE (x));
3040 if (new_rtx != 0)
3041 *xp = new_rtx;
3042 else if (final_p && REG_P (y))
3044 /* Simplify_subreg can't handle some REG cases, but we have to. */
3045 unsigned int regno;
3046 HOST_WIDE_INT offset;
3048 regno = subreg_regno (x);
3049 if (subreg_lowpart_p (x))
3050 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3051 else
3052 offset = SUBREG_BYTE (x);
3053 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3057 return *xp;
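/* Editorial addition, not part of GCC: the byte-offset adjustment that
   alter_subreg applies above for a paradoxical (outer-larger-than-inner)
   MEM subreg on a big-endian target, reduced to plain integer arithmetic.
   Sizes are in bytes; the demo_* names are hypothetical.  */
#if 0
static int
demo_paradoxical_offset (int inner_size, int outer_size,
                         int units_per_word,
                         int words_big_endian, int bytes_big_endian)
{
  int offset = 0;
  int difference = inner_size - outer_size;   /* negative when paradoxical */

  if (words_big_endian)
    offset += (difference / units_per_word) * units_per_word;
  if (bytes_big_endian)
    offset += difference % units_per_word;
  return offset;
}

/* E.g. a 1-byte (QImode) memory reference viewed through a 4-byte (SImode)
   subreg with 4-byte words on a fully big-endian target gives -3: the
   wider access starts 3 bytes before the original location.  */
#endif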
3060 /* Do alter_subreg on all the SUBREGs contained in X. */
3062 static rtx
3063 walk_alter_subreg (rtx *xp, bool *changed)
3065 rtx x = *xp;
3066 switch (GET_CODE (x))
3068 case PLUS:
3069 case MULT:
3070 case AND:
3071 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3072 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3073 break;
3075 case MEM:
3076 case ZERO_EXTEND:
3077 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3078 break;
3080 case SUBREG:
3081 *changed = true;
3082 return alter_subreg (xp, true);
3084 default:
3085 break;
3088 return *xp;
3091 #ifdef HAVE_cc0
3093 /* Given COND, the condition tested by a jump instruction, alter it
3094 as required by the bits that are set in cc_status.flags.
3095 Not all of the bits there can be handled at this level in all cases.
3097 The value is normally 0.
3098 1 means that the condition has become always true.
3099 -1 means that the condition has become always false.
3100 2 means that COND has been altered. */
3102 static int
3103 alter_cond (rtx cond)
3105 int value = 0;
3107 if (cc_status.flags & CC_REVERSED)
3109 value = 2;
3110 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3113 if (cc_status.flags & CC_INVERTED)
3115 value = 2;
3116 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3119 if (cc_status.flags & CC_NOT_POSITIVE)
3120 switch (GET_CODE (cond))
3122 case LE:
3123 case LEU:
3124 case GEU:
3125 /* Jump becomes unconditional. */
3126 return 1;
3128 case GT:
3129 case GTU:
3130 case LTU:
3131 /* Jump becomes no-op. */
3132 return -1;
3134 case GE:
3135 PUT_CODE (cond, EQ);
3136 value = 2;
3137 break;
3139 case LT:
3140 PUT_CODE (cond, NE);
3141 value = 2;
3142 break;
3144 default:
3145 break;
3148 if (cc_status.flags & CC_NOT_NEGATIVE)
3149 switch (GET_CODE (cond))
3151 case GE:
3152 case GEU:
3153 /* Jump becomes unconditional. */
3154 return 1;
3156 case LT:
3157 case LTU:
3158 /* Jump becomes no-op. */
3159 return -1;
3161 case LE:
3162 case LEU:
3163 PUT_CODE (cond, EQ);
3164 value = 2;
3165 break;
3167 case GT:
3168 case GTU:
3169 PUT_CODE (cond, NE);
3170 value = 2;
3171 break;
3173 default:
3174 break;
3177 if (cc_status.flags & CC_NO_OVERFLOW)
3178 switch (GET_CODE (cond))
3180 case GEU:
3181 /* Jump becomes unconditional. */
3182 return 1;
3184 case LEU:
3185 PUT_CODE (cond, EQ);
3186 value = 2;
3187 break;
3189 case GTU:
3190 PUT_CODE (cond, NE);
3191 value = 2;
3192 break;
3194 case LTU:
3195 /* Jump becomes no-op. */
3196 return -1;
3198 default:
3199 break;
3202 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3203 switch (GET_CODE (cond))
3205 default:
3206 gcc_unreachable ();
3208 case NE:
3209 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3210 value = 2;
3211 break;
3213 case EQ:
3214 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3215 value = 2;
3216 break;
3219 if (cc_status.flags & CC_NOT_SIGNED)
3220 /* The flags are valid if signed condition operators are converted
3221 to unsigned. */
3222 switch (GET_CODE (cond))
3224 case LE:
3225 PUT_CODE (cond, LEU);
3226 value = 2;
3227 break;
3229 case LT:
3230 PUT_CODE (cond, LTU);
3231 value = 2;
3232 break;
3234 case GT:
3235 PUT_CODE (cond, GTU);
3236 value = 2;
3237 break;
3239 case GE:
3240 PUT_CODE (cond, GEU);
3241 value = 2;
3242 break;
3244 default:
3245 break;
3248 return value;
3250 #endif
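/* Editorial addition, not part of GCC: the CC_REVERSED / CC_INVERTED
   handling at the top of alter_cond, rewritten over a tiny stand-alone
   comparison enum.  Swapping exchanges the operand order (GT <-> LT, ...);
   reversing negates the condition (EQ <-> NE, GT <-> LE, ...).  The demo_*
   names are hypothetical and only the signed codes are modelled.  */
#if 0
enum demo_cmp { DEMO_EQ, DEMO_NE, DEMO_LT, DEMO_LE, DEMO_GT, DEMO_GE };

static enum demo_cmp
demo_swap (enum demo_cmp c)
{
  switch (c)
    {
    case DEMO_LT: return DEMO_GT;
    case DEMO_GT: return DEMO_LT;
    case DEMO_LE: return DEMO_GE;
    case DEMO_GE: return DEMO_LE;
    default:      return c;          /* EQ and NE are symmetric */
    }
}

static enum demo_cmp
demo_reverse (enum demo_cmp c)
{
  switch (c)
    {
    case DEMO_EQ: return DEMO_NE;
    case DEMO_NE: return DEMO_EQ;
    case DEMO_LT: return DEMO_GE;
    case DEMO_GE: return DEMO_LT;
    case DEMO_GT: return DEMO_LE;
    case DEMO_LE: return DEMO_GT;
    }
  return c;
}

/* Mirror of the first two tests in alter_cond: returns 2 if *C changed.  */
static int
demo_alter (enum demo_cmp *c, int reversed, int inverted)
{
  int value = 0;
  if (reversed)
    {
      *c = demo_swap (*c);
      value = 2;
    }
  if (inverted)
    {
      *c = demo_reverse (*c);
      value = 2;
    }
  return value;
}
#endif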
3252 /* Report inconsistency between the assembler template and the operands.
3253 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3255 void
3256 output_operand_lossage (const char *cmsgid, ...)
3258 char *fmt_string;
3259 char *new_message;
3260 const char *pfx_str;
3261 va_list ap;
3263 va_start (ap, cmsgid);
3265 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3266 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3267 vasprintf (&new_message, fmt_string, ap);
3269 if (this_is_asm_operands)
3270 error_for_asm (this_is_asm_operands, "%s", new_message);
3271 else
3272 internal_error ("%s", new_message);
3274 free (fmt_string);
3275 free (new_message);
3276 va_end (ap);
3279 /* Output of assembler code from a template, and its subroutines. */
3281 /* Annotate the assembly with a comment describing the pattern and
3282 alternative used. */
3284 static void
3285 output_asm_name (void)
3287 if (debug_insn)
3289 int num = INSN_CODE (debug_insn);
3290 fprintf (asm_out_file, "\t%s %d\t%s",
3291 ASM_COMMENT_START, INSN_UID (debug_insn),
3292 insn_data[num].name);
3293 if (insn_data[num].n_alternatives > 1)
3294 fprintf (asm_out_file, "/%d", which_alternative + 1);
3295 #ifdef HAVE_ATTR_length
3296 fprintf (asm_out_file, "\t[length = %d]",
3297 get_attr_length (debug_insn));
3298 #endif
3299 /* Clear this so only the first assembler insn
3300 of any rtl insn will get the special comment for -dp. */
3301 debug_insn = 0;
3305 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3306 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3307 corresponds to the address of the object and 0 if to the object. */
3309 static tree
3310 get_mem_expr_from_op (rtx op, int *paddressp)
3312 tree expr;
3313 int inner_addressp;
3315 *paddressp = 0;
3317 if (REG_P (op))
3318 return REG_EXPR (op);
3319 else if (!MEM_P (op))
3320 return 0;
3322 if (MEM_EXPR (op) != 0)
3323 return MEM_EXPR (op);
3325 /* Otherwise we have an address, so indicate it and look at the address. */
3326 *paddressp = 1;
3327 op = XEXP (op, 0);
3329 /* First check if we have a decl for the address, then look at the right side
3330 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3331 But don't allow the address itself to be indirect. */
3332 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3333 return expr;
3334 else if (GET_CODE (op) == PLUS
3335 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3336 return expr;
3338 while (UNARY_P (op)
3339 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3340 op = XEXP (op, 0);
3342 expr = get_mem_expr_from_op (op, &inner_addressp);
3343 return inner_addressp ? 0 : expr;
3346 /* Output operand names for assembler instructions. OPERANDS is the
3347 operand vector, OPORDER is the order to write the operands, and NOPS
3348 is the number of operands to write. */
3350 static void
3351 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3353 int wrote = 0;
3354 int i;
3356 for (i = 0; i < nops; i++)
3358 int addressp;
3359 rtx op = operands[oporder[i]];
3360 tree expr = get_mem_expr_from_op (op, &addressp);
3362 fprintf (asm_out_file, "%c%s",
3363 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3364 wrote = 1;
3365 if (expr)
3367 fprintf (asm_out_file, "%s",
3368 addressp ? "*" : "");
3369 print_mem_expr (asm_out_file, expr);
3370 wrote = 1;
3372 else if (REG_P (op) && ORIGINAL_REGNO (op)
3373 && ORIGINAL_REGNO (op) != REGNO (op))
3374 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3378 #ifdef ASSEMBLER_DIALECT
3379 /* Helper function to parse assembler dialects in the asm string.
3380 This is called from output_asm_insn and asm_fprintf. */
3381 static const char *
3382 do_assembler_dialects (const char *p, int *dialect)
3384 char c = *(p - 1);
3386 switch (c)
3388 case '{':
3390 int i;
3392 if (*dialect)
3393 output_operand_lossage ("nested assembly dialect alternatives");
3394 else
3395 *dialect = 1;
3397 /* If we want the first dialect, do nothing. Otherwise, skip
3398 DIALECT_NUMBER strings, each terminated by '|'. */
3399 for (i = 0; i < dialect_number; i++)
3401 while (*p && *p != '}' && *p++ != '|')
3403 if (*p == '}')
3404 break;
3407 if (*p == '\0')
3408 output_operand_lossage ("unterminated assembly dialect alternative");
3410 break;
3412 case '|':
3413 if (*dialect)
3415 /* Skip to close brace. */
3418 if (*p == '\0')
3420 output_operand_lossage ("unterminated assembly dialect alternative");
3421 break;
3424 while (*p++ != '}');
3425 *dialect = 0;
3427 else
3428 putc (c, asm_out_file);
3429 break;
3431 case '}':
3432 if (! *dialect)
3433 putc (c, asm_out_file);
3434 *dialect = 0;
3435 break;
3436 default:
3437 gcc_unreachable ();
3440 return p;
3442 #endif
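/* Editorial addition, not part of GCC: a self-contained sketch of how
   "{a|b|c}" assembler dialect alternatives are expanded by
   do_assembler_dialects / output_asm_insn, for a single fixed dialect
   number.  The demo_* names are hypothetical.  */
#if 0
#include <stdio.h>

static void
demo_expand_dialect (FILE *out, const char *p, int dialect_number)
{
  while (*p)
    {
      if (*p == '{')
        {
          int i;
          p++;
          /* Skip DIALECT_NUMBER alternatives, each ended by '|'.  */
          for (i = 0; i < dialect_number; i++)
            {
              while (*p && *p != '}' && *p != '|')
                p++;
              if (*p == '|')
                p++;
            }
          /* Copy the selected alternative.  */
          while (*p && *p != '}' && *p != '|')
            putc (*p++, out);
          /* Skip any unused tail up to and including the closing brace.  */
          while (*p && *p != '}')
            p++;
          if (*p == '}')
            p++;
        }
      else
        putc (*p++, out);
    }
}

/* demo_expand_dialect (stdout, "cmov{l|.l} %1, %0", 1) prints
   "cmov.l %1, %0"; with dialect 0 it prints "cmovl %1, %0".  */
#endif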
3444 /* Output text from TEMPLATE to the assembler output file,
3445 obeying %-directions to substitute operands taken from
3446 the vector OPERANDS.
3448 %N (for N a digit) means print operand N in usual manner.
3449 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3450 and print the label name with no punctuation.
3451 %cN means require operand N to be a constant
3452 and print the constant expression with no punctuation.
3453 %aN means expect operand N to be a memory address
3454 (not a memory reference!) and print a reference
3455 to that address.
3456 %nN means expect operand N to be a constant
3457 and print a constant expression for minus the value
3458 of the operand, with no other punctuation. */
3460 void
3461 output_asm_insn (const char *templ, rtx *operands)
3463 const char *p;
3464 int c;
3465 #ifdef ASSEMBLER_DIALECT
3466 int dialect = 0;
3467 #endif
3468 int oporder[MAX_RECOG_OPERANDS];
3469 char opoutput[MAX_RECOG_OPERANDS];
3470 int ops = 0;
3472 /* An insn may return a null string template
3473 in a case where no assembler code is needed. */
3474 if (*templ == 0)
3475 return;
3477 memset (opoutput, 0, sizeof opoutput);
3478 p = templ;
3479 putc ('\t', asm_out_file);
3481 #ifdef ASM_OUTPUT_OPCODE
3482 ASM_OUTPUT_OPCODE (asm_out_file, p);
3483 #endif
3485 while ((c = *p++))
3486 switch (c)
3488 case '\n':
3489 if (flag_verbose_asm)
3490 output_asm_operand_names (operands, oporder, ops);
3491 if (flag_print_asm_name)
3492 output_asm_name ();
3494 ops = 0;
3495 memset (opoutput, 0, sizeof opoutput);
3497 putc (c, asm_out_file);
3498 #ifdef ASM_OUTPUT_OPCODE
3499 while ((c = *p) == '\t')
3501 putc (c, asm_out_file);
3502 p++;
3504 ASM_OUTPUT_OPCODE (asm_out_file, p);
3505 #endif
3506 break;
3508 #ifdef ASSEMBLER_DIALECT
3509 case '{':
3510 case '}':
3511 case '|':
3512 p = do_assembler_dialects (p, &dialect);
3513 break;
3514 #endif
3516 case '%':
3517 /* %% outputs a single %. */
3518 if (*p == '%')
3520 p++;
3521 putc (c, asm_out_file);
3523 /* %= outputs a number which is unique to each insn in the entire
3524 compilation. This is useful for making local labels that are
3525 referred to more than once in a given insn. */
3526 else if (*p == '=')
3528 p++;
3529 fprintf (asm_out_file, "%d", insn_counter);
3531 /* % followed by a letter and some digits
3532 outputs an operand in a special way depending on the letter.
3533 Letters `acln' are implemented directly.
3534 Other letters are passed to `output_operand' so that
3535 the TARGET_PRINT_OPERAND hook can define them. */
3536 else if (ISALPHA (*p))
3538 int letter = *p++;
3539 unsigned long opnum;
3540 char *endptr;
3542 opnum = strtoul (p, &endptr, 10);
3544 if (endptr == p)
3545 output_operand_lossage ("operand number missing "
3546 "after %%-letter");
3547 else if (this_is_asm_operands && opnum >= insn_noperands)
3548 output_operand_lossage ("operand number out of range");
3549 else if (letter == 'l')
3550 output_asm_label (operands[opnum]);
3551 else if (letter == 'a')
3552 output_address (operands[opnum]);
3553 else if (letter == 'c')
3555 if (CONSTANT_ADDRESS_P (operands[opnum]))
3556 output_addr_const (asm_out_file, operands[opnum]);
3557 else
3558 output_operand (operands[opnum], 'c');
3560 else if (letter == 'n')
3562 if (CONST_INT_P (operands[opnum]))
3563 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3564 - INTVAL (operands[opnum]));
3565 else
3567 putc ('-', asm_out_file);
3568 output_addr_const (asm_out_file, operands[opnum]);
3571 else
3572 output_operand (operands[opnum], letter);
3574 if (!opoutput[opnum])
3575 oporder[ops++] = opnum;
3576 opoutput[opnum] = 1;
3578 p = endptr;
3579 c = *p;
3581 /* % followed by a digit outputs an operand the default way. */
3582 else if (ISDIGIT (*p))
3584 unsigned long opnum;
3585 char *endptr;
3587 opnum = strtoul (p, &endptr, 10);
3588 if (this_is_asm_operands && opnum >= insn_noperands)
3589 output_operand_lossage ("operand number out of range");
3590 else
3591 output_operand (operands[opnum], 0);
3593 if (!opoutput[opnum])
3594 oporder[ops++] = opnum;
3595 opoutput[opnum] = 1;
3597 p = endptr;
3598 c = *p;
3600 /* % followed by punctuation: output something for that
3601 punctuation character alone, with no operand. The
3602 TARGET_PRINT_OPERAND hook decides what is actually done. */
3603 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3604 output_operand (NULL_RTX, *p++);
3605 else
3606 output_operand_lossage ("invalid %%-code");
3607 break;
3609 default:
3610 putc (c, asm_out_file);
3613 /* Write out the variable names for operands, if we know them. */
3614 if (flag_verbose_asm)
3615 output_asm_operand_names (operands, oporder, ops);
3616 if (flag_print_asm_name)
3617 output_asm_name ();
3619 putc ('\n', asm_out_file);
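/* Editorial addition, not part of GCC: the core of the %-substitution loop
   above, reduced to string operands and only the %% and %N directives.
   Letter and punctuation codes are target-specific in the real
   implementation; this sketch just echoes them.  The demo_* names are
   hypothetical.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>

static void
demo_output_template (FILE *out, const char *templ,
                      const char *const *operands, unsigned long noperands)
{
  const char *p = templ;
  int c;

  while ((c = *p++))
    {
      if (c != '%')
        {
          putc (c, out);
          continue;
        }
      if (*p == '%')
        {
          putc ('%', out);
          p++;
        }
      else if (isdigit ((unsigned char) *p))
        {
          char *endptr;
          unsigned long opnum = strtoul (p, &endptr, 10);

          if (opnum < noperands)
            fputs (operands[opnum], out);
          p = endptr;
        }
      else
        {
          /* %<letter>N and %-punctuation go to target hooks in GCC proper;
             here they are copied through unchanged.  */
          putc ('%', out);
          if (*p)
            putc (*p++, out);
        }
    }
}

/* With operands[] = { "r0", "r1", "#4" },
   demo_output_template (stdout, "add\t%0, %1, %2", operands, 3)
   prints "add\tr0, r1, #4".  */
#endif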
3622 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3624 void
3625 output_asm_label (rtx x)
3627 char buf[256];
3629 if (GET_CODE (x) == LABEL_REF)
3630 x = XEXP (x, 0);
3631 if (LABEL_P (x)
3632 || (NOTE_P (x)
3633 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3634 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3635 else
3636 output_operand_lossage ("'%%l' operand isn't a label");
3638 assemble_name (asm_out_file, buf);
3641 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3642 output_operand. Marks SYMBOL_REFs as referenced through use of
3643 assemble_external. */
3645 static int
3646 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3648 rtx x = *xp;
3650 /* If we have a used symbol, we may have to emit assembly
3651 annotations corresponding to whether the symbol is external, weak
3652 or has non-default visibility. */
3653 if (GET_CODE (x) == SYMBOL_REF)
3655 tree t;
3657 t = SYMBOL_REF_DECL (x);
3658 if (t)
3659 assemble_external (t);
3661 return -1;
3664 return 0;
3667 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3669 void
3670 mark_symbol_refs_as_used (rtx x)
3672 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3675 /* Print operand X using machine-dependent assembler syntax.
3676 CODE is a non-digit that preceded the operand-number in the % spec,
3677 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3678 between the % and the digits.
3679 When CODE is a non-letter, X is 0.
3681 The meanings of the letters are machine-dependent and controlled
3682 by TARGET_PRINT_OPERAND. */
3684 void
3685 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3687 if (x && GET_CODE (x) == SUBREG)
3688 x = alter_subreg (&x, true);
3690 /* X must not be a pseudo reg. */
3691 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3693 targetm.asm_out.print_operand (asm_out_file, x, code);
3695 if (x == NULL_RTX)
3696 return;
3698 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3701 /* Print a memory reference operand for address X using
3702 machine-dependent assembler syntax. */
3704 void
3705 output_address (rtx x)
3707 bool changed = false;
3708 walk_alter_subreg (&x, &changed);
3709 targetm.asm_out.print_operand_address (asm_out_file, x);
3712 /* Print an integer constant expression in assembler syntax.
3713 Addition and subtraction are the only arithmetic
3714 that may appear in these expressions. */
3716 void
3717 output_addr_const (FILE *file, rtx x)
3719 char buf[256];
3721 restart:
3722 switch (GET_CODE (x))
3724 case PC:
3725 putc ('.', file);
3726 break;
3728 case SYMBOL_REF:
3729 if (SYMBOL_REF_DECL (x))
3730 assemble_external (SYMBOL_REF_DECL (x));
3731 #ifdef ASM_OUTPUT_SYMBOL_REF
3732 ASM_OUTPUT_SYMBOL_REF (file, x);
3733 #else
3734 assemble_name (file, XSTR (x, 0));
3735 #endif
3736 break;
3738 case LABEL_REF:
3739 x = XEXP (x, 0);
3740 /* Fall through. */
3741 case CODE_LABEL:
3742 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3743 #ifdef ASM_OUTPUT_LABEL_REF
3744 ASM_OUTPUT_LABEL_REF (file, buf);
3745 #else
3746 assemble_name (file, buf);
3747 #endif
3748 break;
3750 case CONST_INT:
3751 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3752 break;
3754 case CONST:
3755 /* This used to output parentheses around the expression,
3756 but that does not work on the 386 (either ATT or BSD assembler). */
3757 output_addr_const (file, XEXP (x, 0));
3758 break;
3760 case CONST_DOUBLE:
3761 if (GET_MODE (x) == VOIDmode)
3763 /* We can use %d if the number is one word and positive. */
3764 if (CONST_DOUBLE_HIGH (x))
3765 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3766 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3767 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3768 else if (CONST_DOUBLE_LOW (x) < 0)
3769 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3770 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3771 else
3772 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3774 else
3775 /* We can't handle floating point constants;
3776 PRINT_OPERAND must handle them. */
3777 output_operand_lossage ("floating constant misused");
3778 break;
3780 case CONST_FIXED:
3781 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3782 break;
3784 case PLUS:
3785 /* Some assemblers need integer constants to appear last (eg masm). */
3786 if (CONST_INT_P (XEXP (x, 0)))
3788 output_addr_const (file, XEXP (x, 1));
3789 if (INTVAL (XEXP (x, 0)) >= 0)
3790 fprintf (file, "+");
3791 output_addr_const (file, XEXP (x, 0));
3793 else
3795 output_addr_const (file, XEXP (x, 0));
3796 if (!CONST_INT_P (XEXP (x, 1))
3797 || INTVAL (XEXP (x, 1)) >= 0)
3798 fprintf (file, "+");
3799 output_addr_const (file, XEXP (x, 1));
3801 break;
3803 case MINUS:
3804 /* Avoid outputting things like x-x or x+5-x,
3805 since some assemblers can't handle that. */
3806 x = simplify_subtraction (x);
3807 if (GET_CODE (x) != MINUS)
3808 goto restart;
3810 output_addr_const (file, XEXP (x, 0));
3811 fprintf (file, "-");
3812 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3813 || GET_CODE (XEXP (x, 1)) == PC
3814 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3815 output_addr_const (file, XEXP (x, 1));
3816 else
3818 fputs (targetm.asm_out.open_paren, file);
3819 output_addr_const (file, XEXP (x, 1));
3820 fputs (targetm.asm_out.close_paren, file);
3822 break;
3824 case ZERO_EXTEND:
3825 case SIGN_EXTEND:
3826 case SUBREG:
3827 case TRUNCATE:
3828 output_addr_const (file, XEXP (x, 0));
3829 break;
3831 default:
3832 if (targetm.asm_out.output_addr_const_extra (file, x))
3833 break;
3835 output_operand_lossage ("invalid expression as operand");
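/* Editorial addition, not part of GCC: the PLUS-ordering rule in
   output_addr_const above -- some assemblers want "sym+4" rather than
   "4+sym", so an integer first operand is printed last.  The demo_* names
   are hypothetical and only the symbol-plus-constant case is shown.  */
#if 0
#include <stdio.h>

static void
demo_print_sym_plus_const (FILE *f, const char *sym, long k, int const_first)
{
  if (const_first)
    {
      /* The constant came first in the expression: emit the symbol first,
         and add '+' only for a non-negative constant (a negative one
         prints its own '-').  */
      fputs (sym, f);
      if (k >= 0)
        putc ('+', f);
      fprintf (f, "%ld", k);
    }
  else
    {
      fputs (sym, f);
      fprintf (f, "%+ld", k);   /* always separate the terms with a sign */
    }
}

/* demo_print_sym_plus_const (stdout, "foo", 8, 1) prints "foo+8";
   with k = -8 it prints "foo-8".  */
#endif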
3839 /* Output a quoted string. */
3841 void
3842 output_quoted_string (FILE *asm_file, const char *string)
3844 #ifdef OUTPUT_QUOTED_STRING
3845 OUTPUT_QUOTED_STRING (asm_file, string);
3846 #else
3847 char c;
3849 putc ('\"', asm_file);
3850 while ((c = *string++) != 0)
3852 if (ISPRINT (c))
3854 if (c == '\"' || c == '\\')
3855 putc ('\\', asm_file);
3856 putc (c, asm_file);
3858 else
3859 fprintf (asm_file, "\\%03o", (unsigned char) c);
3861 putc ('\"', asm_file);
3862 #endif
3865 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3867 void
3868 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3870 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3871 if (value == 0)
3872 putc ('0', f);
3873 else
3875 char *p = buf + sizeof (buf);
3877 *--p = "0123456789abcdef"[value % 16];
3878 while ((value /= 16) != 0);
3879 *--p = 'x';
3880 *--p = '0';
3881 fwrite (p, 1, buf + sizeof (buf) - p, f);
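/* Editorial addition, not part of GCC: the buffer in fprint_whex is sized
   as "0x" plus one hex digit per 4 bits of the value, and the digits are
   produced low-to-high into the end of the buffer so no reversal is
   needed.  A stand-alone copy of the same technique; the demo_* name is
   hypothetical.  */
#if 0
#include <stdio.h>
#include <limits.h>

static void
demo_print_hex (FILE *f, unsigned long value)
{
  char buf[2 + CHAR_BIT * sizeof (value) / 4];
  char *p = buf + sizeof (buf);

  if (value == 0)
    {
      putc ('0', f);
      return;
    }
  do
    *--p = "0123456789abcdef"[value % 16];
  while ((value /= 16) != 0);
  *--p = 'x';
  *--p = '0';
  fwrite (p, 1, buf + sizeof (buf) - p, f);
}

/* demo_print_hex (stdout, 48879UL) prints "0xbeef".  */
#endif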
3885 /* Internal function that prints an unsigned long in decimal in reverse.
3886 The output string IS NOT null-terminated. */
3888 static int
3889 sprint_ul_rev (char *s, unsigned long value)
3891 int i = 0;
3894 s[i] = "0123456789"[value % 10];
3895 value /= 10;
3896 i++;
3897 /* alternate version, without modulo */
3898 /* oldval = value; */
3899 /* value /= 10; */
3900 /* s[i] = "0123456789" [oldval - 10*value]; */
3901 /* i++ */
3903 while (value != 0);
3904 return i;
3907 /* Write an unsigned long as decimal to a file, fast. */
3909 void
3910 fprint_ul (FILE *f, unsigned long value)
3912 /* python says: len(str(2**64)) == 20 */
3913 char s[20];
3914 int i;
3916 i = sprint_ul_rev (s, value);
3918 /* It's probably too small to bother with string reversal and fputs. */
3921 i--;
3922 putc (s[i], f);
3924 while (i != 0);
3927 /* Write an unsigned long as decimal to a string, fast.
3928 s must be wide enough to not overflow, at least 21 chars.
3929 Returns the length of the string (without terminating '\0'). */
3932 sprint_ul (char *s, unsigned long value)
3934 int len;
3935 char tmp_c;
3936 int i;
3937 int j;
3939 len = sprint_ul_rev (s, value);
3940 s[len] = '\0';
3942 /* Reverse the string. */
3943 i = 0;
3944 j = len - 1;
3945 while (i < j)
3947 tmp_c = s[i];
3948 s[i] = s[j];
3949 s[j] = tmp_c;
3950 i++; j--;
3953 return len;
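/* Editorial usage sketch, not part of GCC: sprint_ul_rev writes the digits
   backwards and unterminated, sprint_ul reverses them in place and
   terminates, and fprint_ul simply pops them off the end.  The demo_*
   name below is hypothetical.  */
#if 0
#include <stdio.h>

static void
demo_sprint_ul_usage (void)
{
  char buf[21];                 /* up to 20 digits plus the '\0' */
  int len = sprint_ul (buf, 90210UL);

  /* len == 5 and buf contains "90210".  */
  printf ("%d %s\n", len, buf);
}
#endif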
3956 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3957 %R prints the value of REGISTER_PREFIX.
3958 %L prints the value of LOCAL_LABEL_PREFIX.
3959 %U prints the value of USER_LABEL_PREFIX.
3960 %I prints the value of IMMEDIATE_PREFIX.
3961 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3962 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3964 We handle alternate assembler dialects here, just like output_asm_insn. */
3966 void
3967 asm_fprintf (FILE *file, const char *p, ...)
3969 char buf[10];
3970 char *q, c;
3971 #ifdef ASSEMBLER_DIALECT
3972 int dialect = 0;
3973 #endif
3974 va_list argptr;
3976 va_start (argptr, p);
3978 buf[0] = '%';
3980 while ((c = *p++))
3981 switch (c)
3983 #ifdef ASSEMBLER_DIALECT
3984 case '{':
3985 case '}':
3986 case '|':
3987 p = do_assembler_dialects (p, &dialect);
3988 break;
3989 #endif
3991 case '%':
3992 c = *p++;
3993 q = &buf[1];
3994 while (strchr ("-+ #0", c))
3996 *q++ = c;
3997 c = *p++;
3999 while (ISDIGIT (c) || c == '.')
4001 *q++ = c;
4002 c = *p++;
4004 switch (c)
4006 case '%':
4007 putc ('%', file);
4008 break;
4010 case 'd': case 'i': case 'u':
4011 case 'x': case 'X': case 'o':
4012 case 'c':
4013 *q++ = c;
4014 *q = 0;
4015 fprintf (file, buf, va_arg (argptr, int));
4016 break;
4018 case 'w':
4019 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4020 'o' cases, but we do not check for those cases. It
4021 means that the value is a HOST_WIDE_INT, which may be
4022 either `long' or `long long'. */
4023 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4024 q += strlen (HOST_WIDE_INT_PRINT);
4025 *q++ = *p++;
4026 *q = 0;
4027 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4028 break;
4030 case 'l':
4031 *q++ = c;
4032 #ifdef HAVE_LONG_LONG
4033 if (*p == 'l')
4035 *q++ = *p++;
4036 *q++ = *p++;
4037 *q = 0;
4038 fprintf (file, buf, va_arg (argptr, long long));
4040 else
4041 #endif
4043 *q++ = *p++;
4044 *q = 0;
4045 fprintf (file, buf, va_arg (argptr, long));
4048 break;
4050 case 's':
4051 *q++ = c;
4052 *q = 0;
4053 fprintf (file, buf, va_arg (argptr, char *));
4054 break;
4056 case 'O':
4057 #ifdef ASM_OUTPUT_OPCODE
4058 ASM_OUTPUT_OPCODE (asm_out_file, p);
4059 #endif
4060 break;
4062 case 'R':
4063 #ifdef REGISTER_PREFIX
4064 fprintf (file, "%s", REGISTER_PREFIX);
4065 #endif
4066 break;
4068 case 'I':
4069 #ifdef IMMEDIATE_PREFIX
4070 fprintf (file, "%s", IMMEDIATE_PREFIX);
4071 #endif
4072 break;
4074 case 'L':
4075 #ifdef LOCAL_LABEL_PREFIX
4076 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4077 #endif
4078 break;
4080 case 'U':
4081 fputs (user_label_prefix, file);
4082 break;
4084 #ifdef ASM_FPRINTF_EXTENSIONS
4085 /* Uppercase letters are reserved for general use by asm_fprintf
4086 and so are not available to target specific code. In order to
4087 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
4088 they are defined here. As they get turned into real extensions
4089 to asm_fprintf they should be removed from this list. */
4090 case 'A': case 'B': case 'C': case 'D': case 'E':
4091 case 'F': case 'G': case 'H': case 'J': case 'K':
4092 case 'M': case 'N': case 'P': case 'Q': case 'S':
4093 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4094 break;
4096 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4097 #endif
4098 default:
4099 gcc_unreachable ();
4101 break;
4103 default:
4104 putc (c, file);
4106 va_end (argptr);
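/* Editorial addition, not part of GCC: the asm_fprintf idea of extending a
   printf-style scan with target-specific prefix directives, reduced to a
   self-contained sketch that handles only %U (a configurable label
   prefix), %d, %s and %%.  The demo_* names are hypothetical.  */
#if 0
#include <stdio.h>
#include <stdarg.h>

static const char *demo_user_label_prefix = "_";

static void
demo_asm_fprintf (FILE *file, const char *p, ...)
{
  va_list ap;
  char c;

  va_start (ap, p);
  while ((c = *p++))
    {
      if (c != '%')
        {
          putc (c, file);
          continue;
        }
      c = *p;
      if (c)
        p++;
      switch (c)
        {
        case '%':
          putc ('%', file);
          break;
        case 'd':
          fprintf (file, "%d", va_arg (ap, int));
          break;
        case 's':
          fputs (va_arg (ap, const char *), file);
          break;
        case 'U':
          fputs (demo_user_label_prefix, file);
          break;
        default:
          break;                /* other directives ignored in this sketch */
        }
    }
  va_end (ap);
}

/* demo_asm_fprintf (stdout, "call\t%U%s\n", "memcpy") prints a call to
   "_memcpy" followed by a newline.  */
#endif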
4109 /* Return nonzero if this function has no function calls. */
4112 leaf_function_p (void)
4114 rtx insn;
4115 rtx link;
4117 if (crtl->profile || profile_arc_flag)
4118 return 0;
4120 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4122 if (CALL_P (insn)
4123 && ! SIBLING_CALL_P (insn))
4124 return 0;
4125 if (NONJUMP_INSN_P (insn)
4126 && GET_CODE (PATTERN (insn)) == SEQUENCE
4127 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4128 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4129 return 0;
4131 for (link = crtl->epilogue_delay_list;
4132 link;
4133 link = XEXP (link, 1))
4135 insn = XEXP (link, 0);
4137 if (CALL_P (insn)
4138 && ! SIBLING_CALL_P (insn))
4139 return 0;
4140 if (NONJUMP_INSN_P (insn)
4141 && GET_CODE (PATTERN (insn)) == SEQUENCE
4142 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4143 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4144 return 0;
4147 return 1;
4150 /* Return 1 if branch is a forward branch.
4151 Uses insn_shuid array, so it works only in the final pass. May be used by
4152 output templates to add customary branch prediction hints.
4155 final_forward_branch_p (rtx insn)
4157 int insn_id, label_id;
4159 gcc_assert (uid_shuid);
4160 insn_id = INSN_SHUID (insn);
4161 label_id = INSN_SHUID (JUMP_LABEL (insn));
4162 /* We've hit some insns that do not have id information available. */
4163 gcc_assert (insn_id && label_id);
4164 return insn_id < label_id;
4167 /* On some machines, a function with no call insns
4168 can run faster if it doesn't create its own register window.
4169 When output, the leaf function should use only the "output"
4170 registers. Ordinarily, the function would be compiled to use
4171 the "input" registers to find its arguments; it is a candidate
4172 for leaf treatment if it uses only the "input" registers.
4173 Leaf function treatment means renumbering so the function
4174 uses the "output" registers instead. */
4176 #ifdef LEAF_REGISTERS
4178 /* Return 1 if this function uses only the registers that can be
4179 safely renumbered. */
4182 only_leaf_regs_used (void)
4184 int i;
4185 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4187 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4188 if ((df_regs_ever_live_p (i) || global_regs[i])
4189 && ! permitted_reg_in_leaf_functions[i])
4190 return 0;
4192 if (crtl->uses_pic_offset_table
4193 && pic_offset_table_rtx != 0
4194 && REG_P (pic_offset_table_rtx)
4195 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4196 return 0;
4198 return 1;
4201 /* Scan all instructions and renumber all registers into those
4202 available in leaf functions. */
4204 static void
4205 leaf_renumber_regs (rtx first)
4207 rtx insn;
4209 /* Renumber only the actual patterns.
4210 The reg-notes can contain frame pointer refs,
4211 and renumbering them could crash, and should not be needed. */
4212 for (insn = first; insn; insn = NEXT_INSN (insn))
4213 if (INSN_P (insn))
4214 leaf_renumber_regs_insn (PATTERN (insn));
4215 for (insn = crtl->epilogue_delay_list;
4216 insn;
4217 insn = XEXP (insn, 1))
4218 if (INSN_P (XEXP (insn, 0)))
4219 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
4222 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4223 available in leaf functions. */
4225 void
4226 leaf_renumber_regs_insn (rtx in_rtx)
4228 int i, j;
4229 const char *format_ptr;
4231 if (in_rtx == 0)
4232 return;
4234 /* Renumber all input-registers into output-registers. */
4238 if (REG_P (in_rtx))
4240 int newreg;
4242 /* Don't renumber the same reg twice. */
4243 if (in_rtx->used)
4244 return;
4246 newreg = REGNO (in_rtx);
4247 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4248 to reach here as part of a REG_NOTE. */
4249 if (newreg >= FIRST_PSEUDO_REGISTER)
4251 in_rtx->used = 1;
4252 return;
4254 newreg = LEAF_REG_REMAP (newreg);
4255 gcc_assert (newreg >= 0);
4256 df_set_regs_ever_live (REGNO (in_rtx), false);
4257 df_set_regs_ever_live (newreg, true);
4258 SET_REGNO (in_rtx, newreg);
4259 in_rtx->used = 1;
4262 if (INSN_P (in_rtx))
4264 /* Inside a SEQUENCE, we find insns.
4265 Renumber just the patterns of these insns,
4266 just as we do for the top-level insns. */
4267 leaf_renumber_regs_insn (PATTERN (in_rtx));
4268 return;
4271 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
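/* GET_RTX_FORMAT yields one format character per operand of this rtx
   code; a SET, for instance, has format "ee" (two rtx operands).  Only
   the 'e' (rtx) and 'E' (rtx vector) operands are recursed into below;
   the remaining codes hold integers, strings or insn references and need
   no renumbering here.  */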
4273 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4274 switch (*format_ptr++)
4276 case 'e':
4277 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4278 break;
4280 case 'E':
4281 if (NULL != XVEC (in_rtx, i))
4283 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4284 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4286 break;
4288 case 'S':
4289 case 's':
4290 case '0':
4291 case 'i':
4292 case 'w':
4293 case 'n':
4294 case 'u':
4295 break;
4297 default:
4298 gcc_unreachable ();
4301 #endif
4303 /* Turn the RTL into assembly. */
4304 static unsigned int
4305 rest_of_handle_final (void)
4307 rtx x;
4308 const char *fnname;
4310 /* Get the function's name, as described by its RTL. This may be
4311 different from the DECL_NAME name used in the source file. */
4313 x = DECL_RTL (current_function_decl);
4314 gcc_assert (MEM_P (x));
4315 x = XEXP (x, 0);
4316 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4317 fnname = XSTR (x, 0);
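/* For instance (an illustrative example, not from any particular source
   file): with a GNU asm label such as

     void fatal (void) asm ("fatal_error_handler");

   DECL_NAME is still "fatal", but the SYMBOL_REF, and hence FNNAME,
   is "fatal_error_handler".  */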
4319 assemble_start_function (current_function_decl, fnname);
4320 final_start_function (get_insns (), asm_out_file, optimize);
4321 final (get_insns (), asm_out_file, optimize);
4322 final_end_function ();
4324 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4325 directive that closes the procedure descriptor; the same holds for x64 SEH.
4326 On other targets this placement is not strictly necessary, but it does no harm. */
4327 output_function_exception_table (fnname);
4329 assemble_end_function (current_function_decl, fnname);
4331 user_defined_section_attribute = false;
4333 /* Free up reg info memory. */
4334 free_reg_info ();
4336 if (! quiet_flag)
4337 fflush (asm_out_file);
4339 /* Write DBX symbols if requested. */
4341 /* Note that for those inline functions where we don't initially
4342 know for certain that we will be generating an out-of-line copy,
4343 the first invocation of this routine (rest_of_compilation) will
4344 skip over this code by doing a `goto exit_rest_of_compilation;'.
4345 Later on, wrapup_global_declarations will (indirectly) call
4346 rest_of_compilation again for those inline functions that need
4347 to have out-of-line copies generated. During that call, we
4348 *will* be routed past here. */
4350 timevar_push (TV_SYMOUT);
4351 if (!DECL_IGNORED_P (current_function_decl))
4352 debug_hooks->function_decl (current_function_decl);
4353 timevar_pop (TV_SYMOUT);
4355 /* Release the BLOCK tree linked to DECL_INITIAL () so its memory can be freed. */
4356 DECL_INITIAL (current_function_decl) = error_mark_node;
4358 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4359 && targetm.have_ctors_dtors)
4360 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4361 decl_init_priority_lookup
4362 (current_function_decl));
4363 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4364 && targetm.have_ctors_dtors)
4365 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4366 decl_fini_priority_lookup
4367 (current_function_decl));
4368 return 0;
4371 struct rtl_opt_pass pass_final =
4374 RTL_PASS,
4375 "final", /* name */
4376 NULL, /* gate */
4377 rest_of_handle_final, /* execute */
4378 NULL, /* sub */
4379 NULL, /* next */
4380 0, /* static_pass_number */
4381 TV_FINAL, /* tv_id */
4382 0, /* properties_required */
4383 0, /* properties_provided */
4384 0, /* properties_destroyed */
4385 0, /* todo_flags_start */
4386 TODO_ggc_collect /* todo_flags_finish */
4391 static unsigned int
4392 rest_of_handle_shorten_branches (void)
4394 /* Shorten branches. */
4395 shorten_branches (get_insns ());
4396 return 0;
4399 struct rtl_opt_pass pass_shorten_branches =
4402 RTL_PASS,
4403 "shorten", /* name */
4404 NULL, /* gate */
4405 rest_of_handle_shorten_branches, /* execute */
4406 NULL, /* sub */
4407 NULL, /* next */
4408 0, /* static_pass_number */
4409 TV_SHORTEN_BRANCH, /* tv_id */
4410 0, /* properties_required */
4411 0, /* properties_provided */
4412 0, /* properties_destroyed */
4413 0, /* todo_flags_start */
4414 0 /* todo_flags_finish */
4419 static unsigned int
4420 rest_of_clean_state (void)
4422 rtx insn, next;
4423 FILE *final_output = NULL;
4424 int save_unnumbered = flag_dump_unnumbered;
4425 int save_noaddr = flag_dump_noaddr;
4427 if (flag_dump_final_insns)
4429 final_output = fopen (flag_dump_final_insns, "a");
4430 if (!final_output)
4432 error ("could not open final insn dump file %qs: %m",
4433 flag_dump_final_insns);
4434 flag_dump_final_insns = NULL;
4436 else
4438 flag_dump_noaddr = flag_dump_unnumbered = 1;
4439 if (flag_compare_debug_opt || flag_compare_debug)
4440 dump_flags |= TDF_NOUID;
4441 dump_function_header (final_output, current_function_decl,
4442 dump_flags);
4443 final_insns_dump_p = true;
4445 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4446 if (LABEL_P (insn))
4447 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4448 else
4450 if (NOTE_P (insn))
4451 set_block_for_insn (insn, NULL);
4452 INSN_UID (insn) = 0;
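/* Background note, summarizing rather than defining the interface:
   flag_dump_final_insns is normally set via -fdump-final-insns[=FILE],
   and the -fcompare-debug machinery compares two such dumps of the same
   unit, which is why UIDs and addresses are suppressed above to keep the
   dumps stable.  For example

     gcc -O2 -fcompare-debug -c foo.c

   compiles foo.c twice, toggling -g, and reports any difference between
   the resulting final insn dumps.  */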
4457 /* It is very important to decompose the RTL instruction chain here:
4458 debug information keeps pointing into CODE_LABEL insns inside the function
4459 body. If these remain linked to the other insns, we end up preserving the
4460 whole RTL chain and its attached detailed debug info in memory. */
4461 for (insn = get_insns (); insn; insn = next)
4463 next = NEXT_INSN (insn);
4464 NEXT_INSN (insn) = NULL;
4465 PREV_INSN (insn) = NULL;
4467 if (final_output
4468 && (!NOTE_P (insn) ||
4469 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4470 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4471 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4472 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4473 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4474 print_rtl_single (final_output, insn);
4477 if (final_output)
4479 flag_dump_noaddr = save_noaddr;
4480 flag_dump_unnumbered = save_unnumbered;
4481 final_insns_dump_p = false;
4483 if (fclose (final_output))
4485 error ("could not close final insn dump file %qs: %m",
4486 flag_dump_final_insns);
4487 flag_dump_final_insns = NULL;
4491 /* In case the function was not output,
4492 don't leave any temporary anonymous types
4493 queued up for sdb output. */
4494 #ifdef SDB_DEBUGGING_INFO
4495 if (write_symbols == SDB_DEBUG)
4496 sdbout_types (NULL_TREE);
4497 #endif
4499 flag_rerun_cse_after_global_opts = 0;
4500 reload_completed = 0;
4501 epilogue_completed = 0;
4502 #ifdef STACK_REGS
4503 regstack_completed = 0;
4504 #endif
4506 /* Clear out the insn_length contents now that they are no
4507 longer valid. */
4508 init_insn_lengths ();
4510 /* Show no temporary slots allocated. */
4511 init_temp_slots ();
4513 free_bb_for_insn ();
4515 delete_tree_ssa ();
4517 /* We can reduce stack alignment at call sites only when we are sure that
4518 the function body just produced will actually be used in the final
4519 executable. */
4520 if (decl_binds_to_current_def_p (current_function_decl))
4522 unsigned int pref = crtl->preferred_stack_boundary;
4523 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4524 pref = crtl->stack_alignment_needed;
4525 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4526 = pref;
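/* Illustrative numbers only: both fields are measured in bits, so a
   function compiled with a 128-bit (16-byte) preferred boundary whose
   body nevertheless required 256-bit alignment records 256 here, while a
   function needing no more than the default lets callers that bind to
   this definition avoid over-aligning their call sites.  */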
4529 /* Make sure volatile mem refs aren't considered valid operands for
4530 arithmetic insns. We must call this here if this is a nested inline
4531 function, since the above code leaves us in the init_recog state,
4532 and the function context push/pop code does not save/restore volatile_ok.
4534 ??? Maybe it isn't necessary for expand_start_function to call this
4535 anymore if we do it here? */
4537 init_recog_no_volatile ();
4539 /* We're done with this function. Free up memory if we can. */
4540 free_after_parsing (cfun);
4541 free_after_compilation (cfun);
4542 return 0;
4545 struct rtl_opt_pass pass_clean_state =
4548 RTL_PASS,
4549 "*clean_state", /* name */
4550 NULL, /* gate */
4551 rest_of_clean_state, /* execute */
4552 NULL, /* sub */
4553 NULL, /* next */
4554 0, /* static_pass_number */
4555 TV_FINAL, /* tv_id */
4556 0, /* properties_required */
4557 0, /* properties_provided */
4558 PROP_rtl, /* properties_destroyed */
4559 0, /* todo_flags_start */
4560 0 /* todo_flags_finish */