[official-gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This is the final pass of the compiler.
24 It looks at the rtl code for a function and outputs assembler code.
26 Call `final_start_function' to output the assembler code for function entry,
27 `final' to output assembler code for some RTL code,
28 `final_end_function' to output assembler code for function exit.
29 If a function is compiled in several pieces, each piece is
30 output separately with `final'.
32 Some optimizations are also done at this level.
33 Move instructions that were made unnecessary by good register allocation
34 are detected and omitted from the output. (Though most of these
35 are removed by the last jump pass.)
37 Instructions to set the condition codes are omitted when it can be
38 seen that the condition codes already had the desired values.
40 In some cases it is sufficient if the inherited condition codes
41 have related values, but this may require the following insn
42 (the one that tests the condition codes) to be modified.
44 The code for the function prologue and epilogue is generated
45 directly in assembler by the target functions function_prologue and
46 function_epilogue. Those instructions never exist as rtl. */
48 #include "config.h"
49 #include "system.h"
50 #include "coretypes.h"
51 #include "tm.h"
53 #include "tree.h"
54 #include "rtl.h"
55 #include "tm_p.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
59 #include "recog.h"
60 #include "conditions.h"
61 #include "flags.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "rtl-error.h"
67 #include "toplev.h" /* exact_log2, floor_log2 */
68 #include "reload.h"
69 #include "intl.h"
70 #include "basic-block.h"
71 #include "target.h"
72 #include "targhooks.h"
73 #include "debug.h"
74 #include "expr.h"
75 #include "cfglayout.h"
76 #include "tree-pass.h"
77 #include "tree-flow.h"
78 #include "timevar.h"
79 #include "cgraph.h"
80 #include "coverage.h"
81 #include "df.h"
82 #include "vecprim.h"
83 #include "ggc.h"
84 #include "cfgloop.h"
85 #include "params.h"
87 #ifdef XCOFF_DEBUGGING_INFO
88 #include "xcoffout.h" /* Needed for external data
89 declarations for e.g. AIX 4.x. */
90 #endif
92 #include "dwarf2out.h"
94 #ifdef DBX_DEBUGGING_INFO
95 #include "dbxout.h"
96 #endif
98 #ifdef SDB_DEBUGGING_INFO
99 #include "sdbout.h"
100 #endif
102 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
103 So define a null default for it to save conditionalization later. */
104 #ifndef CC_STATUS_INIT
105 #define CC_STATUS_INIT
106 #endif
108 /* How to start an assembler comment. */
109 #ifndef ASM_COMMENT_START
110 #define ASM_COMMENT_START ";#"
111 #endif
113 /* Is the given character a logical line separator for the assembler? */
114 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
115 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
116 #endif
118 #ifndef JUMP_TABLES_IN_TEXT_SECTION
119 #define JUMP_TABLES_IN_TEXT_SECTION 0
120 #endif
122 /* Bitflags used by final_scan_insn. */
123 #define SEEN_BB 1
124 #define SEEN_NOTE 2
125 #define SEEN_EMITTED 4
127 /* Last insn processed by final_scan_insn. */
128 static rtx debug_insn;
129 rtx current_output_insn;
131 /* Line number of last NOTE. */
132 static int last_linenum;
134 /* Last discriminator written to assembly. */
135 static int last_discriminator;
137 /* Discriminator of current block. */
138 static int discriminator;
140 /* Highest line number in current block. */
141 static int high_block_linenum;
143 /* Likewise for function. */
144 static int high_function_linenum;
146 /* Filename of last NOTE. */
147 static const char *last_filename;
149 /* Override filename and line number. */
150 static const char *override_filename;
151 static int override_linenum;
153 /* Whether to force emission of a line note before the next insn. */
154 static bool force_source_line = false;
156 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
158 /* Nonzero while outputting an `asm' with operands.
159 This means that inconsistencies are the user's fault, so don't die.
160 The precise value is the insn being output, to pass to error_for_asm. */
161 rtx this_is_asm_operands;
163 /* Number of operands of this insn, for an `asm' with operands. */
164 static unsigned int insn_noperands;
166 /* Compare optimization flag. */
168 static rtx last_ignored_compare = 0;
170 /* Assign a unique number to each insn that is output.
171 This can be used to generate unique local labels. */
173 static int insn_counter = 0;
175 #ifdef HAVE_cc0
176 /* This variable contains machine-dependent flags (defined in tm.h)
177 set and examined by output routines
178 that describe how to interpret the condition codes properly. */
180 CC_STATUS cc_status;
182 /* During output of an insn, this contains a copy of cc_status
183 from before the insn. */
185 CC_STATUS cc_prev_status;
186 #endif
188 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
190 static int block_depth;
192 /* Nonzero if have enabled APP processing of our assembler output. */
194 static int app_on;
196 /* If we are outputting an insn sequence, this contains the sequence rtx.
197 Zero otherwise. */
199 rtx final_sequence;
201 #ifdef ASSEMBLER_DIALECT
203 /* Number of the assembler dialect to use, starting at 0. */
204 static int dialect_number;
205 #endif
207 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
208 rtx current_insn_predicate;
210 /* True if printing into -fdump-final-insns= dump. */
211 bool final_insns_dump_p;
213 #ifdef HAVE_ATTR_length
214 static int asm_insn_count (rtx);
215 #endif
216 static void profile_function (FILE *);
217 static void profile_after_prologue (FILE *);
218 static bool notice_source_line (rtx, bool *);
219 static rtx walk_alter_subreg (rtx *, bool *);
220 static void output_asm_name (void);
221 static void output_alternate_entry_point (FILE *, rtx);
222 static tree get_mem_expr_from_op (rtx, int *);
223 static void output_asm_operand_names (rtx *, int *, int);
224 #ifdef LEAF_REGISTERS
225 static void leaf_renumber_regs (rtx);
226 #endif
227 #ifdef HAVE_cc0
228 static int alter_cond (rtx);
229 #endif
230 #ifndef ADDR_VEC_ALIGN
231 static int final_addr_vec_align (rtx);
232 #endif
233 #ifdef HAVE_ATTR_length
234 static int align_fuzz (rtx, rtx, int, unsigned);
235 #endif
237 /* Initialize data in final at the beginning of a compilation. */
239 void
240 init_final (const char *filename ATTRIBUTE_UNUSED)
242 app_on = 0;
243 final_sequence = 0;
245 #ifdef ASSEMBLER_DIALECT
246 dialect_number = ASSEMBLER_DIALECT;
247 #endif
250 /* Default target function prologue and epilogue assembler output.
252 If not overridden for epilogue code, then the function body itself
253 contains return instructions wherever needed. */
254 void
255 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
256 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
260 void
261 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
262 tree decl ATTRIBUTE_UNUSED,
263 bool new_is_cold ATTRIBUTE_UNUSED)
267 /* Default target hook that outputs nothing to a stream. */
268 void
269 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
273 /* Enable APP processing of subsequent output.
274 Used before the output from an `asm' statement. */
276 void
277 app_enable (void)
279 if (! app_on)
281 fputs (ASM_APP_ON, asm_out_file);
282 app_on = 1;
286 /* Disable APP processing of subsequent output.
287 Called from varasm.c before most kinds of output. */
289 void
290 app_disable (void)
292 if (app_on)
294 fputs (ASM_APP_OFF, asm_out_file);
295 app_on = 0;
299 /* Return the number of slots filled in the current
300 delayed branch sequence (we don't count the insn needing the
301 delay slot). Zero if not in a delayed branch sequence. */
303 #ifdef DELAY_SLOTS
305 dbr_sequence_length (void)
307 if (final_sequence != 0)
308 return XVECLEN (final_sequence, 0) - 1;
309 else
310 return 0;
312 #endif
314 /* The next two pages contain routines used to compute the length of an insn
315 and to shorten branches. */
317 /* Arrays for insn lengths, and addresses. The latter is referenced by
318 `insn_current_length'. */
320 static int *insn_lengths;
322 VEC(int,heap) *insn_addresses_;
324 /* Max uid for which the above arrays are valid. */
325 static int insn_lengths_max_uid;
327 /* Address of insn being processed. Used by `insn_current_length'. */
328 int insn_current_address;
330 /* Address of insn being processed in previous iteration. */
331 int insn_last_address;
333 /* Known invariant alignment of the insn being processed. */
334 int insn_current_align;
336 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
337 gives the next following alignment insn that increases the known
338 alignment, or NULL_RTX if there is no such insn.
339 For any alignment obtained this way, we can again index uid_align with
340 its uid to obtain the next following align that in turn increases the
341 alignment, till we reach NULL_RTX; the sequence obtained this way
342 for each insn we'll call the alignment chain of this insn in the following
343 comments. */
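/* A purely illustrative example (hypothetical insn stream, not taken from
   any target): suppose label L1 requests alignment 2**2, a later label L2
   requests 2**3, and a still later label L3 requests 2**4.  For an insn
   just before L1 the alignment chain is L1 -> L2 -> L3 -> NULL_RTX; for an
   insn between L1 and L2 it is L2 -> L3 -> NULL_RTX, since from each link
   we only follow to the next insn that increases the known alignment.  */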
345 struct label_alignment
347 short alignment;
348 short max_skip;
351 static rtx *uid_align;
352 static int *uid_shuid;
353 static struct label_alignment *label_align;
355 /* Indicate that branch shortening hasn't yet been done. */
357 void
358 init_insn_lengths (void)
360 if (uid_shuid)
362 free (uid_shuid);
363 uid_shuid = 0;
365 if (insn_lengths)
367 free (insn_lengths);
368 insn_lengths = 0;
369 insn_lengths_max_uid = 0;
371 #ifdef HAVE_ATTR_length
372 INSN_ADDRESSES_FREE ();
373 #endif
374 if (uid_align)
376 free (uid_align);
377 uid_align = 0;
381 /* Obtain the current length of an insn. If branch shortening has been done,
382 get its actual length. Otherwise, use FALLBACK_FN to calculate the
383 length. */
384 static inline int
385 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
386 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
388 #ifdef HAVE_ATTR_length
389 rtx body;
390 int i;
391 int length = 0;
393 if (insn_lengths_max_uid > INSN_UID (insn))
394 return insn_lengths[INSN_UID (insn)];
395 else
396 switch (GET_CODE (insn))
398 case NOTE:
399 case BARRIER:
400 case CODE_LABEL:
401 case DEBUG_INSN:
402 return 0;
404 case CALL_INSN:
405 length = fallback_fn (insn);
406 break;
408 case JUMP_INSN:
409 body = PATTERN (insn);
410 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
412 /* Alignment is machine-dependent and should be handled by
413 ADDR_VEC_ALIGN. */
415 else
416 length = fallback_fn (insn);
417 break;
419 case INSN:
420 body = PATTERN (insn);
421 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
422 return 0;
424 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
425 length = asm_insn_count (body) * fallback_fn (insn);
426 else if (GET_CODE (body) == SEQUENCE)
427 for (i = 0; i < XVECLEN (body, 0); i++)
428 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
429 else
430 length = fallback_fn (insn);
431 break;
433 default:
434 break;
437 #ifdef ADJUST_INSN_LENGTH
438 ADJUST_INSN_LENGTH (insn, length);
439 #endif
440 return length;
441 #else /* not HAVE_ATTR_length */
442 return 0;
443 #define insn_default_length 0
444 #define insn_min_length 0
445 #endif /* not HAVE_ATTR_length */
448 /* Obtain the current length of an insn. If branch shortening has been done,
449 get its actual length. Otherwise, get its maximum length. */
451 get_attr_length (rtx insn)
453 return get_attr_length_1 (insn, insn_default_length);
456 /* Obtain the current length of an insn. If branch shortening has been done,
457 get its actual length. Otherwise, get its minimum length. */
459 get_attr_min_length (rtx insn)
461 return get_attr_length_1 (insn, insn_min_length);
464 /* Code to handle alignment inside shorten_branches. */
466 /* Here is an explanation of how the algorithm in align_fuzz can give
467 proper results:
469 Call a sequence of instructions beginning with alignment point X
470 and continuing until the next alignment point `block X'. When `X'
471 is used in an expression, it means the alignment value of the
472 alignment point.
474 Call the distance between the start of the first insn of block X, and
475 the end of the last insn of block X `IX', for the `inner size of X'.
476 This is clearly the sum of the instruction lengths.
478 Likewise with the next alignment-delimited block following X, which we
479 shall call block Y.
481 Call the distance between the start of the first insn of block X, and
482 the start of the first insn of block Y `OX', for the `outer size of X'.
484 The estimated padding is then OX - IX.
486 OX can be safely estimated as
488 if (X >= Y)
489 OX = round_up(IX, Y)
490 else
491 OX = round_up(IX, X) + Y - X
493 Clearly est(IX) >= real(IX), because that only depends on the
494 instruction lengths, and those being overestimated is a given.
496 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
497 we needn't worry about that when thinking about OX.
499 When X >= Y, the alignment provided by Y adds no uncertainty factor
500 for branch ranges starting before X, so we can just round what we have.
501 But when X < Y, we don't know anything about the, so to speak,
502 `middle bits', so we have to assume the worst when aligning up from an
503 address mod X to one mod Y, which is Y - X. */
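/* A hypothetical worked example of the estimate above (numbers chosen for
   illustration only): let block X start on an alignment of X = 4, have
   inner size IX = 10 bytes, and be followed by an alignment point Y = 8.
   Since X < Y, OX = round_up (10, 4) + 8 - 4 = 16, so the estimated
   padding is OX - IX = 6 bytes.  The real padding is 6 bytes when the
   block happens to start at an address that is 0 mod 8, and only 2 bytes
   when it starts at 4 mod 8, so the estimate is never smaller than the
   real padding in either case.  */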
505 #ifndef LABEL_ALIGN
506 #define LABEL_ALIGN(LABEL) align_labels_log
507 #endif
509 #ifndef LOOP_ALIGN
510 #define LOOP_ALIGN(LABEL) align_loops_log
511 #endif
513 #ifndef LABEL_ALIGN_AFTER_BARRIER
514 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
515 #endif
517 #ifndef JUMP_ALIGN
518 #define JUMP_ALIGN(LABEL) align_jumps_log
519 #endif
522 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
524 return 0;
528 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
530 return align_loops_max_skip;
534 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
536 return align_labels_max_skip;
540 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
542 return align_jumps_max_skip;
545 #ifndef ADDR_VEC_ALIGN
546 static int
547 final_addr_vec_align (rtx addr_vec)
549 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
551 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
552 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
553 return exact_log2 (align);
557 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
558 #endif
560 #ifndef INSN_LENGTH_ALIGNMENT
561 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
562 #endif
564 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
566 static int min_labelno, max_labelno;
568 #define LABEL_TO_ALIGNMENT(LABEL) \
569 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
571 #define LABEL_TO_MAX_SKIP(LABEL) \
572 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
574 /* For the benefit of port-specific code, these are also provided as functions. */
577 label_to_alignment (rtx label)
579 if (CODE_LABEL_NUMBER (label) <= max_labelno)
580 return LABEL_TO_ALIGNMENT (label);
581 return 0;
585 label_to_max_skip (rtx label)
587 if (CODE_LABEL_NUMBER (label) <= max_labelno)
588 return LABEL_TO_MAX_SKIP (label);
589 return 0;
592 #ifdef HAVE_ATTR_length
593 /* The differences in addresses
594 between a branch and its target might grow or shrink depending on
595 the alignment the start insn of the range (the branch for a forward
596 branch or the label for a backward branch) starts out on; if these
597 differences are used naively, they can even oscillate infinitely.
598 We therefore want to compute a 'worst case' address difference that
599 is independent of the alignment the start insn of the range ends
600 up on, and that is at least as large as the actual difference.
601 The function align_fuzz calculates the amount we have to add to the
602 naively computed difference, by traversing the part of the alignment
603 chain of the start insn of the range that is in front of the end insn
604 of the range, and considering for each alignment the maximum amount
605 that it might contribute to a size increase.
607 For casesi tables, we also want to know worst case minimum amounts of
608 address difference, in case a machine description wants to introduce
609 some common offset that is added to all offsets in a table.
610 For this purpose, align_fuzz with a growth argument of 0 computes the
611 appropriate adjustment. */
613 /* Compute the maximum delta by which the difference of the addresses of
614 START and END might grow / shrink due to a different address for start
615 which changes the size of alignment insns between START and END.
616 KNOWN_ALIGN_LOG is the alignment known for START.
617 GROWTH should be ~0 if the objective is to compute potential code size
618 increase, and 0 if the objective is to compute potential shrink.
619 The return value is undefined for any other value of GROWTH. */
621 static int
622 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
624 int uid = INSN_UID (start);
625 rtx align_label;
626 int known_align = 1 << known_align_log;
627 int end_shuid = INSN_SHUID (end);
628 int fuzz = 0;
630 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
632 int align_addr, new_align;
634 uid = INSN_UID (align_label);
635 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
636 if (uid_shuid[uid] > end_shuid)
637 break;
638 known_align_log = LABEL_TO_ALIGNMENT (align_label);
639 new_align = 1 << known_align_log;
640 if (new_align < known_align)
641 continue;
642 fuzz += (-align_addr ^ growth) & (new_align - known_align);
643 known_align = new_align;
645 return fuzz;
648 /* Compute a worst-case reference address of a branch so that it
649 can be safely used in the presence of aligned labels. Since the
650 size of the branch itself is unknown, the size of the branch is
651 not included in the range. I.e. for a forward branch, the reference
652 address is the end address of the branch as known from the previous
653 branch shortening pass, minus a value to account for possible size
654 increase due to alignment. For a backward branch, it is the start
655 address of the branch as known from the current pass, plus a value
656 to account for possible size increase due to alignment.
657 NB.: Therefore, the maximum offset allowed for backward branches needs
658 to exclude the branch size. */
661 insn_current_reference_address (rtx branch)
663 rtx dest, seq;
664 int seq_uid;
666 if (! INSN_ADDRESSES_SET_P ())
667 return 0;
669 seq = NEXT_INSN (PREV_INSN (branch));
670 seq_uid = INSN_UID (seq);
671 if (!JUMP_P (branch))
672 /* This can happen for example on the PA; the objective is to know the
673 offset to address something in front of the start of the function.
674 Thus, we can treat it like a backward branch.
675 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
676 any alignment we'd encounter, so we skip the call to align_fuzz. */
677 return insn_current_address;
678 dest = JUMP_LABEL (branch);
680 /* BRANCH has no proper alignment chain set, so use SEQ.
681 BRANCH also has no INSN_SHUID. */
682 if (INSN_SHUID (seq) < INSN_SHUID (dest))
684 /* Forward branch. */
685 return (insn_last_address + insn_lengths[seq_uid]
686 - align_fuzz (seq, dest, length_unit_log, ~0));
688 else
690 /* Backward branch. */
691 return (insn_current_address
692 + align_fuzz (dest, seq, length_unit_log, ~0));
695 #endif /* HAVE_ATTR_length */
697 /* Compute branch alignments based on frequency information in the
698 CFG. */
700 unsigned int
701 compute_alignments (void)
703 int log, max_skip, max_log;
704 basic_block bb;
705 int freq_max = 0;
706 int freq_threshold = 0;
708 if (label_align)
710 free (label_align);
711 label_align = 0;
714 max_labelno = max_label_num ();
715 min_labelno = get_first_label_num ();
716 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
718 /* If not optimizing or optimizing for size, don't assign any alignments. */
719 if (! optimize || optimize_function_for_size_p (cfun))
720 return 0;
722 if (dump_file)
724 dump_flow_info (dump_file, TDF_DETAILS);
725 flow_loops_dump (dump_file, NULL, 1);
727 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
728 FOR_EACH_BB (bb)
729 if (bb->frequency > freq_max)
730 freq_max = bb->frequency;
731 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
733 if (dump_file)
734 fprintf(dump_file, "freq_max: %i\n",freq_max);
735 FOR_EACH_BB (bb)
737 rtx label = BB_HEAD (bb);
738 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
739 edge e;
740 edge_iterator ei;
742 if (!LABEL_P (label)
743 || optimize_bb_for_size_p (bb))
745 if (dump_file)
746 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
747 bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
748 continue;
750 max_log = LABEL_ALIGN (label);
751 max_skip = targetm.asm_out.label_align_max_skip (label);
753 FOR_EACH_EDGE (e, ei, bb->preds)
755 if (e->flags & EDGE_FALLTHRU)
756 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
757 else
758 branch_frequency += EDGE_FREQUENCY (e);
760 if (dump_file)
762 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
763 bb->index, bb->frequency, bb->loop_father->num,
764 bb->loop_depth,
765 fallthru_frequency, branch_frequency);
766 if (!bb->loop_father->inner && bb->loop_father->num)
767 fprintf (dump_file, " inner_loop");
768 if (bb->loop_father->header == bb)
769 fprintf (dump_file, " loop_header");
770 fprintf (dump_file, "\n");
773 /* There are two purposes for aligning a block with no fallthru incoming edge:
774 1) to avoid fetch stalls when branch destination is near cache boundary
775 2) to improve cache efficiency in case the previous block is not executed
776 (so it does not need to be in the cache).
778 To catch the first case, we align frequently executed blocks.
779 To catch the second, we align blocks that are executed more frequently
780 than their predecessor, where the predecessor is likely not to be
781 executed when the function is called. */
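/* Hypothetical numbers for illustration, assuming the default value 100
   of the align-threshold parameter: with freq_max == 1000, freq_threshold
   is 10, so a block with no fallthru predecessor is aligned when its
   incoming branch frequency exceeds 10, or when it is more than ten times
   as frequent as the preceding block and that block runs at most half as
   often as the function entry.  */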
783 if (!has_fallthru
784 && (branch_frequency > freq_threshold
785 || (bb->frequency > bb->prev_bb->frequency * 10
786 && (bb->prev_bb->frequency
787 <= ENTRY_BLOCK_PTR->frequency / 2))))
789 log = JUMP_ALIGN (label);
790 if (dump_file)
791 fprintf(dump_file, " jump alignment added.\n");
792 if (max_log < log)
794 max_log = log;
795 max_skip = targetm.asm_out.jump_align_max_skip (label);
798 /* If the block is frequent and reached mostly by non-fallthru edges,
799 align it. It is most likely the first block of a loop. */
800 if (has_fallthru
801 && optimize_bb_for_speed_p (bb)
802 && branch_frequency + fallthru_frequency > freq_threshold
803 && (branch_frequency
804 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
806 log = LOOP_ALIGN (label);
807 if (dump_file)
808 fprintf(dump_file, " internal loop alignment added.\n");
809 if (max_log < log)
811 max_log = log;
812 max_skip = targetm.asm_out.loop_align_max_skip (label);
815 LABEL_TO_ALIGNMENT (label) = max_log;
816 LABEL_TO_MAX_SKIP (label) = max_skip;
819 loop_optimizer_finalize ();
820 free_dominance_info (CDI_DOMINATORS);
821 return 0;
824 struct rtl_opt_pass pass_compute_alignments =
827 RTL_PASS,
828 "alignments", /* name */
829 NULL, /* gate */
830 compute_alignments, /* execute */
831 NULL, /* sub */
832 NULL, /* next */
833 0, /* static_pass_number */
834 TV_NONE, /* tv_id */
835 0, /* properties_required */
836 0, /* properties_provided */
837 0, /* properties_destroyed */
838 0, /* todo_flags_start */
839 TODO_dump_func | TODO_verify_rtl_sharing
840 | TODO_ggc_collect /* todo_flags_finish */
845 /* Make a pass over all insns and compute their actual lengths by shortening
846 any branches of variable length if possible. */
848 /* shorten_branches might be called multiple times: for example, the SH
849 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
850 In order to do this, it needs proper length information, which it obtains
851 by calling shorten_branches. This cannot be collapsed with
852 shorten_branches itself into a single pass unless we also want to integrate
853 reorg.c, since the branch splitting exposes new instructions with delay
854 slots. */
856 void
857 shorten_branches (rtx first ATTRIBUTE_UNUSED)
859 rtx insn;
860 int max_uid;
861 int i;
862 int max_log;
863 int max_skip;
864 #ifdef HAVE_ATTR_length
865 #define MAX_CODE_ALIGN 16
866 rtx seq;
867 int something_changed = 1;
868 char *varying_length;
869 rtx body;
870 int uid;
871 rtx align_tab[MAX_CODE_ALIGN];
873 #endif
875 /* Compute maximum UID and allocate label_align / uid_shuid. */
876 max_uid = get_max_uid ();
878 /* Free uid_shuid before reallocating it. */
879 free (uid_shuid);
881 uid_shuid = XNEWVEC (int, max_uid);
883 if (max_labelno != max_label_num ())
885 int old = max_labelno;
886 int n_labels;
887 int n_old_labels;
889 max_labelno = max_label_num ();
891 n_labels = max_labelno - min_labelno + 1;
892 n_old_labels = old - min_labelno + 1;
894 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
896 /* Range of labels grows monotonically in the function. Failing here
897 means that the initialization of array got lost. */
898 gcc_assert (n_old_labels <= n_labels);
900 memset (label_align + n_old_labels, 0,
901 (n_labels - n_old_labels) * sizeof (struct label_alignment));
904 /* Initialize label_align and set up uid_shuid to be strictly
905 monotonically rising with insn order. */
906 /* We use max_log here to keep track of the maximum alignment we want to
907 impose on the next CODE_LABEL (or the current one if we are processing
908 the CODE_LABEL itself). */
910 max_log = 0;
911 max_skip = 0;
913 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
915 int log;
917 INSN_SHUID (insn) = i++;
918 if (INSN_P (insn))
919 continue;
921 if (LABEL_P (insn))
923 rtx next;
924 bool next_is_jumptable;
926 /* Merge in alignments computed by compute_alignments. */
927 log = LABEL_TO_ALIGNMENT (insn);
928 if (max_log < log)
930 max_log = log;
931 max_skip = LABEL_TO_MAX_SKIP (insn);
934 next = next_nonnote_insn (insn);
935 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
936 if (!next_is_jumptable)
938 log = LABEL_ALIGN (insn);
939 if (max_log < log)
941 max_log = log;
942 max_skip = targetm.asm_out.label_align_max_skip (insn);
945 /* ADDR_VECs only take room if read-only data goes into the text
946 section. */
947 if ((JUMP_TABLES_IN_TEXT_SECTION
948 || readonly_data_section == text_section)
949 && next_is_jumptable)
951 log = ADDR_VEC_ALIGN (next);
952 if (max_log < log)
954 max_log = log;
955 max_skip = targetm.asm_out.label_align_max_skip (insn);
958 LABEL_TO_ALIGNMENT (insn) = max_log;
959 LABEL_TO_MAX_SKIP (insn) = max_skip;
960 max_log = 0;
961 max_skip = 0;
963 else if (BARRIER_P (insn))
965 rtx label;
967 for (label = insn; label && ! INSN_P (label);
968 label = NEXT_INSN (label))
969 if (LABEL_P (label))
971 log = LABEL_ALIGN_AFTER_BARRIER (insn);
972 if (max_log < log)
974 max_log = log;
975 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
977 break;
981 #ifdef HAVE_ATTR_length
983 /* Allocate the rest of the arrays. */
984 insn_lengths = XNEWVEC (int, max_uid);
985 insn_lengths_max_uid = max_uid;
986 /* Syntax errors can lead to labels being outside of the main insn stream.
987 Initialize insn_addresses, so that we get reproducible results. */
988 INSN_ADDRESSES_ALLOC (max_uid);
990 varying_length = XCNEWVEC (char, max_uid);
992 /* Initialize uid_align. We scan instructions
993 from end to start, and keep in align_tab[n] the last seen insn
994 that does an alignment of at least n+1, i.e. the successor
995 in the alignment chain for an insn that does / has a known
996 alignment of n. */
997 uid_align = XCNEWVEC (rtx, max_uid);
999 for (i = MAX_CODE_ALIGN; --i >= 0;)
1000 align_tab[i] = NULL_RTX;
1001 seq = get_last_insn ();
1002 for (; seq; seq = PREV_INSN (seq))
1004 int uid = INSN_UID (seq);
1005 int log;
1006 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1007 uid_align[uid] = align_tab[0];
1008 if (log)
1010 /* Found an alignment label. */
1011 uid_align[uid] = align_tab[log];
1012 for (i = log - 1; i >= 0; i--)
1013 align_tab[i] = seq;
1016 #ifdef CASE_VECTOR_SHORTEN_MODE
1017 if (optimize)
1019 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1020 label fields. */
1022 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1023 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1024 int rel;
1026 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1028 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1029 int len, i, min, max, insn_shuid;
1030 int min_align;
1031 addr_diff_vec_flags flags;
1033 if (!JUMP_P (insn)
1034 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1035 continue;
1036 pat = PATTERN (insn);
1037 len = XVECLEN (pat, 1);
1038 gcc_assert (len > 0);
1039 min_align = MAX_CODE_ALIGN;
1040 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1042 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1043 int shuid = INSN_SHUID (lab);
1044 if (shuid < min)
1046 min = shuid;
1047 min_lab = lab;
1049 if (shuid > max)
1051 max = shuid;
1052 max_lab = lab;
1054 if (min_align > LABEL_TO_ALIGNMENT (lab))
1055 min_align = LABEL_TO_ALIGNMENT (lab);
1057 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1058 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1059 insn_shuid = INSN_SHUID (insn);
1060 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1061 memset (&flags, 0, sizeof (flags));
1062 flags.min_align = min_align;
1063 flags.base_after_vec = rel > insn_shuid;
1064 flags.min_after_vec = min > insn_shuid;
1065 flags.max_after_vec = max > insn_shuid;
1066 flags.min_after_base = min > rel;
1067 flags.max_after_base = max > rel;
1068 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1071 #endif /* CASE_VECTOR_SHORTEN_MODE */
1073 /* Compute initial lengths, addresses, and varying flags for each insn. */
1074 for (insn_current_address = 0, insn = first;
1075 insn != 0;
1076 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1078 uid = INSN_UID (insn);
1080 insn_lengths[uid] = 0;
1082 if (LABEL_P (insn))
1084 int log = LABEL_TO_ALIGNMENT (insn);
1085 if (log)
1087 int align = 1 << log;
1088 int new_address = (insn_current_address + align - 1) & -align;
1089 insn_lengths[uid] = new_address - insn_current_address;
1093 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1095 if (NOTE_P (insn) || BARRIER_P (insn)
1096 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1097 continue;
1098 if (INSN_DELETED_P (insn))
1099 continue;
1101 body = PATTERN (insn);
1102 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1104 /* This only takes room if read-only data goes into the text
1105 section. */
1106 if (JUMP_TABLES_IN_TEXT_SECTION
1107 || readonly_data_section == text_section)
1108 insn_lengths[uid] = (XVECLEN (body,
1109 GET_CODE (body) == ADDR_DIFF_VEC)
1110 * GET_MODE_SIZE (GET_MODE (body)));
1111 /* Alignment is handled by ADDR_VEC_ALIGN. */
1113 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1114 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1115 else if (GET_CODE (body) == SEQUENCE)
1117 int i;
1118 int const_delay_slots;
1119 #ifdef DELAY_SLOTS
1120 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1121 #else
1122 const_delay_slots = 0;
1123 #endif
1124 /* Inside a delay slot sequence, we do not do any branch shortening
1125 if the shortening could change the number of delay slots
1126 of the branch. */
1127 for (i = 0; i < XVECLEN (body, 0); i++)
1129 rtx inner_insn = XVECEXP (body, 0, i);
1130 int inner_uid = INSN_UID (inner_insn);
1131 int inner_length;
1133 if (GET_CODE (body) == ASM_INPUT
1134 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1135 inner_length = (asm_insn_count (PATTERN (inner_insn))
1136 * insn_default_length (inner_insn));
1137 else
1138 inner_length = insn_default_length (inner_insn);
1140 insn_lengths[inner_uid] = inner_length;
1141 if (const_delay_slots)
1143 if ((varying_length[inner_uid]
1144 = insn_variable_length_p (inner_insn)) != 0)
1145 varying_length[uid] = 1;
1146 INSN_ADDRESSES (inner_uid) = (insn_current_address
1147 + insn_lengths[uid]);
1149 else
1150 varying_length[inner_uid] = 0;
1151 insn_lengths[uid] += inner_length;
1154 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1156 insn_lengths[uid] = insn_default_length (insn);
1157 varying_length[uid] = insn_variable_length_p (insn);
1160 /* If needed, do any adjustment. */
1161 #ifdef ADJUST_INSN_LENGTH
1162 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1163 if (insn_lengths[uid] < 0)
1164 fatal_insn ("negative insn length", insn);
1165 #endif
1168 /* Now loop over all the insns finding varying length insns. For each,
1169 get the current insn length. If it has changed, reflect the change.
1170 When nothing changes for a full pass, we are done. */
1172 while (something_changed)
1174 something_changed = 0;
1175 insn_current_align = MAX_CODE_ALIGN - 1;
1176 for (insn_current_address = 0, insn = first;
1177 insn != 0;
1178 insn = NEXT_INSN (insn))
1180 int new_length;
1181 #ifdef ADJUST_INSN_LENGTH
1182 int tmp_length;
1183 #endif
1184 int length_align;
1186 uid = INSN_UID (insn);
1188 if (LABEL_P (insn))
1190 int log = LABEL_TO_ALIGNMENT (insn);
1191 if (log > insn_current_align)
1193 int align = 1 << log;
1194 int new_address= (insn_current_address + align - 1) & -align;
1195 insn_lengths[uid] = new_address - insn_current_address;
1196 insn_current_align = log;
1197 insn_current_address = new_address;
1199 else
1200 insn_lengths[uid] = 0;
1201 INSN_ADDRESSES (uid) = insn_current_address;
1202 continue;
1205 length_align = INSN_LENGTH_ALIGNMENT (insn);
1206 if (length_align < insn_current_align)
1207 insn_current_align = length_align;
1209 insn_last_address = INSN_ADDRESSES (uid);
1210 INSN_ADDRESSES (uid) = insn_current_address;
1212 #ifdef CASE_VECTOR_SHORTEN_MODE
1213 if (optimize && JUMP_P (insn)
1214 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1216 rtx body = PATTERN (insn);
1217 int old_length = insn_lengths[uid];
1218 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1219 rtx min_lab = XEXP (XEXP (body, 2), 0);
1220 rtx max_lab = XEXP (XEXP (body, 3), 0);
1221 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1222 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1223 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1224 rtx prev;
1225 int rel_align = 0;
1226 addr_diff_vec_flags flags;
1228 /* Avoid automatic aggregate initialization. */
1229 flags = ADDR_DIFF_VEC_FLAGS (body);
1231 /* Try to find a known alignment for rel_lab. */
1232 for (prev = rel_lab;
1233 prev
1234 && ! insn_lengths[INSN_UID (prev)]
1235 && ! (varying_length[INSN_UID (prev)] & 1);
1236 prev = PREV_INSN (prev))
1237 if (varying_length[INSN_UID (prev)] & 2)
1239 rel_align = LABEL_TO_ALIGNMENT (prev);
1240 break;
1243 /* See the comment on addr_diff_vec_flags in rtl.h for the
1244 meaning of the flags values. base: REL_LAB vec: INSN */
1245 /* Anything after INSN still has addresses from the last
1246 pass; adjust these so that they reflect our current
1247 estimate for this pass. */
1248 if (flags.base_after_vec)
1249 rel_addr += insn_current_address - insn_last_address;
1250 if (flags.min_after_vec)
1251 min_addr += insn_current_address - insn_last_address;
1252 if (flags.max_after_vec)
1253 max_addr += insn_current_address - insn_last_address;
1254 /* We want to know the worst case, i.e. lowest possible value
1255 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1256 its offset is positive, and we have to be wary of code shrink;
1257 otherwise, it is negative, and we have to be wary of code
1258 size increase. */
1259 if (flags.min_after_base)
1261 /* If INSN is between REL_LAB and MIN_LAB, the size
1262 changes we are about to make can change the alignment
1263 within the observed offset, therefore we have to break
1264 it up into two parts that are independent. */
1265 if (! flags.base_after_vec && flags.min_after_vec)
1267 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1268 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1270 else
1271 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1273 else
1275 if (flags.base_after_vec && ! flags.min_after_vec)
1277 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1278 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1280 else
1281 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1283 /* Likewise, determine the highest possible value
1284 for the offset of MAX_LAB. */
1285 if (flags.max_after_base)
1287 if (! flags.base_after_vec && flags.max_after_vec)
1289 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1290 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1292 else
1293 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1295 else
1297 if (flags.base_after_vec && ! flags.max_after_vec)
1299 max_addr += align_fuzz (max_lab, insn, 0, 0);
1300 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1302 else
1303 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1305 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1306 max_addr - rel_addr,
1307 body));
1308 if (JUMP_TABLES_IN_TEXT_SECTION
1309 || readonly_data_section == text_section)
1311 insn_lengths[uid]
1312 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1313 insn_current_address += insn_lengths[uid];
1314 if (insn_lengths[uid] != old_length)
1315 something_changed = 1;
1318 continue;
1320 #endif /* CASE_VECTOR_SHORTEN_MODE */
1322 if (! (varying_length[uid]))
1324 if (NONJUMP_INSN_P (insn)
1325 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1327 int i;
1329 body = PATTERN (insn);
1330 for (i = 0; i < XVECLEN (body, 0); i++)
1332 rtx inner_insn = XVECEXP (body, 0, i);
1333 int inner_uid = INSN_UID (inner_insn);
1335 INSN_ADDRESSES (inner_uid) = insn_current_address;
1337 insn_current_address += insn_lengths[inner_uid];
1340 else
1341 insn_current_address += insn_lengths[uid];
1343 continue;
1346 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1348 int i;
1350 body = PATTERN (insn);
1351 new_length = 0;
1352 for (i = 0; i < XVECLEN (body, 0); i++)
1354 rtx inner_insn = XVECEXP (body, 0, i);
1355 int inner_uid = INSN_UID (inner_insn);
1356 int inner_length;
1358 INSN_ADDRESSES (inner_uid) = insn_current_address;
1360 /* insn_current_length returns 0 for insns with a
1361 non-varying length. */
1362 if (! varying_length[inner_uid])
1363 inner_length = insn_lengths[inner_uid];
1364 else
1365 inner_length = insn_current_length (inner_insn);
1367 if (inner_length != insn_lengths[inner_uid])
1369 insn_lengths[inner_uid] = inner_length;
1370 something_changed = 1;
1372 insn_current_address += insn_lengths[inner_uid];
1373 new_length += inner_length;
1376 else
1378 new_length = insn_current_length (insn);
1379 insn_current_address += new_length;
1382 #ifdef ADJUST_INSN_LENGTH
1383 /* If needed, do any adjustment. */
1384 tmp_length = new_length;
1385 ADJUST_INSN_LENGTH (insn, new_length);
1386 insn_current_address += (new_length - tmp_length);
1387 #endif
1389 if (new_length != insn_lengths[uid])
1391 insn_lengths[uid] = new_length;
1392 something_changed = 1;
1395 /* For a non-optimizing compile, do only a single pass. */
1396 if (!optimize)
1397 break;
1400 free (varying_length);
1402 #endif /* HAVE_ATTR_length */
1405 #ifdef HAVE_ATTR_length
1406 /* Given the body of an INSN known to be generated by an ASM statement, return
1407 the number of machine instructions likely to be generated for this insn.
1408 This is used to compute its length. */
1410 static int
1411 asm_insn_count (rtx body)
1413 const char *templ;
1415 if (GET_CODE (body) == ASM_INPUT)
1416 templ = XSTR (body, 0);
1417 else
1418 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1420 return asm_str_count (templ);
1422 #endif
1424 /* Return the number of machine instructions likely to be generated for the
1425 inline-asm template. */
1427 asm_str_count (const char *templ)
1429 int count = 1;
1431 if (!*templ)
1432 return 0;
1434 for (; *templ; templ++)
1435 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1436 || *templ == '\n')
1437 count++;
1439 return count;
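/* A small illustrative example (hypothetical template, assuming the
   default IS_ASM_LOGICAL_LINE_SEPARATOR, which treats ';' as a
   separator):
       asm_str_count ("mov r0, r1; add r0, r0, #1\n sub r2, r2, r0")
   returns 3: one for the first instruction plus one for the ';' and one
   for the '\n'.  The empty template returns 0.  */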
1442 /* ??? This is probably the wrong place for these. */
1443 /* Structure recording the mapping from source file and directory
1444 names at compile time to those to be embedded in debug
1445 information. */
1446 typedef struct debug_prefix_map
1448 const char *old_prefix;
1449 const char *new_prefix;
1450 size_t old_len;
1451 size_t new_len;
1452 struct debug_prefix_map *next;
1453 } debug_prefix_map;
1455 /* Linked list of such structures. */
1456 debug_prefix_map *debug_prefix_maps;
1459 /* Record a debug file prefix mapping. ARG is the argument to
1460 -fdebug-prefix-map and must be of the form OLD=NEW. */
1462 void
1463 add_debug_prefix_map (const char *arg)
1465 debug_prefix_map *map;
1466 const char *p;
1468 p = strchr (arg, '=');
1469 if (!p)
1471 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1472 return;
1474 map = XNEW (debug_prefix_map);
1475 map->old_prefix = xstrndup (arg, p - arg);
1476 map->old_len = p - arg;
1477 p++;
1478 map->new_prefix = xstrdup (p);
1479 map->new_len = strlen (p);
1480 map->next = debug_prefix_maps;
1481 debug_prefix_maps = map;
1484 /* Perform user-specified mapping of debug filename prefixes. Return
1485 the new name corresponding to FILENAME. */
1487 const char *
1488 remap_debug_filename (const char *filename)
1490 debug_prefix_map *map;
1491 char *s;
1492 const char *name;
1493 size_t name_len;
1495 for (map = debug_prefix_maps; map; map = map->next)
1496 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1497 break;
1498 if (!map)
1499 return filename;
1500 name = filename + map->old_len;
1501 name_len = strlen (name) + 1;
1502 s = (char *) alloca (name_len + map->new_len);
1503 memcpy (s, map->new_prefix, map->new_len);
1504 memcpy (s + map->new_len, name, name_len);
1505 return ggc_strdup (s);
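/* An illustrative example with made-up paths: after
   -fdebug-prefix-map=/tmp/build/src=/usr/src has been recorded by
   add_debug_prefix_map, remap_debug_filename ("/tmp/build/src/gcc/final.c")
   returns "/usr/src/gcc/final.c", while a filename that does not begin
   with the old prefix is returned unchanged.  */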
1508 /* Return true if DWARF2 debug info can be emitted for DECL. */
1510 static bool
1511 dwarf2_debug_info_emitted_p (tree decl)
1513 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1514 return false;
1516 if (DECL_IGNORED_P (decl))
1517 return false;
1519 return true;
1522 /* Output assembler code for the start of a function,
1523 and initialize some of the variables in this file
1524 for the new function. The label for the function and associated
1525 assembler pseudo-ops have already been output in `assemble_start_function'.
1527 FIRST is the first insn of the rtl for the function being compiled.
1528 FILE is the file to write assembler code to.
1529 OPTIMIZE_P is nonzero if we should eliminate redundant
1530 test and compare insns. */
1532 void
1533 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1534 int optimize_p ATTRIBUTE_UNUSED)
1536 block_depth = 0;
1538 this_is_asm_operands = 0;
1540 last_filename = locator_file (prologue_locator);
1541 last_linenum = locator_line (prologue_locator);
1542 last_discriminator = discriminator = 0;
1544 high_block_linenum = high_function_linenum = last_linenum;
1546 if (!DECL_IGNORED_P (current_function_decl))
1547 debug_hooks->begin_prologue (last_linenum, last_filename);
1549 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1550 dwarf2out_begin_prologue (0, NULL);
1552 #ifdef LEAF_REG_REMAP
1553 if (current_function_uses_only_leaf_regs)
1554 leaf_renumber_regs (first);
1555 #endif
1557 /* The Sun386i and perhaps other machines don't work right
1558 if the profiling code comes after the prologue. */
1559 if (targetm.profile_before_prologue () && crtl->profile)
1560 profile_function (file);
1562 #if defined (HAVE_prologue)
1563 if (dwarf2out_do_frame ())
1564 dwarf2out_frame_debug_init ();
1565 #endif
1567 /* If debugging, assign block numbers to all of the blocks in this
1568 function. */
1569 if (write_symbols)
1571 reemit_insn_block_notes ();
1572 number_blocks (current_function_decl);
1573 /* We never actually put out begin/end notes for the top-level
1574 block in the function. But, conceptually, that block is
1575 always needed. */
1576 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1579 if (warn_frame_larger_than
1580 && get_frame_size () > frame_larger_than_size)
1582 /* Issue a warning */
1583 warning (OPT_Wframe_larger_than_,
1584 "the frame size of %wd bytes is larger than %wd bytes",
1585 get_frame_size (), frame_larger_than_size);
1588 /* First output the function prologue: code to set up the stack frame. */
1589 targetm.asm_out.function_prologue (file, get_frame_size ());
1591 /* If the machine represents the prologue as RTL, the profiling code must
1592 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1593 #ifdef HAVE_prologue
1594 if (! HAVE_prologue)
1595 #endif
1596 profile_after_prologue (file);
1599 static void
1600 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1602 if (!targetm.profile_before_prologue () && crtl->profile)
1603 profile_function (file);
1606 static void
1607 profile_function (FILE *file ATTRIBUTE_UNUSED)
1609 #ifndef NO_PROFILE_COUNTERS
1610 # define NO_PROFILE_COUNTERS 0
1611 #endif
1612 #ifdef ASM_OUTPUT_REG_PUSH
1613 rtx sval = NULL, chain = NULL;
1615 if (cfun->returns_struct)
1616 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1617 true);
1618 if (cfun->static_chain_decl)
1619 chain = targetm.calls.static_chain (current_function_decl, true);
1620 #endif /* ASM_OUTPUT_REG_PUSH */
1622 if (! NO_PROFILE_COUNTERS)
1624 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1625 switch_to_section (data_section);
1626 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1627 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1628 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1631 switch_to_section (current_function_section ());
1633 #ifdef ASM_OUTPUT_REG_PUSH
1634 if (sval && REG_P (sval))
1635 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1636 if (chain && REG_P (chain))
1637 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1638 #endif
1640 FUNCTION_PROFILER (file, current_function_funcdef_no);
1642 #ifdef ASM_OUTPUT_REG_PUSH
1643 if (chain && REG_P (chain))
1644 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1645 if (sval && REG_P (sval))
1646 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1647 #endif
1650 /* Output assembler code for the end of a function.
1651 For clarity, args are same as those of `final_start_function'
1652 even though not all of them are needed. */
1654 void
1655 final_end_function (void)
1657 app_disable ();
1659 if (!DECL_IGNORED_P (current_function_decl))
1660 debug_hooks->end_function (high_function_linenum);
1662 /* Finally, output the function epilogue:
1663 code to restore the stack frame and return to the caller. */
1664 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1666 /* And debug output. */
1667 if (!DECL_IGNORED_P (current_function_decl))
1668 debug_hooks->end_epilogue (last_linenum, last_filename);
1670 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1671 && dwarf2out_do_frame ())
1672 dwarf2out_end_epilogue (last_linenum, last_filename);
1676 /* Dumper helper for basic block information. FILE is the assembly
1677 output file, and INSN is the instruction being emitted. */
1679 static void
1680 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1681 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1683 basic_block bb;
1685 if (!flag_debug_asm)
1686 return;
1688 if (INSN_UID (insn) < bb_map_size
1689 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1691 edge e;
1692 edge_iterator ei;
1694 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1695 if (bb->frequency)
1696 fprintf (file, " freq:%d", bb->frequency);
1697 if (bb->count)
1698 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1699 bb->count);
1700 fprintf (file, " seq:%d", (*bb_seqn)++);
1701 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1702 FOR_EACH_EDGE (e, ei, bb->preds)
1704 dump_edge_info (file, e, 0);
1706 fprintf (file, "\n");
1708 if (INSN_UID (insn) < bb_map_size
1709 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1711 edge e;
1712 edge_iterator ei;
1714 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1715 FOR_EACH_EDGE (e, ei, bb->succs)
1717 dump_edge_info (asm_out_file, e, 1);
1719 fprintf (file, "\n");
1723 /* Output assembler code for some insns: all or part of a function.
1724 For description of args, see `final_start_function', above. */
1726 void
1727 final (rtx first, FILE *file, int optimize_p)
1729 rtx insn;
1730 int max_uid = 0;
1731 int seen = 0;
1733 /* Used for -dA dump. */
1734 basic_block *start_to_bb = NULL;
1735 basic_block *end_to_bb = NULL;
1736 int bb_map_size = 0;
1737 int bb_seqn = 0;
1739 last_ignored_compare = 0;
1741 for (insn = first; insn; insn = NEXT_INSN (insn))
1743 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1744 max_uid = INSN_UID (insn);
1745 #ifdef HAVE_cc0
1746 /* If CC tracking across branches is enabled, record the insn which
1747 jumps to each branch only reached from one place. */
1748 if (optimize_p && JUMP_P (insn))
1750 rtx lab = JUMP_LABEL (insn);
1751 if (lab && LABEL_NUSES (lab) == 1)
1753 LABEL_REFS (lab) = insn;
1756 #endif
1759 init_recog ();
1761 CC_STATUS_INIT;
1763 if (flag_debug_asm)
1765 basic_block bb;
1767 bb_map_size = get_max_uid () + 1;
1768 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1769 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1771 FOR_EACH_BB_REVERSE (bb)
1773 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1774 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1778 /* Output the insns. */
1779 for (insn = first; insn;)
1781 #ifdef HAVE_ATTR_length
1782 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1784 /* This can be triggered by bugs elsewhere in the compiler if
1785 new insns are created after init_insn_lengths is called. */
1786 gcc_assert (NOTE_P (insn));
1787 insn_current_address = -1;
1789 else
1790 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1791 #endif /* HAVE_ATTR_length */
1793 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1794 bb_map_size, &bb_seqn);
1795 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1798 if (flag_debug_asm)
1800 free (start_to_bb);
1801 free (end_to_bb);
1805 const char *
1806 get_insn_template (int code, rtx insn)
1808 switch (insn_data[code].output_format)
1810 case INSN_OUTPUT_FORMAT_SINGLE:
1811 return insn_data[code].output.single;
1812 case INSN_OUTPUT_FORMAT_MULTI:
1813 return insn_data[code].output.multi[which_alternative];
1814 case INSN_OUTPUT_FORMAT_FUNCTION:
1815 gcc_assert (insn);
1816 return (*insn_data[code].output.function) (recog_data.operand, insn);
1818 default:
1819 gcc_unreachable ();
1823 /* Emit the appropriate declaration for an alternate-entry-point
1824 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1825 LABEL_KIND != LABEL_NORMAL.
1827 The case fall-through in this function is intentional. */
1828 static void
1829 output_alternate_entry_point (FILE *file, rtx insn)
1831 const char *name = LABEL_NAME (insn);
1833 switch (LABEL_KIND (insn))
1835 case LABEL_WEAK_ENTRY:
1836 #ifdef ASM_WEAKEN_LABEL
1837 ASM_WEAKEN_LABEL (file, name);
1838 #endif
1839 case LABEL_GLOBAL_ENTRY:
1840 targetm.asm_out.globalize_label (file, name);
1841 case LABEL_STATIC_ENTRY:
1842 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1843 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1844 #endif
1845 ASM_OUTPUT_LABEL (file, name);
1846 break;
1848 case LABEL_NORMAL:
1849 default:
1850 gcc_unreachable ();
1854 /* Given a CALL_INSN, find and return the nested CALL. */
1855 static rtx
1856 call_from_call_insn (rtx insn)
1858 rtx x;
1859 gcc_assert (CALL_P (insn));
1860 x = PATTERN (insn);
1862 while (GET_CODE (x) != CALL)
1864 switch (GET_CODE (x))
1866 default:
1867 gcc_unreachable ();
1868 case COND_EXEC:
1869 x = COND_EXEC_CODE (x);
1870 break;
1871 case PARALLEL:
1872 x = XVECEXP (x, 0, 0);
1873 break;
1874 case SET:
1875 x = XEXP (x, 1);
1876 break;
1879 return x;
1882 /* The final scan for one insn, INSN.
1883 Args are same as in `final', except that INSN
1884 is the insn being scanned.
1885 Value returned is the next insn to be scanned.
1887 NOPEEPHOLES is the flag to disallow peephole processing (currently
1888 used for within delayed branch sequence output).
1890 SEEN is used to track the end of the prologue, for emitting
1891 debug information. We force the emission of a line note after
1892 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1893 at the beginning of the second basic block, whichever comes
1894 first. */
1897 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
1898 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1900 #ifdef HAVE_cc0
1901 rtx set;
1902 #endif
1903 rtx next;
1905 insn_counter++;
1907 /* Ignore deleted insns. These can occur when we split insns (due to a
1908 template of "#") while not optimizing. */
1909 if (INSN_DELETED_P (insn))
1910 return NEXT_INSN (insn);
1912 switch (GET_CODE (insn))
1914 case NOTE:
1915 switch (NOTE_KIND (insn))
1917 case NOTE_INSN_DELETED:
1918 break;
1920 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1921 in_cold_section_p = !in_cold_section_p;
1923 if (dwarf2out_do_frame ())
1924 dwarf2out_switch_text_section ();
1925 else if (!DECL_IGNORED_P (current_function_decl))
1926 debug_hooks->switch_text_section ();
1928 switch_to_section (current_function_section ());
1929 targetm.asm_out.function_switched_text_sections (asm_out_file,
1930 current_function_decl,
1931 in_cold_section_p);
1932 break;
1934 case NOTE_INSN_BASIC_BLOCK:
1935 if (targetm.asm_out.unwind_emit)
1936 targetm.asm_out.unwind_emit (asm_out_file, insn);
1938 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1940 *seen |= SEEN_EMITTED;
1941 force_source_line = true;
1943 else
1944 *seen |= SEEN_BB;
1946 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
1948 break;
1950 case NOTE_INSN_EH_REGION_BEG:
1951 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1952 NOTE_EH_HANDLER (insn));
1953 break;
1955 case NOTE_INSN_EH_REGION_END:
1956 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1957 NOTE_EH_HANDLER (insn));
1958 break;
1960 case NOTE_INSN_PROLOGUE_END:
1961 targetm.asm_out.function_end_prologue (file);
1962 profile_after_prologue (file);
1964 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1966 *seen |= SEEN_EMITTED;
1967 force_source_line = true;
1969 else
1970 *seen |= SEEN_NOTE;
1972 break;
1974 case NOTE_INSN_EPILOGUE_BEG:
1975 #if defined (HAVE_epilogue)
1976 if (dwarf2out_do_frame ())
1977 dwarf2out_cfi_begin_epilogue (insn);
1978 #endif
1979 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
1980 targetm.asm_out.function_begin_epilogue (file);
1981 break;
1983 case NOTE_INSN_CFA_RESTORE_STATE:
1984 dwarf2out_frame_debug_restore_state ();
1985 break;
1987 case NOTE_INSN_FUNCTION_BEG:
1988 app_disable ();
1989 if (!DECL_IGNORED_P (current_function_decl))
1990 debug_hooks->end_prologue (last_linenum, last_filename);
1992 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1994 *seen |= SEEN_EMITTED;
1995 force_source_line = true;
1997 else
1998 *seen |= SEEN_NOTE;
2000 break;
2002 case NOTE_INSN_BLOCK_BEG:
2003 if (debug_info_level == DINFO_LEVEL_NORMAL
2004 || debug_info_level == DINFO_LEVEL_VERBOSE
2005 || write_symbols == DWARF2_DEBUG
2006 || write_symbols == VMS_AND_DWARF2_DEBUG
2007 || write_symbols == VMS_DEBUG)
2009 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2011 app_disable ();
2012 ++block_depth;
2013 high_block_linenum = last_linenum;
2015 /* Output debugging info about the symbol-block beginning. */
2016 if (!DECL_IGNORED_P (current_function_decl))
2017 debug_hooks->begin_block (last_linenum, n);
2019 /* Mark this block as output. */
2020 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2022 if (write_symbols == DBX_DEBUG
2023 || write_symbols == SDB_DEBUG)
2025 location_t *locus_ptr
2026 = block_nonartificial_location (NOTE_BLOCK (insn));
2028 if (locus_ptr != NULL)
2030 override_filename = LOCATION_FILE (*locus_ptr);
2031 override_linenum = LOCATION_LINE (*locus_ptr);
2034 break;
2036 case NOTE_INSN_BLOCK_END:
2037 if (debug_info_level == DINFO_LEVEL_NORMAL
2038 || debug_info_level == DINFO_LEVEL_VERBOSE
2039 || write_symbols == DWARF2_DEBUG
2040 || write_symbols == VMS_AND_DWARF2_DEBUG
2041 || write_symbols == VMS_DEBUG)
2043 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2045 app_disable ();
2047 /* End of a symbol-block. */
2048 --block_depth;
2049 gcc_assert (block_depth >= 0);
2051 if (!DECL_IGNORED_P (current_function_decl))
2052 debug_hooks->end_block (high_block_linenum, n);
2054 if (write_symbols == DBX_DEBUG
2055 || write_symbols == SDB_DEBUG)
2057 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2058 location_t *locus_ptr
2059 = block_nonartificial_location (outer_block);
2061 if (locus_ptr != NULL)
2063 override_filename = LOCATION_FILE (*locus_ptr);
2064 override_linenum = LOCATION_LINE (*locus_ptr);
2066 else
2068 override_filename = NULL;
2069 override_linenum = 0;
2072 break;
2074 case NOTE_INSN_DELETED_LABEL:
2075 /* Emit the label. We may have deleted the CODE_LABEL because
2076 the label could be proved to be unreachable, though still
2077 referenced (in the form of having its address taken). */
2078 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2079 break;
2081 case NOTE_INSN_VAR_LOCATION:
2082 case NOTE_INSN_CALL_ARG_LOCATION:
2083 if (!DECL_IGNORED_P (current_function_decl))
2084 debug_hooks->var_location (insn);
2085 break;
2087 default:
2088 gcc_unreachable ();
2089 break;
2091 break;
2093 case BARRIER:
2094 if (dwarf2out_do_frame ())
2095 dwarf2out_frame_debug (insn, false);
2096 break;
2098 case CODE_LABEL:
2099 /* The target port might emit labels in the output function for
2100 some insn, e.g. sh.c output_branchy_insn. */
2101 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2103 int align = LABEL_TO_ALIGNMENT (insn);
2104 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2105 int max_skip = LABEL_TO_MAX_SKIP (insn);
2106 #endif
2108 if (align && NEXT_INSN (insn))
2110 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2111 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2112 #else
2113 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2114 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2115 #else
2116 ASM_OUTPUT_ALIGN (file, align);
2117 #endif
2118 #endif
2121 CC_STATUS_INIT;
2123 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2124 debug_hooks->label (insn);
2126 app_disable ();
2128 next = next_nonnote_insn (insn);
2129 /* If this label is followed by a jump-table, make sure we put
2130 the label in the read-only section. Also possibly write the
2131 label and jump table together. */
2132 if (next != 0 && JUMP_TABLE_DATA_P (next))
2134 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2135 /* In this case, the case vector is being moved by the
2136 target, so don't output the label at all. Leave that
2137 to the back end macros. */
2138 #else
2139 if (! JUMP_TABLES_IN_TEXT_SECTION)
2141 int log_align;
2143 switch_to_section (targetm.asm_out.function_rodata_section
2144 (current_function_decl));
2146 #ifdef ADDR_VEC_ALIGN
2147 log_align = ADDR_VEC_ALIGN (next);
2148 #else
2149 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2150 #endif
2151 ASM_OUTPUT_ALIGN (file, log_align);
2153 else
2154 switch_to_section (current_function_section ());
2156 #ifdef ASM_OUTPUT_CASE_LABEL
2157 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2158 next);
2159 #else
2160 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2161 #endif
2162 #endif
2163 break;
2165 if (LABEL_ALT_ENTRY_P (insn))
2166 output_alternate_entry_point (file, insn);
2167 else
2168 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2169 break;
2171 default:
2173 rtx body = PATTERN (insn);
2174 int insn_code_number;
2175 const char *templ;
2176 bool is_stmt;
2178 /* Reset this early so it is correct for ASM statements. */
2179 current_insn_predicate = NULL_RTX;
2181 /* An INSN, JUMP_INSN or CALL_INSN.
2182 First check for special kinds that recog doesn't recognize. */
2184 if (GET_CODE (body) == USE /* These are just declarations. */
2185 || GET_CODE (body) == CLOBBER)
2186 break;
2188 #ifdef HAVE_cc0
2190 /* If there is a REG_CC_SETTER note on this insn, it means that
2191 the setting of the condition code was done in the delay slot
2192 of the insn that branched here. So recover the cc status
2193 from the insn that set it. */
2195 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2196 if (note)
2198 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2199 cc_prev_status = cc_status;
2202 #endif
2204 /* Detect insns that are really jump-tables
2205 and output them as such. */
2207 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2209 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2210 int vlen, idx;
2211 #endif
2213 if (! JUMP_TABLES_IN_TEXT_SECTION)
2214 switch_to_section (targetm.asm_out.function_rodata_section
2215 (current_function_decl));
2216 else
2217 switch_to_section (current_function_section ());
2219 app_disable ();
2221 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2222 if (GET_CODE (body) == ADDR_VEC)
2224 #ifdef ASM_OUTPUT_ADDR_VEC
2225 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2226 #else
2227 gcc_unreachable ();
2228 #endif
2230 else
2232 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2233 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2234 #else
2235 gcc_unreachable ();
2236 #endif
2238 #else
2239 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2240 for (idx = 0; idx < vlen; idx++)
2242 if (GET_CODE (body) == ADDR_VEC)
2244 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2245 ASM_OUTPUT_ADDR_VEC_ELT
2246 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2247 #else
2248 gcc_unreachable ();
2249 #endif
2251 else
2253 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2254 ASM_OUTPUT_ADDR_DIFF_ELT
2255 (file,
2256 body,
2257 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2258 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2259 #else
2260 gcc_unreachable ();
2261 #endif
2264 #ifdef ASM_OUTPUT_CASE_END
2265 ASM_OUTPUT_CASE_END (file,
2266 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2267 insn);
2268 #endif
2269 #endif
2271 switch_to_section (current_function_section ());
2273 break;
2275 /* Output this line note if it is the first or the last line
2276 note in a row. */
2277 if (!DECL_IGNORED_P (current_function_decl)
2278 && notice_source_line (insn, &is_stmt))
2279 (*debug_hooks->source_line) (last_linenum, last_filename,
2280 last_discriminator, is_stmt);
2282 if (GET_CODE (body) == ASM_INPUT)
2284 const char *string = XSTR (body, 0);
2286 /* There's no telling what that did to the condition codes. */
2287 CC_STATUS_INIT;
2289 if (string[0])
2291 expanded_location loc;
2293 app_enable ();
2294 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2295 if (*loc.file && loc.line)
2296 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2297 ASM_COMMENT_START, loc.line, loc.file);
2298 fprintf (asm_out_file, "\t%s\n", string);
2299 #if HAVE_AS_LINE_ZERO
2300 if (*loc.file && loc.line)
2301 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2302 #endif
2304 break;
2307 /* Detect `asm' construct with operands. */
2308 if (asm_noperands (body) >= 0)
2310 unsigned int noperands = asm_noperands (body);
2311 rtx *ops = XALLOCAVEC (rtx, noperands);
2312 const char *string;
2313 location_t loc;
2314 expanded_location expanded;
2316 /* There's no telling what that did to the condition codes. */
2317 CC_STATUS_INIT;
2319 /* Get out the operand values. */
2320 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2321 /* Inhibit dying on what would otherwise be compiler bugs. */
2322 insn_noperands = noperands;
2323 this_is_asm_operands = insn;
2324 expanded = expand_location (loc);
2326 #ifdef FINAL_PRESCAN_INSN
2327 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2328 #endif
2330 /* Output the insn using them. */
2331 if (string[0])
2333 app_enable ();
2334 if (expanded.file && expanded.line)
2335 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2336 ASM_COMMENT_START, expanded.line, expanded.file);
2337 output_asm_insn (string, ops);
2338 #if HAVE_AS_LINE_ZERO
2339 if (expanded.file && expanded.line)
2340 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2341 #endif
2344 if (targetm.asm_out.final_postscan_insn)
2345 targetm.asm_out.final_postscan_insn (file, insn, ops,
2346 insn_noperands);
2348 this_is_asm_operands = 0;
2349 break;
2352 app_disable ();
2354 if (GET_CODE (body) == SEQUENCE)
2356 /* A delayed-branch sequence */
2357 int i;
2359 final_sequence = body;
2361 /* Record the delay slots' frame information before the branch.
2362 This is needed for delayed calls: see execute_cfa_program(). */
2363 if (dwarf2out_do_frame ())
2364 for (i = 1; i < XVECLEN (body, 0); i++)
2365 dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2367 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2368 force the restoration of a comparison that was previously
2369 thought unnecessary. If that happens, cancel this sequence
2370 and cause that insn to be restored. */
2372 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2373 if (next != XVECEXP (body, 0, 1))
2375 final_sequence = 0;
2376 return next;
2379 for (i = 1; i < XVECLEN (body, 0); i++)
2381 rtx insn = XVECEXP (body, 0, i);
2382 rtx next = NEXT_INSN (insn);
2383 /* We loop in case any instruction in a delay slot gets
2384 split. */
2386 insn = final_scan_insn (insn, file, 0, 1, seen);
2387 while (insn != next);
2389 #ifdef DBR_OUTPUT_SEQEND
2390 DBR_OUTPUT_SEQEND (file);
2391 #endif
2392 final_sequence = 0;
2394 /* If the insn requiring the delay slot was a CALL_INSN, the
2395 insns in the delay slot are actually executed before the
2396 called function. Hence we don't preserve any CC-setting
2397 actions in these insns and the CC must be marked as being
2398 clobbered by the function. */
2399 if (CALL_P (XVECEXP (body, 0, 0)))
2401 CC_STATUS_INIT;
2403 break;
2406 /* We have a real machine instruction as rtl. */
2408 body = PATTERN (insn);
2410 #ifdef HAVE_cc0
2411 set = single_set (insn);
2413 /* Check for redundant test and compare instructions
2414 (when the condition codes are already set up as desired).
2415 This is done only when optimizing; if not optimizing,
2416 it should be possible for the user to alter a variable
2417 with the debugger in between statements
2418 and the next statement should reexamine the variable
2419 to compute the condition codes. */
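/* Illustrative sketch only, not from the original sources: on a cc0
   target such as m68k, a move instruction that has just set the
   condition codes from the value being tested makes an immediately
   following tst/cmp of that same value redundant.  The checks below
   against cc_status.value1 and cc_status.value2 are what detect this
   so the compare can be ignored.  */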
2421 if (optimize_p)
2423 if (set
2424 && GET_CODE (SET_DEST (set)) == CC0
2425 && insn != last_ignored_compare)
2427 rtx src1, src2;
2428 if (GET_CODE (SET_SRC (set)) == SUBREG)
2429 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2431 src1 = SET_SRC (set);
2432 src2 = NULL_RTX;
2433 if (GET_CODE (SET_SRC (set)) == COMPARE)
2435 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2436 XEXP (SET_SRC (set), 0)
2437 = alter_subreg (&XEXP (SET_SRC (set), 0));
2438 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2439 XEXP (SET_SRC (set), 1)
2440 = alter_subreg (&XEXP (SET_SRC (set), 1));
2441 if (XEXP (SET_SRC (set), 1)
2442 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2443 src2 = XEXP (SET_SRC (set), 0);
2445 if ((cc_status.value1 != 0
2446 && rtx_equal_p (src1, cc_status.value1))
2447 || (cc_status.value2 != 0
2448 && rtx_equal_p (src1, cc_status.value2))
2449 || (src2 != 0 && cc_status.value1 != 0
2450 && rtx_equal_p (src2, cc_status.value1))
2451 || (src2 != 0 && cc_status.value2 != 0
2452 && rtx_equal_p (src2, cc_status.value2)))
2454 /* Don't delete insn if it has an addressing side-effect. */
2455 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2456 /* or if anything in it is volatile. */
2457 && ! volatile_refs_p (PATTERN (insn)))
2459 /* We don't really delete the insn; just ignore it. */
2460 last_ignored_compare = insn;
2461 break;
2467 /* If this is a conditional branch, maybe modify it
2468 if the cc's are in a nonstandard state
2469 so that it accomplishes the same thing that it would
2470 do straightforwardly if the cc's were set up normally. */
2472 if (cc_status.flags != 0
2473 && JUMP_P (insn)
2474 && GET_CODE (body) == SET
2475 && SET_DEST (body) == pc_rtx
2476 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2477 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2478 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2480 /* This function may alter the contents of its argument
2481 and clear some of the cc_status.flags bits.
2482 It may also return 1 meaning condition now always true
2483 or -1 meaning condition now always false
2484 or 2 meaning condition nontrivial but altered. */
2485 int result = alter_cond (XEXP (SET_SRC (body), 0));
2486 /* If condition now has fixed value, replace the IF_THEN_ELSE
2487 with its then-operand or its else-operand. */
2488 if (result == 1)
2489 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2490 if (result == -1)
2491 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2493 /* The jump is now either unconditional or a no-op.
2494 If it has become a no-op, don't try to output it.
2495 (It would not be recognized.) */
2496 if (SET_SRC (body) == pc_rtx)
2498 delete_insn (insn);
2499 break;
2501 else if (GET_CODE (SET_SRC (body)) == RETURN)
2502 /* Replace (set (pc) (return)) with (return). */
2503 PATTERN (insn) = body = SET_SRC (body);
2505 /* Rerecognize the instruction if it has changed. */
2506 if (result != 0)
2507 INSN_CODE (insn) = -1;
2510 /* If this is a conditional trap, maybe modify it if the cc's
2511 are in a nonstandard state so that it accomplishes the same
2512 thing that it would do straightforwardly if the cc's were
2513 set up normally. */
2514 if (cc_status.flags != 0
2515 && NONJUMP_INSN_P (insn)
2516 && GET_CODE (body) == TRAP_IF
2517 && COMPARISON_P (TRAP_CONDITION (body))
2518 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2520 /* This function may alter the contents of its argument
2521 and clear some of the cc_status.flags bits.
2522 It may also return 1 meaning condition now always true
2523 or -1 meaning condition now always false
2524 or 2 meaning condition nontrivial but altered. */
2525 int result = alter_cond (TRAP_CONDITION (body));
2527 /* If TRAP_CONDITION has become always false, delete the
2528 instruction. */
2529 if (result == -1)
2531 delete_insn (insn);
2532 break;
2535 /* If TRAP_CONDITION has become always true, replace
2536 TRAP_CONDITION with const_true_rtx. */
2537 if (result == 1)
2538 TRAP_CONDITION (body) = const_true_rtx;
2540 /* Rerecognize the instruction if it has changed. */
2541 if (result != 0)
2542 INSN_CODE (insn) = -1;
2545 /* Make same adjustments to instructions that examine the
2546 condition codes without jumping and instructions that
2547 handle conditional moves (if this machine has either one). */
2549 if (cc_status.flags != 0
2550 && set != 0)
2552 rtx cond_rtx, then_rtx, else_rtx;
2554 if (!JUMP_P (insn)
2555 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2557 cond_rtx = XEXP (SET_SRC (set), 0);
2558 then_rtx = XEXP (SET_SRC (set), 1);
2559 else_rtx = XEXP (SET_SRC (set), 2);
2561 else
2563 cond_rtx = SET_SRC (set);
2564 then_rtx = const_true_rtx;
2565 else_rtx = const0_rtx;
2568 switch (GET_CODE (cond_rtx))
2570 case GTU:
2571 case GT:
2572 case LTU:
2573 case LT:
2574 case GEU:
2575 case GE:
2576 case LEU:
2577 case LE:
2578 case EQ:
2579 case NE:
2581 int result;
2582 if (XEXP (cond_rtx, 0) != cc0_rtx)
2583 break;
2584 result = alter_cond (cond_rtx);
2585 if (result == 1)
2586 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2587 else if (result == -1)
2588 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2589 else if (result == 2)
2590 INSN_CODE (insn) = -1;
2591 if (SET_DEST (set) == SET_SRC (set))
2592 delete_insn (insn);
2594 break;
2596 default:
2597 break;
2601 #endif
2603 #ifdef HAVE_peephole
2604 /* Do machine-specific peephole optimizations if desired. */
2606 if (optimize_p && !flag_no_peephole && !nopeepholes)
2608 rtx next = peephole (insn);
2609 /* When peepholing, if there were notes within the peephole,
2610 emit them before the peephole. */
2611 if (next != 0 && next != NEXT_INSN (insn))
2613 rtx note, prev = PREV_INSN (insn);
2615 for (note = NEXT_INSN (insn); note != next;
2616 note = NEXT_INSN (note))
2617 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2619 /* Put the notes in the proper position for a later
2620 rescan. For example, the SH target can do this
2621 when generating a far jump in a delayed branch
2622 sequence. */
2623 note = NEXT_INSN (insn);
2624 PREV_INSN (note) = prev;
2625 NEXT_INSN (prev) = note;
2626 NEXT_INSN (PREV_INSN (next)) = insn;
2627 PREV_INSN (insn) = PREV_INSN (next);
2628 NEXT_INSN (insn) = next;
2629 PREV_INSN (next) = insn;
2632 /* PEEPHOLE might have changed this. */
2633 body = PATTERN (insn);
2635 #endif
2637 /* Try to recognize the instruction.
2638 If successful, verify that the operands satisfy the
2639 constraints for the instruction. Crash if they don't,
2640 since `reload' should have changed them so that they do. */
2642 insn_code_number = recog_memoized (insn);
2643 cleanup_subreg_operands (insn);
2645 /* Dump the insn in the assembly for debugging. */
2646 if (flag_dump_rtl_in_asm)
2648 print_rtx_head = ASM_COMMENT_START;
2649 print_rtl_single (asm_out_file, insn);
2650 print_rtx_head = "";
2653 if (! constrain_operands_cached (1))
2654 fatal_insn_not_found (insn);
2656 /* Some target machines need to prescan each insn before
2657 it is output. */
2659 #ifdef FINAL_PRESCAN_INSN
2660 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2661 #endif
2663 if (targetm.have_conditional_execution ()
2664 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2665 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2667 #ifdef HAVE_cc0
2668 cc_prev_status = cc_status;
2670 /* Update `cc_status' for this instruction.
2671 The instruction's output routine may change it further.
2672 If the output routine for a jump insn needs to depend
2673 on the cc status, it should look at cc_prev_status. */
2675 NOTICE_UPDATE_CC (body, insn);
2676 #endif
2678 current_output_insn = debug_insn = insn;
2680 if (CALL_P (insn) && dwarf2out_do_frame ())
2681 dwarf2out_frame_debug (insn, false);
2683 /* Find the proper template for this insn. */
2684 templ = get_insn_template (insn_code_number, insn);
2686 /* If the C code returns 0, it means that it is a jump insn
2687 which follows a deleted test insn, and that test insn
2688 needs to be reinserted. */
2689 if (templ == 0)
2691 rtx prev;
2693 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2695 /* We have already processed the notes between the setter and
2696 the user. Make sure we don't process them again; this is
2697 particularly important if one of the notes is a block
2698 scope note or an EH note. */
2699 for (prev = insn;
2700 prev != last_ignored_compare;
2701 prev = PREV_INSN (prev))
2703 if (NOTE_P (prev))
2704 delete_insn (prev); /* Use delete_note. */
2707 return prev;
2710 /* If the template is the string "#", it means that this insn must
2711 be split. */
2712 if (templ[0] == '#' && templ[1] == '\0')
2714 rtx new_rtx = try_split (body, insn, 0);
2716 /* If we didn't split the insn, go away. */
2717 if (new_rtx == insn && PATTERN (new_rtx) == body)
2718 fatal_insn ("could not split insn", insn);
2720 #ifdef HAVE_ATTR_length
2721 /* This instruction should have been split in shorten_branches,
2722 to ensure that we would have valid length info for the
2723 splitees. */
2724 gcc_unreachable ();
2725 #endif
2727 return new_rtx;
2730 /* ??? This will put the directives in the wrong place if
2731 get_insn_template outputs assembly directly. However, calling it
2732 before get_insn_template breaks if the insn is split. */
2733 if (targetm.asm_out.unwind_emit_before_insn
2734 && targetm.asm_out.unwind_emit)
2735 targetm.asm_out.unwind_emit (asm_out_file, insn);
2737 if (CALL_P (insn))
2739 rtx x = call_from_call_insn (insn);
2740 x = XEXP (x, 0);
2741 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2743 tree t;
2744 x = XEXP (x, 0);
2745 t = SYMBOL_REF_DECL (x);
2746 if (t)
2747 assemble_external (t);
2749 if (!DECL_IGNORED_P (current_function_decl))
2750 debug_hooks->var_location (insn);
2753 /* Output assembler code from the template. */
2754 output_asm_insn (templ, recog_data.operand);
2756 /* Some target machines need to postscan each insn after
2757 it is output. */
2758 if (targetm.asm_out.final_postscan_insn)
2759 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2760 recog_data.n_operands);
2762 /* If necessary, report the effect that the instruction has on
2763 the unwind info. We've already done this for delay slots
2764 and call instructions. */
2765 if (final_sequence == 0
2766 #if !defined (HAVE_prologue)
2767 && !ACCUMULATE_OUTGOING_ARGS
2768 #endif
2769 && dwarf2out_do_frame ())
2770 dwarf2out_frame_debug (insn, true);
2772 if (!targetm.asm_out.unwind_emit_before_insn
2773 && targetm.asm_out.unwind_emit)
2774 targetm.asm_out.unwind_emit (asm_out_file, insn);
2776 current_output_insn = debug_insn = 0;
2779 return NEXT_INSN (insn);
2782 /* Return whether a source line note needs to be emitted before INSN.
2783 Sets IS_STMT to TRUE if the line should be marked as a possible
2784 breakpoint location. */
2786 static bool
2787 notice_source_line (rtx insn, bool *is_stmt)
2789 const char *filename;
2790 int linenum;
2792 if (override_filename)
2794 filename = override_filename;
2795 linenum = override_linenum;
2797 else
2799 filename = insn_file (insn);
2800 linenum = insn_line (insn);
2803 if (filename == NULL)
2804 return false;
2806 if (force_source_line
2807 || filename != last_filename
2808 || last_linenum != linenum)
2810 force_source_line = false;
2811 last_filename = filename;
2812 last_linenum = linenum;
2813 last_discriminator = discriminator;
2814 *is_stmt = true;
2815 high_block_linenum = MAX (last_linenum, high_block_linenum);
2816 high_function_linenum = MAX (last_linenum, high_function_linenum);
2817 return true;
2820 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2822 /* If the discriminator changed, but the line number did not,
2823 output the line table entry with is_stmt false so the
2824 debugger does not treat this as a breakpoint location. */
2825 last_discriminator = discriminator;
2826 *is_stmt = false;
2827 return true;
2830 return false;
2833 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2834 directly to the desired hard register. */
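/* Illustrative sketch only (hypothetical operand, not from the
   sources): for an insn whose memory operand is
   (mem:SI (plus (subreg:SI (reg:DI 1) 0) (const_int 8))), the walk
   below rewrites the inner SUBREG to the hard register itself, giving
   (mem:SI (plus (reg:SI 1) (const_int 8))), and the insn is rescanned
   for dataflow if anything changed.  */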
2836 void
2837 cleanup_subreg_operands (rtx insn)
2839 int i;
2840 bool changed = false;
2841 extract_insn_cached (insn);
2842 for (i = 0; i < recog_data.n_operands; i++)
2844 /* The following test cannot use recog_data.operand when testing
2845 for a SUBREG: the underlying object might have been changed
2846 already if we are inside a match_operator expression that
2847 matches the else clause. Instead we test the underlying
2848 expression directly. */
2849 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2851 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2852 changed = true;
2854 else if (GET_CODE (recog_data.operand[i]) == PLUS
2855 || GET_CODE (recog_data.operand[i]) == MULT
2856 || MEM_P (recog_data.operand[i]))
2857 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2860 for (i = 0; i < recog_data.n_dups; i++)
2862 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2864 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2865 changed = true;
2867 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2868 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2869 || MEM_P (*recog_data.dup_loc[i]))
2870 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2872 if (changed)
2873 df_insn_rescan (insn);
2876 /* If X is a SUBREG, replace it with a REG or a MEM,
2877 based on the thing it is a subreg of. */
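/* Illustrative sketch only ("addr" is a placeholder address): a
   lowpart SUBREG of a hard register becomes the corresponding
   narrower hard register, while (subreg:QI (mem:SI addr) 3) becomes a
   QImode MEM at addr plus 3 bytes via adjust_address.  */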
2880 alter_subreg (rtx *xp)
2882 rtx x = *xp;
2883 rtx y = SUBREG_REG (x);
2885 /* simplify_subreg does not remove subreg from volatile references.
2886 We are required to. */
2887 if (MEM_P (y))
2889 int offset = SUBREG_BYTE (x);
2891 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2892 contains 0 instead of the proper offset. See simplify_subreg. */
2893 if (offset == 0
2894 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2896 int difference = GET_MODE_SIZE (GET_MODE (y))
2897 - GET_MODE_SIZE (GET_MODE (x));
2898 if (WORDS_BIG_ENDIAN)
2899 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2900 if (BYTES_BIG_ENDIAN)
2901 offset += difference % UNITS_PER_WORD;
2904 *xp = adjust_address (y, GET_MODE (x), offset);
2906 else
2908 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2909 SUBREG_BYTE (x));
2911 if (new_rtx != 0)
2912 *xp = new_rtx;
2913 else if (REG_P (y))
2915 /* Simplify_subreg can't handle some REG cases, but we have to. */
2916 unsigned int regno;
2917 HOST_WIDE_INT offset;
2919 regno = subreg_regno (x);
2920 if (subreg_lowpart_p (x))
2921 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
2922 else
2923 offset = SUBREG_BYTE (x);
2924 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
2928 return *xp;
2931 /* Do alter_subreg on all the SUBREGs contained in X. */
2933 static rtx
2934 walk_alter_subreg (rtx *xp, bool *changed)
2936 rtx x = *xp;
2937 switch (GET_CODE (x))
2939 case PLUS:
2940 case MULT:
2941 case AND:
2942 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2943 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2944 break;
2946 case MEM:
2947 case ZERO_EXTEND:
2948 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2949 break;
2951 case SUBREG:
2952 *changed = true;
2953 return alter_subreg (xp);
2955 default:
2956 break;
2959 return *xp;
2962 #ifdef HAVE_cc0
2964 /* Given BODY, the body of a jump instruction, alter the jump condition
2965 as required by the bits that are set in cc_status.flags.
2966 Not all of the bits there can be handled at this level in all cases.
2968 The value is normally 0.
2969 1 means that the condition has become always true.
2970 -1 means that the condition has become always false.
2971 2 means that COND has been altered. */
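/* Illustrative example: with CC_REVERSED set in cc_status.flags and
   COND being (gt (cc0) (const_int 0)), the code below rewrites COND in
   place to (lt (cc0) (const_int 0)) and, barring adjustments from the
   other flag bits, returns 2.  */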
2973 static int
2974 alter_cond (rtx cond)
2976 int value = 0;
2978 if (cc_status.flags & CC_REVERSED)
2980 value = 2;
2981 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2984 if (cc_status.flags & CC_INVERTED)
2986 value = 2;
2987 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2990 if (cc_status.flags & CC_NOT_POSITIVE)
2991 switch (GET_CODE (cond))
2993 case LE:
2994 case LEU:
2995 case GEU:
2996 /* Jump becomes unconditional. */
2997 return 1;
2999 case GT:
3000 case GTU:
3001 case LTU:
3002 /* Jump becomes no-op. */
3003 return -1;
3005 case GE:
3006 PUT_CODE (cond, EQ);
3007 value = 2;
3008 break;
3010 case LT:
3011 PUT_CODE (cond, NE);
3012 value = 2;
3013 break;
3015 default:
3016 break;
3019 if (cc_status.flags & CC_NOT_NEGATIVE)
3020 switch (GET_CODE (cond))
3022 case GE:
3023 case GEU:
3024 /* Jump becomes unconditional. */
3025 return 1;
3027 case LT:
3028 case LTU:
3029 /* Jump becomes no-op. */
3030 return -1;
3032 case LE:
3033 case LEU:
3034 PUT_CODE (cond, EQ);
3035 value = 2;
3036 break;
3038 case GT:
3039 case GTU:
3040 PUT_CODE (cond, NE);
3041 value = 2;
3042 break;
3044 default:
3045 break;
3048 if (cc_status.flags & CC_NO_OVERFLOW)
3049 switch (GET_CODE (cond))
3051 case GEU:
3052 /* Jump becomes unconditional. */
3053 return 1;
3055 case LEU:
3056 PUT_CODE (cond, EQ);
3057 value = 2;
3058 break;
3060 case GTU:
3061 PUT_CODE (cond, NE);
3062 value = 2;
3063 break;
3065 case LTU:
3066 /* Jump becomes no-op. */
3067 return -1;
3069 default:
3070 break;
3073 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3074 switch (GET_CODE (cond))
3076 default:
3077 gcc_unreachable ();
3079 case NE:
3080 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3081 value = 2;
3082 break;
3084 case EQ:
3085 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3086 value = 2;
3087 break;
3090 if (cc_status.flags & CC_NOT_SIGNED)
3091 /* The flags are valid if signed condition operators are converted
3092 to unsigned. */
3093 switch (GET_CODE (cond))
3095 case LE:
3096 PUT_CODE (cond, LEU);
3097 value = 2;
3098 break;
3100 case LT:
3101 PUT_CODE (cond, LTU);
3102 value = 2;
3103 break;
3105 case GT:
3106 PUT_CODE (cond, GTU);
3107 value = 2;
3108 break;
3110 case GE:
3111 PUT_CODE (cond, GEU);
3112 value = 2;
3113 break;
3115 default:
3116 break;
3119 return value;
3121 #endif
3123 /* Report inconsistency between the assembler template and the operands.
3124 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3126 void
3127 output_operand_lossage (const char *cmsgid, ...)
3129 char *fmt_string;
3130 char *new_message;
3131 const char *pfx_str;
3132 va_list ap;
3134 va_start (ap, cmsgid);
3136 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3137 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3138 vasprintf (&new_message, fmt_string, ap);
3140 if (this_is_asm_operands)
3141 error_for_asm (this_is_asm_operands, "%s", new_message);
3142 else
3143 internal_error ("%s", new_message);
3145 free (fmt_string);
3146 free (new_message);
3147 va_end (ap);
3150 /* Output of assembler code from a template, and its subroutines. */
3152 /* Annotate the assembly with a comment describing the pattern and
3153 alternative used. */
3155 static void
3156 output_asm_name (void)
3158 if (debug_insn)
3160 int num = INSN_CODE (debug_insn);
3161 fprintf (asm_out_file, "\t%s %d\t%s",
3162 ASM_COMMENT_START, INSN_UID (debug_insn),
3163 insn_data[num].name);
3164 if (insn_data[num].n_alternatives > 1)
3165 fprintf (asm_out_file, "/%d", which_alternative + 1);
3166 #ifdef HAVE_ATTR_length
3167 fprintf (asm_out_file, "\t[length = %d]",
3168 get_attr_length (debug_insn));
3169 #endif
3170 /* Clear this so only the first assembler insn
3171 of any rtl insn will get the special comment for -dp. */
3172 debug_insn = 0;
3176 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3177 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3178 corresponds to the address of the object and 0 if to the object. */
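/* Illustrative example: for a REG operand the REG_EXPR (if any) is
   returned directly; for a MEM with no MEM_EXPR whose address is a
   pointer register, the pointer variable's expr is returned instead
   and *PADDRESSP is set to 1 to record that it names the address
   rather than the object itself.  */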
3180 static tree
3181 get_mem_expr_from_op (rtx op, int *paddressp)
3183 tree expr;
3184 int inner_addressp;
3186 *paddressp = 0;
3188 if (REG_P (op))
3189 return REG_EXPR (op);
3190 else if (!MEM_P (op))
3191 return 0;
3193 if (MEM_EXPR (op) != 0)
3194 return MEM_EXPR (op);
3196 /* Otherwise we have an address, so indicate it and look at the address. */
3197 *paddressp = 1;
3198 op = XEXP (op, 0);
3200 /* First check if we have a decl for the address, then look at the right side
3201 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3202 But don't allow the address to itself be indirect. */
3203 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3204 return expr;
3205 else if (GET_CODE (op) == PLUS
3206 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3207 return expr;
3209 while (UNARY_P (op)
3210 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3211 op = XEXP (op, 0);
3213 expr = get_mem_expr_from_op (op, &inner_addressp);
3214 return inner_addressp ? 0 : expr;
3217 /* Output operand names for assembler instructions. OPERANDS is the
3218 operand vector, OPORDER is the order to write the operands, and NOPS
3219 is the number of operands to write. */
3221 static void
3222 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3224 int wrote = 0;
3225 int i;
3227 for (i = 0; i < nops; i++)
3229 int addressp;
3230 rtx op = operands[oporder[i]];
3231 tree expr = get_mem_expr_from_op (op, &addressp);
3233 fprintf (asm_out_file, "%c%s",
3234 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3235 wrote = 1;
3236 if (expr)
3238 fprintf (asm_out_file, "%s",
3239 addressp ? "*" : "");
3240 print_mem_expr (asm_out_file, expr);
3241 wrote = 1;
3243 else if (REG_P (op) && ORIGINAL_REGNO (op)
3244 && ORIGINAL_REGNO (op) != REGNO (op))
3245 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3249 /* Output text from TEMPLATE to the assembler output file,
3250 obeying %-directions to substitute operands taken from
3251 the vector OPERANDS.
3253 %N (for N a digit) means print operand N in usual manner.
3254 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3255 and print the label name with no punctuation.
3256 %cN means require operand N to be a constant
3257 and print the constant expression with no punctuation.
3258 %aN means expect operand N to be a memory address
3259 (not a memory reference!) and print a reference
3260 to that address.
3261 %nN means expect operand N to be a constant
3262 and print a constant expression for minus the value
3263 of the operand, with no other punctuation. */
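/* Illustrative example (the template text is hypothetical): given
   "add %1,%0" and an OPERANDS vector of two rtx's, the %1 and %0
   escapes are printed through output_operand and ultimately the
   target's TARGET_PRINT_OPERAND hook, while "%l2" would print operand
   2 as a label name and "%n3" would print minus the value of a
   constant operand 3.  */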
3265 void
3266 output_asm_insn (const char *templ, rtx *operands)
3268 const char *p;
3269 int c;
3270 #ifdef ASSEMBLER_DIALECT
3271 int dialect = 0;
3272 #endif
3273 int oporder[MAX_RECOG_OPERANDS];
3274 char opoutput[MAX_RECOG_OPERANDS];
3275 int ops = 0;
3277 /* An insn may return a null string template
3278 in a case where no assembler code is needed. */
3279 if (*templ == 0)
3280 return;
3282 memset (opoutput, 0, sizeof opoutput);
3283 p = templ;
3284 putc ('\t', asm_out_file);
3286 #ifdef ASM_OUTPUT_OPCODE
3287 ASM_OUTPUT_OPCODE (asm_out_file, p);
3288 #endif
3290 while ((c = *p++))
3291 switch (c)
3293 case '\n':
3294 if (flag_verbose_asm)
3295 output_asm_operand_names (operands, oporder, ops);
3296 if (flag_print_asm_name)
3297 output_asm_name ();
3299 ops = 0;
3300 memset (opoutput, 0, sizeof opoutput);
3302 putc (c, asm_out_file);
3303 #ifdef ASM_OUTPUT_OPCODE
3304 while ((c = *p) == '\t')
3306 putc (c, asm_out_file);
3307 p++;
3309 ASM_OUTPUT_OPCODE (asm_out_file, p);
3310 #endif
3311 break;
3313 #ifdef ASSEMBLER_DIALECT
3314 case '{':
3316 int i;
3318 if (dialect)
3319 output_operand_lossage ("nested assembly dialect alternatives");
3320 else
3321 dialect = 1;
3323 /* If we want the first dialect, do nothing. Otherwise, skip
3324 DIALECT_NUMBER of strings ending with '|'. */
3325 for (i = 0; i < dialect_number; i++)
3327 while (*p && *p != '}' && *p++ != '|')
3329 if (*p == '}')
3330 break;
3331 if (*p == '|')
3332 p++;
3335 if (*p == '\0')
3336 output_operand_lossage ("unterminated assembly dialect alternative");
3338 break;
3340 case '|':
3341 if (dialect)
3343 /* Skip to close brace. */
3346 if (*p == '\0')
3348 output_operand_lossage ("unterminated assembly dialect alternative");
3349 break;
3352 while (*p++ != '}');
3353 dialect = 0;
3355 else
3356 putc (c, asm_out_file);
3357 break;
3359 case '}':
3360 if (! dialect)
3361 putc (c, asm_out_file);
3362 dialect = 0;
3363 break;
3364 #endif
3366 case '%':
3367 /* %% outputs a single %. */
3368 if (*p == '%')
3370 p++;
3371 putc (c, asm_out_file);
3373 /* %= outputs a number which is unique to each insn in the entire
3374 compilation. This is useful for making local labels that are
3375 referred to more than once in a given insn. */
3376 else if (*p == '=')
3378 p++;
3379 fprintf (asm_out_file, "%d", insn_counter);
3381 /* % followed by a letter and some digits
3382 outputs an operand in a special way depending on the letter.
3383 Letters `acln' are implemented directly.
3384 Other letters are passed to `output_operand' so that
3385 the TARGET_PRINT_OPERAND hook can define them. */
3386 else if (ISALPHA (*p))
3388 int letter = *p++;
3389 unsigned long opnum;
3390 char *endptr;
3392 opnum = strtoul (p, &endptr, 10);
3394 if (endptr == p)
3395 output_operand_lossage ("operand number missing "
3396 "after %%-letter");
3397 else if (this_is_asm_operands && opnum >= insn_noperands)
3398 output_operand_lossage ("operand number out of range");
3399 else if (letter == 'l')
3400 output_asm_label (operands[opnum]);
3401 else if (letter == 'a')
3402 output_address (operands[opnum]);
3403 else if (letter == 'c')
3405 if (CONSTANT_ADDRESS_P (operands[opnum]))
3406 output_addr_const (asm_out_file, operands[opnum]);
3407 else
3408 output_operand (operands[opnum], 'c');
3410 else if (letter == 'n')
3412 if (CONST_INT_P (operands[opnum]))
3413 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3414 - INTVAL (operands[opnum]));
3415 else
3417 putc ('-', asm_out_file);
3418 output_addr_const (asm_out_file, operands[opnum]);
3421 else
3422 output_operand (operands[opnum], letter);
3424 if (!opoutput[opnum])
3425 oporder[ops++] = opnum;
3426 opoutput[opnum] = 1;
3428 p = endptr;
3429 c = *p;
3431 /* % followed by a digit outputs an operand the default way. */
3432 else if (ISDIGIT (*p))
3434 unsigned long opnum;
3435 char *endptr;
3437 opnum = strtoul (p, &endptr, 10);
3438 if (this_is_asm_operands && opnum >= insn_noperands)
3439 output_operand_lossage ("operand number out of range");
3440 else
3441 output_operand (operands[opnum], 0);
3443 if (!opoutput[opnum])
3444 oporder[ops++] = opnum;
3445 opoutput[opnum] = 1;
3447 p = endptr;
3448 c = *p;
3450 /* % followed by punctuation: output something for that
3451 punctuation character alone, with no operand. The
3452 TARGET_PRINT_OPERAND hook decides what is actually done. */
3453 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3454 output_operand (NULL_RTX, *p++);
3455 else
3456 output_operand_lossage ("invalid %%-code");
3457 break;
3459 default:
3460 putc (c, asm_out_file);
3463 /* Write out the variable names for operands, if we know them. */
3464 if (flag_verbose_asm)
3465 output_asm_operand_names (operands, oporder, ops);
3466 if (flag_print_asm_name)
3467 output_asm_name ();
3469 putc ('\n', asm_out_file);
3472 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3474 void
3475 output_asm_label (rtx x)
3477 char buf[256];
3479 if (GET_CODE (x) == LABEL_REF)
3480 x = XEXP (x, 0);
3481 if (LABEL_P (x)
3482 || (NOTE_P (x)
3483 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3484 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3485 else
3486 output_operand_lossage ("'%%l' operand isn't a label");
3488 assemble_name (asm_out_file, buf);
3491 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3492 output_operand. Marks SYMBOL_REFs as referenced through use of
3493 assemble_external. */
3495 static int
3496 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3498 rtx x = *xp;
3500 /* If we have a used symbol, we may have to emit assembly
3501 annotations corresponding to whether the symbol is external, weak
3502 or has non-default visibility. */
3503 if (GET_CODE (x) == SYMBOL_REF)
3505 tree t;
3507 t = SYMBOL_REF_DECL (x);
3508 if (t)
3509 assemble_external (t);
3511 return -1;
3514 return 0;
3517 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3519 void
3520 mark_symbol_refs_as_used (rtx x)
3522 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3525 /* Print operand X using machine-dependent assembler syntax.
3526 CODE is a non-digit that preceded the operand-number in the % spec,
3527 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3528 between the % and the digits.
3529 When CODE is a non-letter, X is 0.
3531 The meanings of the letters are machine-dependent and controlled
3532 by TARGET_PRINT_OPERAND. */
3534 void
3535 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3537 if (x && GET_CODE (x) == SUBREG)
3538 x = alter_subreg (&x);
3540 /* X must not be a pseudo reg. */
3541 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3543 targetm.asm_out.print_operand (asm_out_file, x, code);
3545 if (x == NULL_RTX)
3546 return;
3548 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3551 /* Print a memory reference operand for address X using
3552 machine-dependent assembler syntax. */
3554 void
3555 output_address (rtx x)
3557 bool changed = false;
3558 walk_alter_subreg (&x, &changed);
3559 targetm.asm_out.print_operand_address (asm_out_file, x);
3562 /* Print an integer constant expression in assembler syntax.
3563 Addition and subtraction are the only arithmetic
3564 that may appear in these expressions. */
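/* Illustrative example: (const_int -42) is printed as "-42", and
   (plus (symbol_ref ("foo")) (const_int 4)) is printed, modulo any
   target label prefixing, as "foo+4"; the PLUS case below reorders a
   leading integer term so that it always comes last, for assemblers
   such as masm.  */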
3566 void
3567 output_addr_const (FILE *file, rtx x)
3569 char buf[256];
3571 restart:
3572 switch (GET_CODE (x))
3574 case PC:
3575 putc ('.', file);
3576 break;
3578 case SYMBOL_REF:
3579 if (SYMBOL_REF_DECL (x))
3580 assemble_external (SYMBOL_REF_DECL (x));
3581 #ifdef ASM_OUTPUT_SYMBOL_REF
3582 ASM_OUTPUT_SYMBOL_REF (file, x);
3583 #else
3584 assemble_name (file, XSTR (x, 0));
3585 #endif
3586 break;
3588 case LABEL_REF:
3589 x = XEXP (x, 0);
3590 /* Fall through. */
3591 case CODE_LABEL:
3592 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3593 #ifdef ASM_OUTPUT_LABEL_REF
3594 ASM_OUTPUT_LABEL_REF (file, buf);
3595 #else
3596 assemble_name (file, buf);
3597 #endif
3598 break;
3600 case CONST_INT:
3601 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3602 break;
3604 case CONST:
3605 /* This used to output parentheses around the expression,
3606 but that does not work on the 386 (either ATT or BSD assembler). */
3607 output_addr_const (file, XEXP (x, 0));
3608 break;
3610 case CONST_DOUBLE:
3611 if (GET_MODE (x) == VOIDmode)
3613 /* We can use %d if the number is one word and positive. */
3614 if (CONST_DOUBLE_HIGH (x))
3615 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3616 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3617 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3618 else if (CONST_DOUBLE_LOW (x) < 0)
3619 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3620 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3621 else
3622 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3624 else
3625 /* We can't handle floating point constants;
3626 PRINT_OPERAND must handle them. */
3627 output_operand_lossage ("floating constant misused");
3628 break;
3630 case CONST_FIXED:
3631 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3632 (unsigned HOST_WIDE_INT) CONST_FIXED_VALUE_LOW (x));
3633 break;
3635 case PLUS:
3636 /* Some assemblers need integer constants to appear last (e.g. masm). */
3637 if (CONST_INT_P (XEXP (x, 0)))
3639 output_addr_const (file, XEXP (x, 1));
3640 if (INTVAL (XEXP (x, 0)) >= 0)
3641 fprintf (file, "+");
3642 output_addr_const (file, XEXP (x, 0));
3644 else
3646 output_addr_const (file, XEXP (x, 0));
3647 if (!CONST_INT_P (XEXP (x, 1))
3648 || INTVAL (XEXP (x, 1)) >= 0)
3649 fprintf (file, "+");
3650 output_addr_const (file, XEXP (x, 1));
3652 break;
3654 case MINUS:
3655 /* Avoid outputting things like x-x or x+5-x,
3656 since some assemblers can't handle that. */
3657 x = simplify_subtraction (x);
3658 if (GET_CODE (x) != MINUS)
3659 goto restart;
3661 output_addr_const (file, XEXP (x, 0));
3662 fprintf (file, "-");
3663 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3664 || GET_CODE (XEXP (x, 1)) == PC
3665 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3666 output_addr_const (file, XEXP (x, 1));
3667 else
3669 fputs (targetm.asm_out.open_paren, file);
3670 output_addr_const (file, XEXP (x, 1));
3671 fputs (targetm.asm_out.close_paren, file);
3673 break;
3675 case ZERO_EXTEND:
3676 case SIGN_EXTEND:
3677 case SUBREG:
3678 case TRUNCATE:
3679 output_addr_const (file, XEXP (x, 0));
3680 break;
3682 default:
3683 if (targetm.asm_out.output_addr_const_extra (file, x))
3684 break;
3686 output_operand_lossage ("invalid expression as operand");
3690 /* Output a quoted string. */
3692 void
3693 output_quoted_string (FILE *asm_file, const char *string)
3695 #ifdef OUTPUT_QUOTED_STRING
3696 OUTPUT_QUOTED_STRING (asm_file, string);
3697 #else
3698 char c;
3700 putc ('\"', asm_file);
3701 while ((c = *string++) != 0)
3703 if (ISPRINT (c))
3705 if (c == '\"' || c == '\\')
3706 putc ('\\', asm_file);
3707 putc (c, asm_file);
3709 else
3710 fprintf (asm_file, "\\%03o", (unsigned char) c);
3712 putc ('\"', asm_file);
3713 #endif
3716 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3717 %R prints the value of REGISTER_PREFIX.
3718 %L prints the value of LOCAL_LABEL_PREFIX.
3719 %U prints the value of USER_LABEL_PREFIX.
3720 %I prints the value of IMMEDIATE_PREFIX.
3721 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3722 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3724 We handle alternate assembler dialects here, just like output_asm_insn. */
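/* Hypothetical usage sketch (the operands are illustrative only):

       asm_fprintf (file, "\tmove %R%s, %I%wd\n",
                    reg_names[REGNO (dest)], (HOST_WIDE_INT) val);

   prints a tab, "move ", REGISTER_PREFIX, the register's name, ", ",
   IMMEDIATE_PREFIX and the HOST_WIDE_INT value in decimal.  */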
3726 void
3727 asm_fprintf (FILE *file, const char *p, ...)
3729 char buf[10];
3730 char *q, c;
3731 va_list argptr;
3733 va_start (argptr, p);
3735 buf[0] = '%';
3737 while ((c = *p++))
3738 switch (c)
3740 #ifdef ASSEMBLER_DIALECT
3741 case '{':
3743 int i;
3745 /* If we want the first dialect, do nothing. Otherwise, skip
3746 DIALECT_NUMBER of strings ending with '|'. */
3747 for (i = 0; i < dialect_number; i++)
3749 while (*p && *p++ != '|')
3752 if (*p == '|')
3753 p++;
3756 break;
3758 case '|':
3759 /* Skip to close brace. */
3760 while (*p && *p++ != '}')
3762 break;
3764 case '}':
3765 break;
3766 #endif
3768 case '%':
3769 c = *p++;
3770 q = &buf[1];
3771 while (strchr ("-+ #0", c))
3773 *q++ = c;
3774 c = *p++;
3776 while (ISDIGIT (c) || c == '.')
3778 *q++ = c;
3779 c = *p++;
3781 switch (c)
3783 case '%':
3784 putc ('%', file);
3785 break;
3787 case 'd': case 'i': case 'u':
3788 case 'x': case 'X': case 'o':
3789 case 'c':
3790 *q++ = c;
3791 *q = 0;
3792 fprintf (file, buf, va_arg (argptr, int));
3793 break;
3795 case 'w':
3796 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3797 'o' cases, but we do not check for those cases. It
3798 means that the value is a HOST_WIDE_INT, which may be
3799 either `long' or `long long'. */
3800 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3801 q += strlen (HOST_WIDE_INT_PRINT);
3802 *q++ = *p++;
3803 *q = 0;
3804 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3805 break;
3807 case 'l':
3808 *q++ = c;
3809 #ifdef HAVE_LONG_LONG
3810 if (*p == 'l')
3812 *q++ = *p++;
3813 *q++ = *p++;
3814 *q = 0;
3815 fprintf (file, buf, va_arg (argptr, long long));
3817 else
3818 #endif
3820 *q++ = *p++;
3821 *q = 0;
3822 fprintf (file, buf, va_arg (argptr, long));
3825 break;
3827 case 's':
3828 *q++ = c;
3829 *q = 0;
3830 fprintf (file, buf, va_arg (argptr, char *));
3831 break;
3833 case 'O':
3834 #ifdef ASM_OUTPUT_OPCODE
3835 ASM_OUTPUT_OPCODE (asm_out_file, p);
3836 #endif
3837 break;
3839 case 'R':
3840 #ifdef REGISTER_PREFIX
3841 fprintf (file, "%s", REGISTER_PREFIX);
3842 #endif
3843 break;
3845 case 'I':
3846 #ifdef IMMEDIATE_PREFIX
3847 fprintf (file, "%s", IMMEDIATE_PREFIX);
3848 #endif
3849 break;
3851 case 'L':
3852 #ifdef LOCAL_LABEL_PREFIX
3853 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3854 #endif
3855 break;
3857 case 'U':
3858 fputs (user_label_prefix, file);
3859 break;
3861 #ifdef ASM_FPRINTF_EXTENSIONS
3862 /* Uppercase letters are reserved for general use by asm_fprintf
3863 and so are not available to target specific code. In order to
3864 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3865 they are defined here. As they get turned into real extensions
3866 to asm_fprintf they should be removed from this list. */
3867 case 'A': case 'B': case 'C': case 'D': case 'E':
3868 case 'F': case 'G': case 'H': case 'J': case 'K':
3869 case 'M': case 'N': case 'P': case 'Q': case 'S':
3870 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3871 break;
3873 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3874 #endif
3875 default:
3876 gcc_unreachable ();
3878 break;
3880 default:
3881 putc (c, file);
3883 va_end (argptr);
3886 /* Split up a CONST_DOUBLE or integer constant rtx
3887 into two rtx's for single words,
3888 storing in *FIRST the word that comes first in memory in the target
3889 and in *SECOND the other. */
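/* Illustrative example: with 32-bit words and a 64-bit HOST_WIDE_INT,
   splitting (const_int 0x100000002) yields the word-sized pieces 2
   and 1; on a little-endian target *FIRST becomes (const_int 2) and
   *SECOND (const_int 1), and the two are swapped when
   WORDS_BIG_ENDIAN.  */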
3891 void
3892 split_double (rtx value, rtx *first, rtx *second)
3894 if (CONST_INT_P (value))
3896 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3898 /* In this case the CONST_INT holds both target words.
3899 Extract the bits from it into two word-sized pieces.
3900 Sign extend each half to HOST_WIDE_INT. */
3901 unsigned HOST_WIDE_INT low, high;
3902 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3903 unsigned bits_per_word = BITS_PER_WORD;
3905 /* Set sign_bit to the most significant bit of a word. */
3906 sign_bit = 1;
3907 sign_bit <<= bits_per_word - 1;
3909 /* Set mask so that all bits of the word are set. We could
3910 have used 1 << BITS_PER_WORD instead of basing the
3911 calculation on sign_bit. However, on machines where
3912 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3913 compiler warning, even though the code would never be
3914 executed. */
3915 mask = sign_bit << 1;
3916 mask--;
3918 /* Set sign_extend as any remaining bits. */
3919 sign_extend = ~mask;
3921 /* Pick the lower word and sign-extend it. */
3922 low = INTVAL (value);
3923 low &= mask;
3924 if (low & sign_bit)
3925 low |= sign_extend;
3927 /* Pick the higher word, shifted to the least significant
3928 bits, and sign-extend it. */
3929 high = INTVAL (value);
3930 high >>= bits_per_word - 1;
3931 high >>= 1;
3932 high &= mask;
3933 if (high & sign_bit)
3934 high |= sign_extend;
3936 /* Store the words in the target machine order. */
3937 if (WORDS_BIG_ENDIAN)
3939 *first = GEN_INT (high);
3940 *second = GEN_INT (low);
3942 else
3944 *first = GEN_INT (low);
3945 *second = GEN_INT (high);
3948 else
3950 /* The rule for using CONST_INT for a wider mode
3951 is that we regard the value as signed.
3952 So sign-extend it. */
3953 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3954 if (WORDS_BIG_ENDIAN)
3956 *first = high;
3957 *second = value;
3959 else
3961 *first = value;
3962 *second = high;
3966 else if (GET_CODE (value) != CONST_DOUBLE)
3968 if (WORDS_BIG_ENDIAN)
3970 *first = const0_rtx;
3971 *second = value;
3973 else
3975 *first = value;
3976 *second = const0_rtx;
3979 else if (GET_MODE (value) == VOIDmode
3980 /* This is the old way we did CONST_DOUBLE integers. */
3981 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3983 /* In an integer, the words are defined as most and least significant.
3984 So order them by the target's convention. */
3985 if (WORDS_BIG_ENDIAN)
3987 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3988 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3990 else
3992 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3993 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3996 else
3998 REAL_VALUE_TYPE r;
3999 long l[2];
4000 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
4002 /* Note, this converts the REAL_VALUE_TYPE to the target's
4003 format, splits up the floating point double and outputs
4004 exactly 32 bits of it into each of l[0] and l[1] --
4005 not necessarily BITS_PER_WORD bits. */
4006 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
4008 /* If 32 bits is an entire word for the target, but not for the host,
4009 then sign-extend on the host so that the number will look the same
4010 way on the host that it would on the target. See for instance
4011 simplify_unary_operation. The #if is needed to avoid compiler
4012 warnings. */
4014 #if HOST_BITS_PER_LONG > 32
4015 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
4017 if (l[0] & ((long) 1 << 31))
4018 l[0] |= ((long) (-1) << 32);
4019 if (l[1] & ((long) 1 << 31))
4020 l[1] |= ((long) (-1) << 32);
4022 #endif
4024 *first = GEN_INT (l[0]);
4025 *second = GEN_INT (l[1]);
4029 /* Return nonzero if this function has no function calls. */
4032 leaf_function_p (void)
4034 rtx insn;
4035 rtx link;
4037 if (crtl->profile || profile_arc_flag)
4038 return 0;
4040 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4042 if (CALL_P (insn)
4043 && ! SIBLING_CALL_P (insn))
4044 return 0;
4045 if (NONJUMP_INSN_P (insn)
4046 && GET_CODE (PATTERN (insn)) == SEQUENCE
4047 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4048 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4049 return 0;
4051 for (link = crtl->epilogue_delay_list;
4052 link;
4053 link = XEXP (link, 1))
4055 insn = XEXP (link, 0);
4057 if (CALL_P (insn)
4058 && ! SIBLING_CALL_P (insn))
4059 return 0;
4060 if (NONJUMP_INSN_P (insn)
4061 && GET_CODE (PATTERN (insn)) == SEQUENCE
4062 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4063 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4064 return 0;
4067 return 1;
4070 /* Return 1 if branch is a forward branch.
4071 Uses insn_shuid array, so it works only in the final pass. May be used by
4072 output templates to add branch prediction hints where customary. */
4075 final_forward_branch_p (rtx insn)
4077 int insn_id, label_id;
4079 gcc_assert (uid_shuid);
4080 insn_id = INSN_SHUID (insn);
4081 label_id = INSN_SHUID (JUMP_LABEL (insn));
4082 /* We've hit some insns that do not have id information available. */
4083 gcc_assert (insn_id && label_id);
4084 return insn_id < label_id;
4087 /* On some machines, a function with no call insns
4088 can run faster if it doesn't create its own register window.
4089 When output, the leaf function should use only the "output"
4090 registers. Ordinarily, the function would be compiled to use
4091 the "input" registers to find its arguments; it is a candidate
4092 for leaf treatment if it uses only the "input" registers.
4093 Leaf function treatment means renumbering so the function
4094 uses the "output" registers instead. */
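/* For instance, on SPARC a leaf function can avoid allocating a
   register window: there LEAF_REGISTERS marks the registers a leaf
   function may use and LEAF_REG_REMAP maps each input register onto
   the corresponding output register, which is what leaf_renumber_regs
   below applies.  (Illustrative summary; the authoritative
   definitions live in the target headers.)  */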
4096 #ifdef LEAF_REGISTERS
4098 /* Return 1 if this function uses only the registers that can be
4099 safely renumbered. */
4102 only_leaf_regs_used (void)
4104 int i;
4105 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4107 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4108 if ((df_regs_ever_live_p (i) || global_regs[i])
4109 && ! permitted_reg_in_leaf_functions[i])
4110 return 0;
4112 if (crtl->uses_pic_offset_table
4113 && pic_offset_table_rtx != 0
4114 && REG_P (pic_offset_table_rtx)
4115 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4116 return 0;
4118 return 1;
4121 /* Scan all instructions and renumber all registers into those
4122 available in leaf functions. */
4124 static void
4125 leaf_renumber_regs (rtx first)
4127 rtx insn;
4129 /* Renumber only the actual patterns.
4130 The reg-notes can contain frame pointer refs,
4131 and renumbering them could crash, and should not be needed. */
4132 for (insn = first; insn; insn = NEXT_INSN (insn))
4133 if (INSN_P (insn))
4134 leaf_renumber_regs_insn (PATTERN (insn));
4135 for (insn = crtl->epilogue_delay_list;
4136 insn;
4137 insn = XEXP (insn, 1))
4138 if (INSN_P (XEXP (insn, 0)))
4139 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
4142 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4143 available in leaf functions. */
4145 void
4146 leaf_renumber_regs_insn (rtx in_rtx)
4148 int i, j;
4149 const char *format_ptr;
4151 if (in_rtx == 0)
4152 return;
4154 /* Renumber all input-registers into output-registers.
4155 renumbered_regs would be 1 for an output-register. */
4158 if (REG_P (in_rtx))
4160 int newreg;
4162 /* Don't renumber the same reg twice. */
4163 if (in_rtx->used)
4164 return;
4166 newreg = REGNO (in_rtx);
4167 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4168 to reach here as part of a REG_NOTE. */
4169 if (newreg >= FIRST_PSEUDO_REGISTER)
4171 in_rtx->used = 1;
4172 return;
4174 newreg = LEAF_REG_REMAP (newreg);
4175 gcc_assert (newreg >= 0);
4176 df_set_regs_ever_live (REGNO (in_rtx), false);
4177 df_set_regs_ever_live (newreg, true);
4178 SET_REGNO (in_rtx, newreg);
4179 in_rtx->used = 1;
4182 if (INSN_P (in_rtx))
4184 /* Inside a SEQUENCE, we find insns.
4185 Renumber just the patterns of these insns,
4186 just as we do for the top-level insns. */
4187 leaf_renumber_regs_insn (PATTERN (in_rtx));
4188 return;
4191 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4193 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4194 switch (*format_ptr++)
4196 case 'e':
4197 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4198 break;
4200 case 'E':
4201 if (NULL != XVEC (in_rtx, i))
4203 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4204 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4206 break;
4208 case 'S':
4209 case 's':
4210 case '0':
4211 case 'i':
4212 case 'w':
4213 case 'n':
4214 case 'u':
4215 break;
4217 default:
4218 gcc_unreachable ();
4221 #endif
4223 /* Turn the RTL into assembly. */
4224 static unsigned int
4225 rest_of_handle_final (void)
4227 rtx x;
4228 const char *fnname;
4230 /* Get the function's name, as described by its RTL. This may be
4231 different from the DECL_NAME name used in the source file. */
4233 x = DECL_RTL (current_function_decl);
4234 gcc_assert (MEM_P (x));
4235 x = XEXP (x, 0);
4236 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4237 fnname = XSTR (x, 0);
4239 assemble_start_function (current_function_decl, fnname);
4240 final_start_function (get_insns (), asm_out_file, optimize);
4241 final (get_insns (), asm_out_file, optimize);
4242 final_end_function ();
4244 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4245 directive that closes the procedure descriptor. The same ordering applies to x64 SEH.
4246 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4247 output_function_exception_table (fnname);
4249 assemble_end_function (current_function_decl, fnname);
4251 user_defined_section_attribute = false;
4253 /* Free up reg info memory. */
4254 free_reg_info ();
4256 if (! quiet_flag)
4257 fflush (asm_out_file);
4259 /* Write DBX symbols if requested. */
4261 /* Note that for those inline functions where we don't initially
4262 know for certain that we will be generating an out-of-line copy,
4263 the first invocation of this routine (rest_of_compilation) will
4264 skip over this code by doing a `goto exit_rest_of_compilation;'.
4265 Later on, wrapup_global_declarations will (indirectly) call
4266 rest_of_compilation again for those inline functions that need
4267 to have out-of-line copies generated. During that call, we
4268 *will* be routed past here. */
4270 timevar_push (TV_SYMOUT);
4271 if (!DECL_IGNORED_P (current_function_decl))
4272 debug_hooks->function_decl (current_function_decl);
4273 timevar_pop (TV_SYMOUT);
4275 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4276 DECL_INITIAL (current_function_decl) = error_mark_node;
4278 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4279 && targetm.have_ctors_dtors)
4280 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4281 decl_init_priority_lookup
4282 (current_function_decl));
4283 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4284 && targetm.have_ctors_dtors)
4285 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4286 decl_fini_priority_lookup
4287 (current_function_decl));
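/* On a typical ELF target these hooks usually just record the function's
   address in the constructor/destructor tables (.ctors/.dtors or
   .init_array/.fini_array), honouring the looked-up priority; the exact
   directives emitted are entirely target-dependent.  */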
4288 return 0;
4289 }
4291 struct rtl_opt_pass pass_final =
4292 {
4293 {
4294 RTL_PASS,
4295 "final", /* name */
4296 NULL, /* gate */
4297 rest_of_handle_final, /* execute */
4298 NULL, /* sub */
4299 NULL, /* next */
4300 0, /* static_pass_number */
4301 TV_FINAL, /* tv_id */
4302 0, /* properties_required */
4303 0, /* properties_provided */
4304 0, /* properties_destroyed */
4305 0, /* todo_flags_start */
4306 TODO_ggc_collect /* todo_flags_finish */
4307 }
4308 };
4311 static unsigned int
4312 rest_of_handle_shorten_branches (void)
4313 {
4314 /* Shorten branches. */
4315 shorten_branches (get_insns ());
4316 return 0;
4317 }
4319 struct rtl_opt_pass pass_shorten_branches =
4320 {
4321 {
4322 RTL_PASS,
4323 "shorten", /* name */
4324 NULL, /* gate */
4325 rest_of_handle_shorten_branches, /* execute */
4326 NULL, /* sub */
4327 NULL, /* next */
4328 0, /* static_pass_number */
4329 TV_FINAL, /* tv_id */
4330 0, /* properties_required */
4331 0, /* properties_provided */
4332 0, /* properties_destroyed */
4333 0, /* todo_flags_start */
4334 TODO_dump_func /* todo_flags_finish */
4335 }
4336 };
4339 static unsigned int
4340 rest_of_clean_state (void)
4341 {
4342 rtx insn, next;
4343 FILE *final_output = NULL;
4344 int save_unnumbered = flag_dump_unnumbered;
4345 int save_noaddr = flag_dump_noaddr;
4347 if (flag_dump_final_insns)
4348 {
4349 final_output = fopen (flag_dump_final_insns, "a");
4350 if (!final_output)
4351 {
4352 error ("could not open final insn dump file %qs: %m",
4353 flag_dump_final_insns);
4354 flag_dump_final_insns = NULL;
4355 }
4356 else
4357 {
4358 const char *aname;
4359 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4361 aname = (IDENTIFIER_POINTER
4362 (DECL_ASSEMBLER_NAME (current_function_decl)));
4363 fprintf (final_output, "\n;; Function (%s) %s\n\n", aname,
4364 node->frequency == NODE_FREQUENCY_HOT
4365 ? " (hot)"
4366 : node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
4367 ? " (unlikely executed)"
4368 : node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
4369 ? " (executed once)"
4370 : "");
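/* For example, a hot function whose assembler name is "foo" gets the dump
   header ";; Function (foo)  (hot)"; a normal-frequency function gets an
   empty trailing annotation.  */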
4372 flag_dump_noaddr = flag_dump_unnumbered = 1;
4373 if (flag_compare_debug_opt || flag_compare_debug)
4374 dump_flags |= TDF_NOUID;
4375 final_insns_dump_p = true;
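/* This dump backs the -fcompare-debug machinery: addresses and UIDs are
   suppressed here so that the streams produced by two compilations (with
   and without debug info) can be compared directly.  */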
4377 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4378 if (LABEL_P (insn))
4379 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4380 else
4381 {
4382 if (NOTE_P (insn))
4383 set_block_for_insn (insn, NULL);
4384 INSN_UID (insn) = 0;
4385 }
4386 }
4387 }
4389 /* It is very important to decompose the RTL instruction chain here:
4390 debug information keeps pointing into CODE_LABEL insns inside the function
4391 body. If these remain pointing to the other insns, we end up preserving
4392 the whole RTL chain and the attached detailed debug info in memory. */
4393 for (insn = get_insns (); insn; insn = next)
4394 {
4395 next = NEXT_INSN (insn);
4396 NEXT_INSN (insn) = NULL;
4397 PREV_INSN (insn) = NULL;
4399 if (final_output
4400 && (!NOTE_P (insn) ||
4401 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4402 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4403 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4404 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4405 && NOTE_KIND (insn) != NOTE_INSN_CFA_RESTORE_STATE)))
4406 print_rtl_single (final_output, insn);
4407 }
4409 if (final_output)
4410 {
4411 flag_dump_noaddr = save_noaddr;
4412 flag_dump_unnumbered = save_unnumbered;
4413 final_insns_dump_p = false;
4415 if (fclose (final_output))
4416 {
4417 error ("could not close final insn dump file %qs: %m",
4418 flag_dump_final_insns);
4419 flag_dump_final_insns = NULL;
4420 }
4421 }
4423 /* In case the function was not output,
4424 don't leave any temporary anonymous types
4425 queued up for sdb output. */
4426 #ifdef SDB_DEBUGGING_INFO
4427 if (write_symbols == SDB_DEBUG)
4428 sdbout_types (NULL_TREE);
4429 #endif
4431 flag_rerun_cse_after_global_opts = 0;
4432 reload_completed = 0;
4433 epilogue_completed = 0;
4434 #ifdef STACK_REGS
4435 regstack_completed = 0;
4436 #endif
4438 /* Clear out the insn_length contents now that they are no
4439 longer valid. */
4440 init_insn_lengths ();
4442 /* Show no temporary slots allocated. */
4443 init_temp_slots ();
4445 free_bb_for_insn ();
4447 delete_tree_ssa ();
4449 /* We can reduce stack alignment at call sites only when we are sure that
4450 the function body just produced will actually be used in the final
4451 executable. */
4452 if (decl_binds_to_current_def_p (current_function_decl))
4453 {
4454 unsigned int pref = crtl->preferred_stack_boundary;
4455 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4456 pref = crtl->stack_alignment_needed;
4457 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4458 = pref;
4459 }
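/* E.g. with a 128-bit preferred boundary but a local that pushed
   stack_alignment_needed up to 256 bits, the value recorded for callers is
   max (128, 256) = 256 bits; otherwise the smaller preferred boundary is
   kept.  */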
4461 /* Make sure volatile mem refs aren't considered valid operands for
4462 arithmetic insns. We must call this here if this is a nested inline
4463 function, since the above code leaves us in the init_recog state,
4464 and the function context push/pop code does not save/restore volatile_ok.
4466 ??? Maybe it isn't necessary for expand_start_function to call this
4467 anymore if we do it here? */
4469 init_recog_no_volatile ();
4471 /* We're done with this function. Free up memory if we can. */
4472 free_after_parsing (cfun);
4473 free_after_compilation (cfun);
4474 return 0;
4475 }
4477 struct rtl_opt_pass pass_clean_state =
4478 {
4479 {
4480 RTL_PASS,
4481 "*clean_state", /* name */
4482 NULL, /* gate */
4483 rest_of_clean_state, /* execute */
4484 NULL, /* sub */
4485 NULL, /* next */
4486 0, /* static_pass_number */
4487 TV_FINAL, /* tv_id */
4488 0, /* properties_required */
4489 0, /* properties_provided */
4490 PROP_rtl, /* properties_destroyed */
4491 0, /* todo_flags_start */
4492 0 /* todo_flags_finish */