1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011, 2012
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This is the final pass of the compiler.
24 It looks at the rtl code for a function and outputs assembler code.
26 Call `final_start_function' to output the assembler code for function entry,
27 `final' to output assembler code for some RTL code,
28 `final_end_function' to output assembler code for function exit.
29 If a function is compiled in several pieces, each piece is
30 output separately with `final'.
32 Some optimizations are also done at this level.
33 Move instructions that were made unnecessary by good register allocation
34 are detected and omitted from the output. (Though most of these
35 are removed by the last jump pass.)
37 Instructions to set the condition codes are omitted when it can be
38 seen that the condition codes already had the desired values.
40 In some cases it is sufficient if the inherited condition codes
41 have related values, but this may require the following insn
42 (the one that tests the condition codes) to be modified.
44 The code for the function prologue and epilogue are generated
45 directly in assembler by the target functions function_prologue and
46 function_epilogue. Those instructions never exist as rtl. */
48 #include "config.h"
49 #include "system.h"
50 #include "coretypes.h"
51 #include "tm.h"
53 #include "tree.h"
54 #include "rtl.h"
55 #include "tm_p.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
59 #include "recog.h"
60 #include "conditions.h"
61 #include "flags.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "rtl-error.h"
67 #include "toplev.h" /* exact_log2, floor_log2 */
68 #include "reload.h"
69 #include "intl.h"
70 #include "basic-block.h"
71 #include "target.h"
72 #include "targhooks.h"
73 #include "debug.h"
74 #include "expr.h"
75 #include "tree-pass.h"
76 #include "tree-flow.h"
77 #include "cgraph.h"
78 #include "coverage.h"
79 #include "df.h"
80 #include "ggc.h"
81 #include "cfgloop.h"
82 #include "params.h"
83 #include "tree-pretty-print.h" /* for dump_function_header */
85 #ifdef XCOFF_DEBUGGING_INFO
86 #include "xcoffout.h" /* Needed for external data
87 declarations for e.g. AIX 4.x. */
88 #endif
90 #include "dwarf2out.h"
92 #ifdef DBX_DEBUGGING_INFO
93 #include "dbxout.h"
94 #endif
96 #ifdef SDB_DEBUGGING_INFO
97 #include "sdbout.h"
98 #endif
100 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
101 So define a null default for it to save conditionalization later. */
102 #ifndef CC_STATUS_INIT
103 #define CC_STATUS_INIT
104 #endif
106 /* Is the given character a logical line separator for the assembler? */
107 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
108 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
109 #endif
111 #ifndef JUMP_TABLES_IN_TEXT_SECTION
112 #define JUMP_TABLES_IN_TEXT_SECTION 0
113 #endif
115 /* Bitflags used by final_scan_insn. */
116 #define SEEN_BB 1
117 #define SEEN_NOTE 2
118 #define SEEN_EMITTED 4
120 /* Last insn processed by final_scan_insn. */
121 static rtx debug_insn;
122 rtx current_output_insn;
124 /* Line number of last NOTE. */
125 static int last_linenum;
127 /* Last discriminator written to assembly. */
128 static int last_discriminator;
130 /* Discriminator of current block. */
131 static int discriminator;
133 /* Highest line number in current block. */
134 static int high_block_linenum;
136 /* Likewise for function. */
137 static int high_function_linenum;
139 /* Filename of last NOTE. */
140 static const char *last_filename;
142 /* Override filename and line number. */
143 static const char *override_filename;
144 static int override_linenum;
146 /* Whether to force emission of a line note before the next insn. */
147 static bool force_source_line = false;
149 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
151 /* Nonzero while outputting an `asm' with operands.
152 This means that inconsistencies are the user's fault, so don't die.
153 The precise value is the insn being output, to pass to error_for_asm. */
154 rtx this_is_asm_operands;
156 /* Number of operands of this insn, for an `asm' with operands. */
157 static unsigned int insn_noperands;
159 /* Compare optimization flag. */
161 static rtx last_ignored_compare = 0;
163 /* Assign a unique number to each insn that is output.
164 This can be used to generate unique local labels. */
166 static int insn_counter = 0;
168 #ifdef HAVE_cc0
169 /* This variable contains machine-dependent flags (defined in tm.h)
170 set and examined by output routines
171 that describe how to interpret the condition codes properly. */
173 CC_STATUS cc_status;
175 /* During output of an insn, this contains a copy of cc_status
176 from before the insn. */
178 CC_STATUS cc_prev_status;
179 #endif
181 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
183 static int block_depth;
185 /* Nonzero if have enabled APP processing of our assembler output. */
187 static int app_on;
189 /* If we are outputting an insn sequence, this contains the sequence rtx.
190 Zero otherwise. */
192 rtx final_sequence;
194 #ifdef ASSEMBLER_DIALECT
196 /* Number of the assembler dialect to use, starting at 0. */
197 static int dialect_number;
198 #endif
200 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
201 rtx current_insn_predicate;
203 /* True if printing into -fdump-final-insns= dump. */
204 bool final_insns_dump_p;
206 static int asm_insn_count (rtx);
207 static void profile_function (FILE *);
208 static void profile_after_prologue (FILE *);
209 static bool notice_source_line (rtx, bool *);
210 static rtx walk_alter_subreg (rtx *, bool *);
211 static void output_asm_name (void);
212 static void output_alternate_entry_point (FILE *, rtx);
213 static tree get_mem_expr_from_op (rtx, int *);
214 static void output_asm_operand_names (rtx *, int *, int);
215 #ifdef LEAF_REGISTERS
216 static void leaf_renumber_regs (rtx);
217 #endif
218 #ifdef HAVE_cc0
219 static int alter_cond (rtx);
220 #endif
221 #ifndef ADDR_VEC_ALIGN
222 static int final_addr_vec_align (rtx);
223 #endif
224 static int align_fuzz (rtx, rtx, int, unsigned);
226 /* Initialize data in final at the beginning of a compilation. */
228 void
229 init_final (const char *filename ATTRIBUTE_UNUSED)
231 app_on = 0;
232 final_sequence = 0;
234 #ifdef ASSEMBLER_DIALECT
235 dialect_number = ASSEMBLER_DIALECT;
236 #endif
239 /* Default target function prologue and epilogue assembler output.
241 If not overridden for epilogue code, then the function body itself
242 contains return instructions wherever needed. */
243 void
244 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
245 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
249 void
250 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
251 tree decl ATTRIBUTE_UNUSED,
252 bool new_is_cold ATTRIBUTE_UNUSED)
256 /* Default target hook that outputs nothing to a stream. */
257 void
258 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
262 /* Enable APP processing of subsequent output.
263 Used before the output from an `asm' statement. */
265 void
266 app_enable (void)
268 if (! app_on)
270 fputs (ASM_APP_ON, asm_out_file);
271 app_on = 1;
275 /* Disable APP processing of subsequent output.
276 Called from varasm.c before most kinds of output. */
278 void
279 app_disable (void)
281 if (app_on)
283 fputs (ASM_APP_OFF, asm_out_file);
284 app_on = 0;
288 /* Return the number of slots filled in the current
289 delayed branch sequence (we don't count the insn needing the
290 delay slot). Zero if not in a delayed branch sequence. */
292 #ifdef DELAY_SLOTS
294 dbr_sequence_length (void)
296 if (final_sequence != 0)
297 return XVECLEN (final_sequence, 0) - 1;
298 else
299 return 0;
301 #endif
303 /* The next two pages contain routines used to compute the length of an insn
304 and to shorten branches. */
306 /* Arrays for insn lengths, and addresses. The latter is referenced by
307 `insn_current_length'. */
309 static int *insn_lengths;
311 vec<int> insn_addresses_;
313 /* Max uid for which the above arrays are valid. */
314 static int insn_lengths_max_uid;
316 /* Address of insn being processed. Used by `insn_current_length'. */
317 int insn_current_address;
319 /* Address of insn being processed in previous iteration. */
320 int insn_last_address;
322 /* known invariant alignment of insn being processed. */
323 int insn_current_align;
325 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
326 gives the next following alignment insn that increases the known
327 alignment, or NULL_RTX if there is no such insn.
328 For any alignment obtained this way, we can again index uid_align with
329 its uid to obtain the next following align that in turn increases the
330 alignment, till we reach NULL_RTX; the sequence obtained this way
331 for each insn we'll call the alignment chain of this insn in the following
332 comments. */
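/* Editor's illustration (not part of the original file; the helper and its
   parameters are hypothetical): a minimal sketch of walking the alignment
   chain described above for a given INSN, given a uid_align table and a
   callback returning the known alignment of an alignment insn.  */

static int
example_max_alignment_on_chain (rtx insn, rtx *uid_align_tab,
                                int (*align_of) (rtx))
{
  rtx a;
  int best = 0;

  for (a = uid_align_tab[INSN_UID (insn)]; a; a = uid_align_tab[INSN_UID (a)])
    if (align_of (a) > best)
      best = align_of (a);
  return best;
}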
334 struct label_alignment
336 short alignment;
337 short max_skip;
340 static rtx *uid_align;
341 static int *uid_shuid;
342 static struct label_alignment *label_align;
344 /* Indicate that branch shortening hasn't yet been done. */
346 void
347 init_insn_lengths (void)
349 if (uid_shuid)
351 free (uid_shuid);
352 uid_shuid = 0;
354 if (insn_lengths)
356 free (insn_lengths);
357 insn_lengths = 0;
358 insn_lengths_max_uid = 0;
360 if (HAVE_ATTR_length)
361 INSN_ADDRESSES_FREE ();
362 if (uid_align)
364 free (uid_align);
365 uid_align = 0;
369 /* Obtain the current length of an insn. If branch shortening has been done,
370 get its actual length. Otherwise, use FALLBACK_FN to calculate the
371 length. */
372 static inline int
373 get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
375 rtx body;
376 int i;
377 int length = 0;
379 if (!HAVE_ATTR_length)
380 return 0;
382 if (insn_lengths_max_uid > INSN_UID (insn))
383 return insn_lengths[INSN_UID (insn)];
384 else
385 switch (GET_CODE (insn))
387 case NOTE:
388 case BARRIER:
389 case CODE_LABEL:
390 case DEBUG_INSN:
391 return 0;
393 case CALL_INSN:
394 length = fallback_fn (insn);
395 break;
397 case JUMP_INSN:
398 body = PATTERN (insn);
399 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
401 /* Alignment is machine-dependent and should be handled by
402 ADDR_VEC_ALIGN. */
404 else
405 length = fallback_fn (insn);
406 break;
408 case INSN:
409 body = PATTERN (insn);
410 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
411 return 0;
413 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
414 length = asm_insn_count (body) * fallback_fn (insn);
415 else if (GET_CODE (body) == SEQUENCE)
416 for (i = 0; i < XVECLEN (body, 0); i++)
417 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
418 else
419 length = fallback_fn (insn);
420 break;
422 default:
423 break;
426 #ifdef ADJUST_INSN_LENGTH
427 ADJUST_INSN_LENGTH (insn, length);
428 #endif
429 return length;
432 /* Obtain the current length of an insn. If branch shortening has been done,
433 get its actual length. Otherwise, get its maximum length. */
435 get_attr_length (rtx insn)
437 return get_attr_length_1 (insn, insn_default_length);
440 /* Obtain the current length of an insn. If branch shortening has been done,
441 get its actual length. Otherwise, get its minimum length. */
443 get_attr_min_length (rtx insn)
445 return get_attr_length_1 (insn, insn_min_length);
448 /* Code to handle alignment inside shorten_branches. */
 450 /* Here is an explanation of how the algorithm in align_fuzz can give
451 proper results:
453 Call a sequence of instructions beginning with alignment point X
454 and continuing until the next alignment point `block X'. When `X'
455 is used in an expression, it means the alignment value of the
456 alignment point.
458 Call the distance between the start of the first insn of block X, and
459 the end of the last insn of block X `IX', for the `inner size of X'.
460 This is clearly the sum of the instruction lengths.
462 Likewise with the next alignment-delimited block following X, which we
463 shall call block Y.
465 Call the distance between the start of the first insn of block X, and
466 the start of the first insn of block Y `OX', for the `outer size of X'.
468 The estimated padding is then OX - IX.
470 OX can be safely estimated as
472 if (X >= Y)
473 OX = round_up(IX, Y)
474 else
475 OX = round_up(IX, X) + Y - X
477 Clearly est(IX) >= real(IX), because that only depends on the
478 instruction lengths, and those being overestimated is a given.
480 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
481 we needn't worry about that when thinking about OX.
483 When X >= Y, the alignment provided by Y adds no uncertainty factor
484 for branch ranges starting before X, so we can just round what we have.
485 But when X < Y, we don't know anything about the, so to speak,
486 `middle bits', so we have to assume the worst when aligning up from an
487 address mod X to one mod Y, which is Y - X. */
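/* Editor's illustration (not part of the original file; the helper below is
   a hypothetical sketch, not used anywhere): the OX estimate above, written
   out for power-of-two alignments X and Y given in bytes.  */

static inline int
example_estimate_outer_size (int ix, int x_align, int y_align)
{
  /* X >= Y: the stricter alignment X already guarantees Y, so just round
     the inner size up to Y.  */
  if (x_align >= y_align)
    return (ix + y_align - 1) & -y_align;
  /* X < Y: round up to what X guarantees, then assume the worst-case extra
     padding of Y - X to reach a Y-aligned address.  */
  return ((ix + x_align - 1) & -x_align) + y_align - x_align;
}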
489 #ifndef LABEL_ALIGN
490 #define LABEL_ALIGN(LABEL) align_labels_log
491 #endif
493 #ifndef LOOP_ALIGN
494 #define LOOP_ALIGN(LABEL) align_loops_log
495 #endif
497 #ifndef LABEL_ALIGN_AFTER_BARRIER
498 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
499 #endif
501 #ifndef JUMP_ALIGN
502 #define JUMP_ALIGN(LABEL) align_jumps_log
503 #endif
506 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
508 return 0;
512 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
514 return align_loops_max_skip;
518 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
520 return align_labels_max_skip;
524 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
526 return align_jumps_max_skip;
529 #ifndef ADDR_VEC_ALIGN
530 static int
531 final_addr_vec_align (rtx addr_vec)
533 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
535 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
536 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
537 return exact_log2 (align);
541 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
542 #endif
544 #ifndef INSN_LENGTH_ALIGNMENT
545 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
546 #endif
548 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
550 static int min_labelno, max_labelno;
552 #define LABEL_TO_ALIGNMENT(LABEL) \
553 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
555 #define LABEL_TO_MAX_SKIP(LABEL) \
556 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
558 /* For the benefit of port specific code do this also as a function. */
561 label_to_alignment (rtx label)
563 if (CODE_LABEL_NUMBER (label) <= max_labelno)
564 return LABEL_TO_ALIGNMENT (label);
565 return 0;
569 label_to_max_skip (rtx label)
571 if (CODE_LABEL_NUMBER (label) <= max_labelno)
572 return LABEL_TO_MAX_SKIP (label);
573 return 0;
576 /* The differences in addresses
577 between a branch and its target might grow or shrink depending on
578 the alignment the start insn of the range (the branch for a forward
579 branch or the label for a backward branch) starts out on; if these
580 differences are used naively, they can even oscillate infinitely.
581 We therefore want to compute a 'worst case' address difference that
 582 is independent of the alignment the start insn of the range ends
583 up on, and that is at least as large as the actual difference.
584 The function align_fuzz calculates the amount we have to add to the
585 naively computed difference, by traversing the part of the alignment
586 chain of the start insn of the range that is in front of the end insn
587 of the range, and considering for each alignment the maximum amount
588 that it might contribute to a size increase.
590 For casesi tables, we also want to know worst case minimum amounts of
591 address difference, in case a machine description wants to introduce
592 some common offset that is added to all offsets in a table.
593 For this purpose, align_fuzz with a growth argument of 0 computes the
594 appropriate adjustment. */
596 /* Compute the maximum delta by which the difference of the addresses of
597 START and END might grow / shrink due to a different address for start
598 which changes the size of alignment insns between START and END.
599 KNOWN_ALIGN_LOG is the alignment known for START.
600 GROWTH should be ~0 if the objective is to compute potential code size
601 increase, and 0 if the objective is to compute potential shrink.
602 The return value is undefined for any other value of GROWTH. */
604 static int
605 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
607 int uid = INSN_UID (start);
608 rtx align_label;
609 int known_align = 1 << known_align_log;
610 int end_shuid = INSN_SHUID (end);
611 int fuzz = 0;
613 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
615 int align_addr, new_align;
617 uid = INSN_UID (align_label);
618 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
619 if (uid_shuid[uid] > end_shuid)
620 break;
621 known_align_log = LABEL_TO_ALIGNMENT (align_label);
622 new_align = 1 << known_align_log;
623 if (new_align < known_align)
624 continue;
625 fuzz += (-align_addr ^ growth) & (new_align - known_align);
626 known_align = new_align;
628 return fuzz;
631 /* Compute a worst-case reference address of a branch so that it
632 can be safely used in the presence of aligned labels. Since the
633 size of the branch itself is unknown, the size of the branch is
634 not included in the range. I.e. for a forward branch, the reference
635 address is the end address of the branch as known from the previous
636 branch shortening pass, minus a value to account for possible size
637 increase due to alignment. For a backward branch, it is the start
638 address of the branch as known from the current pass, plus a value
639 to account for possible size increase due to alignment.
640 NB.: Therefore, the maximum offset allowed for backward branches needs
641 to exclude the branch size. */
644 insn_current_reference_address (rtx branch)
646 rtx dest, seq;
647 int seq_uid;
649 if (! INSN_ADDRESSES_SET_P ())
650 return 0;
652 seq = NEXT_INSN (PREV_INSN (branch));
653 seq_uid = INSN_UID (seq);
654 if (!JUMP_P (branch))
655 /* This can happen for example on the PA; the objective is to know the
656 offset to address something in front of the start of the function.
657 Thus, we can treat it like a backward branch.
658 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
659 any alignment we'd encounter, so we skip the call to align_fuzz. */
660 return insn_current_address;
661 dest = JUMP_LABEL (branch);
663 /* BRANCH has no proper alignment chain set, so use SEQ.
664 BRANCH also has no INSN_SHUID. */
665 if (INSN_SHUID (seq) < INSN_SHUID (dest))
667 /* Forward branch. */
668 return (insn_last_address + insn_lengths[seq_uid]
669 - align_fuzz (seq, dest, length_unit_log, ~0));
671 else
673 /* Backward branch. */
674 return (insn_current_address
675 + align_fuzz (dest, seq, length_unit_log, ~0));
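/* Editor's note, a worked example with hypothetical numbers: for a forward
   branch whose SEQ started at address 100 in the previous pass
   (insn_last_address = 100), with insn_lengths[seq_uid] = 4 and
   align_fuzz (seq, dest, length_unit_log, ~0) = 6, the reference address is
   100 + 4 - 6 = 98.  Using this lower address makes the assumed distance to
   DEST as large as possible, which is the safe, worst-case assumption for
   range checking.  */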
679 /* Compute branch alignments based on frequency information in the
680 CFG. */
682 unsigned int
683 compute_alignments (void)
685 int log, max_skip, max_log;
686 basic_block bb;
687 int freq_max = 0;
688 int freq_threshold = 0;
690 if (label_align)
692 free (label_align);
693 label_align = 0;
696 max_labelno = max_label_num ();
697 min_labelno = get_first_label_num ();
698 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
700 /* If not optimizing or optimizing for size, don't assign any alignments. */
701 if (! optimize || optimize_function_for_size_p (cfun))
702 return 0;
704 if (dump_file)
706 dump_reg_info (dump_file);
707 dump_flow_info (dump_file, TDF_DETAILS);
708 flow_loops_dump (dump_file, NULL, 1);
710 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
711 FOR_EACH_BB (bb)
712 if (bb->frequency > freq_max)
713 freq_max = bb->frequency;
714 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
716 if (dump_file)
717 fprintf(dump_file, "freq_max: %i\n",freq_max);
718 FOR_EACH_BB (bb)
720 rtx label = BB_HEAD (bb);
721 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
722 edge e;
723 edge_iterator ei;
725 if (!LABEL_P (label)
726 || optimize_bb_for_size_p (bb))
728 if (dump_file)
729 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
730 bb->index, bb->frequency, bb->loop_father->num,
731 bb_loop_depth (bb));
732 continue;
734 max_log = LABEL_ALIGN (label);
735 max_skip = targetm.asm_out.label_align_max_skip (label);
737 FOR_EACH_EDGE (e, ei, bb->preds)
739 if (e->flags & EDGE_FALLTHRU)
740 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
741 else
742 branch_frequency += EDGE_FREQUENCY (e);
744 if (dump_file)
746 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
747 bb->index, bb->frequency, bb->loop_father->num,
748 bb_loop_depth (bb),
749 fallthru_frequency, branch_frequency);
750 if (!bb->loop_father->inner && bb->loop_father->num)
751 fprintf (dump_file, " inner_loop");
752 if (bb->loop_father->header == bb)
753 fprintf (dump_file, " loop_header");
754 fprintf (dump_file, "\n");
 757 /* There are two purposes for aligning a block with no incoming fallthru edge:
758 1) to avoid fetch stalls when branch destination is near cache boundary
759 2) to improve cache efficiency in case the previous block is not executed
760 (so it does not need to be in the cache).
 762 To catch the first case, we align frequently executed blocks.
763 To catch the second, we align blocks that are executed more frequently
764 than the predecessor and the predecessor is likely to not be executed
765 when function is called. */
767 if (!has_fallthru
768 && (branch_frequency > freq_threshold
769 || (bb->frequency > bb->prev_bb->frequency * 10
770 && (bb->prev_bb->frequency
771 <= ENTRY_BLOCK_PTR->frequency / 2))))
773 log = JUMP_ALIGN (label);
774 if (dump_file)
775 fprintf(dump_file, " jump alignment added.\n");
776 if (max_log < log)
778 max_log = log;
779 max_skip = targetm.asm_out.jump_align_max_skip (label);
 782 /* In case the block is frequent and reached mostly by a non-fallthru edge,
 783 align it. It is most likely the first block of a loop. */
784 if (has_fallthru
785 && optimize_bb_for_speed_p (bb)
786 && branch_frequency + fallthru_frequency > freq_threshold
787 && (branch_frequency
788 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
790 log = LOOP_ALIGN (label);
791 if (dump_file)
792 fprintf(dump_file, " internal loop alignment added.\n");
793 if (max_log < log)
795 max_log = log;
796 max_skip = targetm.asm_out.loop_align_max_skip (label);
799 LABEL_TO_ALIGNMENT (label) = max_log;
800 LABEL_TO_MAX_SKIP (label) = max_skip;
803 loop_optimizer_finalize ();
804 free_dominance_info (CDI_DOMINATORS);
805 return 0;
808 struct rtl_opt_pass pass_compute_alignments =
811 RTL_PASS,
812 "alignments", /* name */
813 OPTGROUP_NONE, /* optinfo_flags */
814 NULL, /* gate */
815 compute_alignments, /* execute */
816 NULL, /* sub */
817 NULL, /* next */
818 0, /* static_pass_number */
819 TV_NONE, /* tv_id */
820 0, /* properties_required */
821 0, /* properties_provided */
822 0, /* properties_destroyed */
823 0, /* todo_flags_start */
824 TODO_verify_rtl_sharing
825 | TODO_ggc_collect /* todo_flags_finish */
830 /* Make a pass over all insns and compute their actual lengths by shortening
831 any branches of variable length if possible. */
833 /* shorten_branches might be called multiple times: for example, the SH
834 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
835 In order to do this, it needs proper length information, which it obtains
836 by calling shorten_branches. This cannot be collapsed with
837 shorten_branches itself into a single pass unless we also want to integrate
838 reorg.c, since the branch splitting exposes new instructions with delay
839 slots. */
841 void
842 shorten_branches (rtx first)
844 rtx insn;
845 int max_uid;
846 int i;
847 int max_log;
848 int max_skip;
849 #define MAX_CODE_ALIGN 16
850 rtx seq;
851 int something_changed = 1;
852 char *varying_length;
853 rtx body;
854 int uid;
855 rtx align_tab[MAX_CODE_ALIGN];
857 /* Compute maximum UID and allocate label_align / uid_shuid. */
858 max_uid = get_max_uid ();
860 /* Free uid_shuid before reallocating it. */
861 free (uid_shuid);
863 uid_shuid = XNEWVEC (int, max_uid);
865 if (max_labelno != max_label_num ())
867 int old = max_labelno;
868 int n_labels;
869 int n_old_labels;
871 max_labelno = max_label_num ();
873 n_labels = max_labelno - min_labelno + 1;
874 n_old_labels = old - min_labelno + 1;
876 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
 878 /* The range of labels grows monotonically in the function. Failing here
 879 means that the initialization of the array got lost. */
880 gcc_assert (n_old_labels <= n_labels);
882 memset (label_align + n_old_labels, 0,
883 (n_labels - n_old_labels) * sizeof (struct label_alignment));
886 /* Initialize label_align and set up uid_shuid to be strictly
887 monotonically rising with insn order. */
888 /* We use max_log here to keep track of the maximum alignment we want to
889 impose on the next CODE_LABEL (or the current one if we are processing
890 the CODE_LABEL itself). */
892 max_log = 0;
893 max_skip = 0;
895 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
897 int log;
899 INSN_SHUID (insn) = i++;
900 if (INSN_P (insn))
901 continue;
903 if (LABEL_P (insn))
905 rtx next;
906 bool next_is_jumptable;
908 /* Merge in alignments computed by compute_alignments. */
909 log = LABEL_TO_ALIGNMENT (insn);
910 if (max_log < log)
912 max_log = log;
913 max_skip = LABEL_TO_MAX_SKIP (insn);
916 next = next_nonnote_insn (insn);
917 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
918 if (!next_is_jumptable)
920 log = LABEL_ALIGN (insn);
921 if (max_log < log)
923 max_log = log;
924 max_skip = targetm.asm_out.label_align_max_skip (insn);
927 /* ADDR_VECs only take room if read-only data goes into the text
928 section. */
929 if ((JUMP_TABLES_IN_TEXT_SECTION
930 || readonly_data_section == text_section)
931 && next_is_jumptable)
933 log = ADDR_VEC_ALIGN (next);
934 if (max_log < log)
936 max_log = log;
937 max_skip = targetm.asm_out.label_align_max_skip (insn);
940 LABEL_TO_ALIGNMENT (insn) = max_log;
941 LABEL_TO_MAX_SKIP (insn) = max_skip;
942 max_log = 0;
943 max_skip = 0;
945 else if (BARRIER_P (insn))
947 rtx label;
949 for (label = insn; label && ! INSN_P (label);
950 label = NEXT_INSN (label))
951 if (LABEL_P (label))
953 log = LABEL_ALIGN_AFTER_BARRIER (insn);
954 if (max_log < log)
956 max_log = log;
957 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
959 break;
963 if (!HAVE_ATTR_length)
964 return;
966 /* Allocate the rest of the arrays. */
967 insn_lengths = XNEWVEC (int, max_uid);
968 insn_lengths_max_uid = max_uid;
969 /* Syntax errors can lead to labels being outside of the main insn stream.
970 Initialize insn_addresses, so that we get reproducible results. */
971 INSN_ADDRESSES_ALLOC (max_uid);
973 varying_length = XCNEWVEC (char, max_uid);
975 /* Initialize uid_align. We scan instructions
976 from end to start, and keep in align_tab[n] the last seen insn
977 that does an alignment of at least n+1, i.e. the successor
978 in the alignment chain for an insn that does / has a known
979 alignment of n. */
980 uid_align = XCNEWVEC (rtx, max_uid);
982 for (i = MAX_CODE_ALIGN; --i >= 0;)
983 align_tab[i] = NULL_RTX;
984 seq = get_last_insn ();
985 for (; seq; seq = PREV_INSN (seq))
987 int uid = INSN_UID (seq);
988 int log;
989 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
990 uid_align[uid] = align_tab[0];
991 if (log)
993 /* Found an alignment label. */
994 uid_align[uid] = align_tab[log];
995 for (i = log - 1; i >= 0; i--)
996 align_tab[i] = seq;
1000 /* When optimizing, we start assuming minimum length, and keep increasing
1001 lengths as we find the need for this, till nothing changes.
1002 When not optimizing, we start assuming maximum lengths, and
1003 do a single pass to update the lengths. */
1004 bool increasing = optimize != 0;
1006 #ifdef CASE_VECTOR_SHORTEN_MODE
1007 if (optimize)
1009 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1010 label fields. */
1012 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1013 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1014 int rel;
1016 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1018 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1019 int len, i, min, max, insn_shuid;
1020 int min_align;
1021 addr_diff_vec_flags flags;
1023 if (!JUMP_P (insn)
1024 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1025 continue;
1026 pat = PATTERN (insn);
1027 len = XVECLEN (pat, 1);
1028 gcc_assert (len > 0);
1029 min_align = MAX_CODE_ALIGN;
1030 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1032 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1033 int shuid = INSN_SHUID (lab);
1034 if (shuid < min)
1036 min = shuid;
1037 min_lab = lab;
1039 if (shuid > max)
1041 max = shuid;
1042 max_lab = lab;
1044 if (min_align > LABEL_TO_ALIGNMENT (lab))
1045 min_align = LABEL_TO_ALIGNMENT (lab);
1047 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1048 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1049 insn_shuid = INSN_SHUID (insn);
1050 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1051 memset (&flags, 0, sizeof (flags));
1052 flags.min_align = min_align;
1053 flags.base_after_vec = rel > insn_shuid;
1054 flags.min_after_vec = min > insn_shuid;
1055 flags.max_after_vec = max > insn_shuid;
1056 flags.min_after_base = min > rel;
1057 flags.max_after_base = max > rel;
1058 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1060 if (increasing)
1061 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1064 #endif /* CASE_VECTOR_SHORTEN_MODE */
1066 /* Compute initial lengths, addresses, and varying flags for each insn. */
1067 int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1069 for (insn_current_address = 0, insn = first;
1070 insn != 0;
1071 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1073 uid = INSN_UID (insn);
1075 insn_lengths[uid] = 0;
1077 if (LABEL_P (insn))
1079 int log = LABEL_TO_ALIGNMENT (insn);
1080 if (log)
1082 int align = 1 << log;
1083 int new_address = (insn_current_address + align - 1) & -align;
1084 insn_lengths[uid] = new_address - insn_current_address;
1088 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1090 if (NOTE_P (insn) || BARRIER_P (insn)
1091 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1092 continue;
1093 if (INSN_DELETED_P (insn))
1094 continue;
1096 body = PATTERN (insn);
1097 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1099 /* This only takes room if read-only data goes into the text
1100 section. */
1101 if (JUMP_TABLES_IN_TEXT_SECTION
1102 || readonly_data_section == text_section)
1103 insn_lengths[uid] = (XVECLEN (body,
1104 GET_CODE (body) == ADDR_DIFF_VEC)
1105 * GET_MODE_SIZE (GET_MODE (body)));
1106 /* Alignment is handled by ADDR_VEC_ALIGN. */
1108 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1109 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1110 else if (GET_CODE (body) == SEQUENCE)
1112 int i;
1113 int const_delay_slots;
1114 #ifdef DELAY_SLOTS
1115 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1116 #else
1117 const_delay_slots = 0;
1118 #endif
1119 int (*inner_length_fun) (rtx)
1120 = const_delay_slots ? length_fun : insn_default_length;
1121 /* Inside a delay slot sequence, we do not do any branch shortening
1122 if the shortening could change the number of delay slots
1123 of the branch. */
1124 for (i = 0; i < XVECLEN (body, 0); i++)
1126 rtx inner_insn = XVECEXP (body, 0, i);
1127 int inner_uid = INSN_UID (inner_insn);
1128 int inner_length;
1130 if (GET_CODE (body) == ASM_INPUT
1131 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1132 inner_length = (asm_insn_count (PATTERN (inner_insn))
1133 * insn_default_length (inner_insn));
1134 else
1135 inner_length = inner_length_fun (inner_insn);
1137 insn_lengths[inner_uid] = inner_length;
1138 if (const_delay_slots)
1140 if ((varying_length[inner_uid]
1141 = insn_variable_length_p (inner_insn)) != 0)
1142 varying_length[uid] = 1;
1143 INSN_ADDRESSES (inner_uid) = (insn_current_address
1144 + insn_lengths[uid]);
1146 else
1147 varying_length[inner_uid] = 0;
1148 insn_lengths[uid] += inner_length;
1151 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1153 insn_lengths[uid] = length_fun (insn);
1154 varying_length[uid] = insn_variable_length_p (insn);
1157 /* If needed, do any adjustment. */
1158 #ifdef ADJUST_INSN_LENGTH
1159 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1160 if (insn_lengths[uid] < 0)
1161 fatal_insn ("negative insn length", insn);
1162 #endif
1165 /* Now loop over all the insns finding varying length insns. For each,
1166 get the current insn length. If it has changed, reflect the change.
1167 When nothing changes for a full pass, we are done. */
1169 while (something_changed)
1171 something_changed = 0;
1172 insn_current_align = MAX_CODE_ALIGN - 1;
1173 for (insn_current_address = 0, insn = first;
1174 insn != 0;
1175 insn = NEXT_INSN (insn))
1177 int new_length;
1178 #ifdef ADJUST_INSN_LENGTH
1179 int tmp_length;
1180 #endif
1181 int length_align;
1183 uid = INSN_UID (insn);
1185 if (LABEL_P (insn))
1187 int log = LABEL_TO_ALIGNMENT (insn);
1188 if (log > insn_current_align)
1190 int align = 1 << log;
1191 int new_address= (insn_current_address + align - 1) & -align;
1192 insn_lengths[uid] = new_address - insn_current_address;
1193 insn_current_align = log;
1194 insn_current_address = new_address;
1196 else
1197 insn_lengths[uid] = 0;
1198 INSN_ADDRESSES (uid) = insn_current_address;
1199 continue;
1202 length_align = INSN_LENGTH_ALIGNMENT (insn);
1203 if (length_align < insn_current_align)
1204 insn_current_align = length_align;
1206 insn_last_address = INSN_ADDRESSES (uid);
1207 INSN_ADDRESSES (uid) = insn_current_address;
1209 #ifdef CASE_VECTOR_SHORTEN_MODE
1210 if (optimize && JUMP_P (insn)
1211 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1213 rtx body = PATTERN (insn);
1214 int old_length = insn_lengths[uid];
1215 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1216 rtx min_lab = XEXP (XEXP (body, 2), 0);
1217 rtx max_lab = XEXP (XEXP (body, 3), 0);
1218 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1219 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1220 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1221 rtx prev;
1222 int rel_align = 0;
1223 addr_diff_vec_flags flags;
1224 enum machine_mode vec_mode;
1226 /* Avoid automatic aggregate initialization. */
1227 flags = ADDR_DIFF_VEC_FLAGS (body);
1229 /* Try to find a known alignment for rel_lab. */
1230 for (prev = rel_lab;
1231 prev
1232 && ! insn_lengths[INSN_UID (prev)]
1233 && ! (varying_length[INSN_UID (prev)] & 1);
1234 prev = PREV_INSN (prev))
1235 if (varying_length[INSN_UID (prev)] & 2)
1237 rel_align = LABEL_TO_ALIGNMENT (prev);
1238 break;
1241 /* See the comment on addr_diff_vec_flags in rtl.h for the
1242 meaning of the flags values. base: REL_LAB vec: INSN */
 1243 /* Anything after INSN still has addresses from the last
1244 pass; adjust these so that they reflect our current
1245 estimate for this pass. */
1246 if (flags.base_after_vec)
1247 rel_addr += insn_current_address - insn_last_address;
1248 if (flags.min_after_vec)
1249 min_addr += insn_current_address - insn_last_address;
1250 if (flags.max_after_vec)
1251 max_addr += insn_current_address - insn_last_address;
1252 /* We want to know the worst case, i.e. lowest possible value
1253 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1254 its offset is positive, and we have to be wary of code shrink;
 1255 otherwise, it is negative, and we have to be wary of code
1256 size increase. */
1257 if (flags.min_after_base)
1259 /* If INSN is between REL_LAB and MIN_LAB, the size
1260 changes we are about to make can change the alignment
1261 within the observed offset, therefore we have to break
1262 it up into two parts that are independent. */
1263 if (! flags.base_after_vec && flags.min_after_vec)
1265 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1266 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1268 else
1269 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1271 else
1273 if (flags.base_after_vec && ! flags.min_after_vec)
1275 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1276 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1278 else
1279 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
 1281 /* Likewise, determine the highest possible value
1282 for the offset of MAX_LAB. */
1283 if (flags.max_after_base)
1285 if (! flags.base_after_vec && flags.max_after_vec)
1287 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1288 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1290 else
1291 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1293 else
1295 if (flags.base_after_vec && ! flags.max_after_vec)
1297 max_addr += align_fuzz (max_lab, insn, 0, 0);
1298 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1300 else
1301 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1303 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1304 max_addr - rel_addr, body);
1305 if (!increasing
1306 || (GET_MODE_SIZE (vec_mode)
1307 >= GET_MODE_SIZE (GET_MODE (body))))
1308 PUT_MODE (body, vec_mode);
1309 if (JUMP_TABLES_IN_TEXT_SECTION
1310 || readonly_data_section == text_section)
1312 insn_lengths[uid]
1313 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1314 insn_current_address += insn_lengths[uid];
1315 if (insn_lengths[uid] != old_length)
1316 something_changed = 1;
1319 continue;
1321 #endif /* CASE_VECTOR_SHORTEN_MODE */
1323 if (! (varying_length[uid]))
1325 if (NONJUMP_INSN_P (insn)
1326 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1328 int i;
1330 body = PATTERN (insn);
1331 for (i = 0; i < XVECLEN (body, 0); i++)
1333 rtx inner_insn = XVECEXP (body, 0, i);
1334 int inner_uid = INSN_UID (inner_insn);
1336 INSN_ADDRESSES (inner_uid) = insn_current_address;
1338 insn_current_address += insn_lengths[inner_uid];
1341 else
1342 insn_current_address += insn_lengths[uid];
1344 continue;
1347 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1349 int i;
1351 body = PATTERN (insn);
1352 new_length = 0;
1353 for (i = 0; i < XVECLEN (body, 0); i++)
1355 rtx inner_insn = XVECEXP (body, 0, i);
1356 int inner_uid = INSN_UID (inner_insn);
1357 int inner_length;
1359 INSN_ADDRESSES (inner_uid) = insn_current_address;
1361 /* insn_current_length returns 0 for insns with a
1362 non-varying length. */
1363 if (! varying_length[inner_uid])
1364 inner_length = insn_lengths[inner_uid];
1365 else
1366 inner_length = insn_current_length (inner_insn);
1368 if (inner_length != insn_lengths[inner_uid])
1370 if (!increasing || inner_length > insn_lengths[inner_uid])
1372 insn_lengths[inner_uid] = inner_length;
1373 something_changed = 1;
1375 else
1376 inner_length = insn_lengths[inner_uid];
1378 insn_current_address += inner_length;
1379 new_length += inner_length;
1382 else
1384 new_length = insn_current_length (insn);
1385 insn_current_address += new_length;
1388 #ifdef ADJUST_INSN_LENGTH
1389 /* If needed, do any adjustment. */
1390 tmp_length = new_length;
1391 ADJUST_INSN_LENGTH (insn, new_length);
1392 insn_current_address += (new_length - tmp_length);
1393 #endif
1395 if (new_length != insn_lengths[uid]
1396 && (!increasing || new_length > insn_lengths[uid]))
1398 insn_lengths[uid] = new_length;
1399 something_changed = 1;
1401 else
1402 insn_current_address += insn_lengths[uid] - new_length;
1404 /* For a non-optimizing compile, do only a single pass. */
1405 if (!increasing)
1406 break;
1409 free (varying_length);
1412 /* Given the body of an INSN known to be generated by an ASM statement, return
1413 the number of machine instructions likely to be generated for this insn.
1414 This is used to compute its length. */
1416 static int
1417 asm_insn_count (rtx body)
1419 const char *templ;
1421 if (GET_CODE (body) == ASM_INPUT)
1422 templ = XSTR (body, 0);
1423 else
1424 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1426 return asm_str_count (templ);
1429 /* Return the number of machine instructions likely to be generated for the
1430 inline-asm template. */
1432 asm_str_count (const char *templ)
1434 int count = 1;
1436 if (!*templ)
1437 return 0;
1439 for (; *templ; templ++)
1440 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1441 || *templ == '\n')
1442 count++;
1444 return count;
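/* Editor's illustration (not part of the original file; the helper is a
   hypothetical, unused sketch): with the default
   IS_ASM_LOGICAL_LINE_SEPARATOR of ';', a template with two ';' separators
   and one '\n' is counted as four instructions.  */

static int
example_asm_template_count (void)
{
  return asm_str_count ("mov r0, r1; add r0, r0, #1; sub r0, r0, #2\n\tbx lr");
}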
1447 /* ??? This is probably the wrong place for these. */
1448 /* Structure recording the mapping from source file and directory
1449 names at compile time to those to be embedded in debug
1450 information. */
1451 typedef struct debug_prefix_map
1453 const char *old_prefix;
1454 const char *new_prefix;
1455 size_t old_len;
1456 size_t new_len;
1457 struct debug_prefix_map *next;
1458 } debug_prefix_map;
1460 /* Linked list of such structures. */
1461 debug_prefix_map *debug_prefix_maps;
1464 /* Record a debug file prefix mapping. ARG is the argument to
1465 -fdebug-prefix-map and must be of the form OLD=NEW. */
1467 void
1468 add_debug_prefix_map (const char *arg)
1470 debug_prefix_map *map;
1471 const char *p;
1473 p = strchr (arg, '=');
1474 if (!p)
1476 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1477 return;
1479 map = XNEW (debug_prefix_map);
1480 map->old_prefix = xstrndup (arg, p - arg);
1481 map->old_len = p - arg;
1482 p++;
1483 map->new_prefix = xstrdup (p);
1484 map->new_len = strlen (p);
1485 map->next = debug_prefix_maps;
1486 debug_prefix_maps = map;
1489 /* Perform user-specified mapping of debug filename prefixes. Return
1490 the new name corresponding to FILENAME. */
1492 const char *
1493 remap_debug_filename (const char *filename)
1495 debug_prefix_map *map;
1496 char *s;
1497 const char *name;
1498 size_t name_len;
1500 for (map = debug_prefix_maps; map; map = map->next)
1501 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1502 break;
1503 if (!map)
1504 return filename;
1505 name = filename + map->old_len;
1506 name_len = strlen (name) + 1;
1507 s = (char *) alloca (name_len + map->new_len);
1508 memcpy (s, map->new_prefix, map->new_len);
1509 memcpy (s + map->new_len, name, name_len);
1510 return ggc_strdup (s);
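/* Editor's illustration (not part of the original file; the helper below is
   a hypothetical sketch): the same prefix substitution for a single OLD=NEW
   pair, without the map list or GGC allocation used above.  For example,
   with -fdebug-prefix-map=/build/src=/usr/src, the name
   "/build/src/gcc/final.c" becomes "/usr/src/gcc/final.c".  */

static char *
example_remap_one_prefix (const char *filename,
                          const char *old_prefix, const char *new_prefix)
{
  size_t old_len = strlen (old_prefix);
  size_t new_len = strlen (new_prefix);
  char *s;

  if (filename_ncmp (filename, old_prefix, old_len) != 0)
    return xstrdup (filename);
  s = XNEWVEC (char, new_len + strlen (filename + old_len) + 1);
  memcpy (s, new_prefix, new_len);
  strcpy (s + new_len, filename + old_len);
  return s;
}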
1513 /* Return true if DWARF2 debug info can be emitted for DECL. */
1515 static bool
1516 dwarf2_debug_info_emitted_p (tree decl)
1518 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1519 return false;
1521 if (DECL_IGNORED_P (decl))
1522 return false;
1524 return true;
1527 /* Return scope resulting from combination of S1 and S2. */
1528 static tree
1529 choose_inner_scope (tree s1, tree s2)
1531 if (!s1)
1532 return s2;
1533 if (!s2)
1534 return s1;
1535 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1536 return s1;
1537 return s2;
1540 /* Emit lexical block notes needed to change scope from S1 to S2. */
1542 static void
1543 change_scope (rtx orig_insn, tree s1, tree s2)
1545 rtx insn = orig_insn;
1546 tree com = NULL_TREE;
1547 tree ts1 = s1, ts2 = s2;
1548 tree s;
1550 while (ts1 != ts2)
1552 gcc_assert (ts1 && ts2);
1553 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1554 ts1 = BLOCK_SUPERCONTEXT (ts1);
1555 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1556 ts2 = BLOCK_SUPERCONTEXT (ts2);
1557 else
1559 ts1 = BLOCK_SUPERCONTEXT (ts1);
1560 ts2 = BLOCK_SUPERCONTEXT (ts2);
1563 com = ts1;
1565 /* Close scopes. */
1566 s = s1;
1567 while (s != com)
1569 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1570 NOTE_BLOCK (note) = s;
1571 s = BLOCK_SUPERCONTEXT (s);
1574 /* Open scopes. */
1575 s = s2;
1576 while (s != com)
1578 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1579 NOTE_BLOCK (insn) = s;
1580 s = BLOCK_SUPERCONTEXT (s);
1584 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1585 on the scope tree and the newly reordered instructions. */
1587 static void
1588 reemit_insn_block_notes (void)
1590 tree cur_block = DECL_INITIAL (cfun->decl);
1591 rtx insn, note;
1593 insn = get_insns ();
1594 if (!active_insn_p (insn))
1595 insn = next_active_insn (insn);
1596 for (; insn; insn = next_active_insn (insn))
1598 tree this_block;
1600 /* Avoid putting scope notes between jump table and its label. */
1601 if (JUMP_TABLE_DATA_P (insn))
1602 continue;
1604 this_block = insn_scope (insn);
1605 /* For sequences compute scope resulting from merging all scopes
1606 of instructions nested inside. */
1607 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
1609 int i;
1610 rtx body = PATTERN (insn);
1612 this_block = NULL;
1613 for (i = 0; i < XVECLEN (body, 0); i++)
1614 this_block = choose_inner_scope (this_block,
1615 insn_scope (XVECEXP (body, 0, i)));
1617 if (! this_block)
1619 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1620 continue;
1621 else
1622 this_block = DECL_INITIAL (cfun->decl);
1625 if (this_block != cur_block)
1627 change_scope (insn, cur_block, this_block);
1628 cur_block = this_block;
1632 /* change_scope emits before the insn, not after. */
1633 note = emit_note (NOTE_INSN_DELETED);
1634 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1635 delete_insn (note);
1637 reorder_blocks ();
1640 /* Output assembler code for the start of a function,
1641 and initialize some of the variables in this file
1642 for the new function. The label for the function and associated
1643 assembler pseudo-ops have already been output in `assemble_start_function'.
1645 FIRST is the first insn of the rtl for the function being compiled.
1646 FILE is the file to write assembler code to.
1647 OPTIMIZE_P is nonzero if we should eliminate redundant
1648 test and compare insns. */
1650 void
1651 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1652 int optimize_p ATTRIBUTE_UNUSED)
1654 block_depth = 0;
1656 this_is_asm_operands = 0;
1658 last_filename = LOCATION_FILE (prologue_location);
1659 last_linenum = LOCATION_LINE (prologue_location);
1660 last_discriminator = discriminator = 0;
1662 high_block_linenum = high_function_linenum = last_linenum;
1664 if (!DECL_IGNORED_P (current_function_decl))
1665 debug_hooks->begin_prologue (last_linenum, last_filename);
1667 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1668 dwarf2out_begin_prologue (0, NULL);
1670 #ifdef LEAF_REG_REMAP
1671 if (crtl->uses_only_leaf_regs)
1672 leaf_renumber_regs (first);
1673 #endif
1675 /* The Sun386i and perhaps other machines don't work right
1676 if the profiling code comes after the prologue. */
1677 if (targetm.profile_before_prologue () && crtl->profile)
1678 profile_function (file);
1680 /* If debugging, assign block numbers to all of the blocks in this
1681 function. */
1682 if (write_symbols)
1684 reemit_insn_block_notes ();
1685 number_blocks (current_function_decl);
1686 /* We never actually put out begin/end notes for the top-level
1687 block in the function. But, conceptually, that block is
1688 always needed. */
1689 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1692 if (warn_frame_larger_than
1693 && get_frame_size () > frame_larger_than_size)
1695 /* Issue a warning */
1696 warning (OPT_Wframe_larger_than_,
1697 "the frame size of %wd bytes is larger than %wd bytes",
1698 get_frame_size (), frame_larger_than_size);
1701 /* First output the function prologue: code to set up the stack frame. */
1702 targetm.asm_out.function_prologue (file, get_frame_size ());
1704 /* If the machine represents the prologue as RTL, the profiling code must
1705 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1706 #ifdef HAVE_prologue
1707 if (! HAVE_prologue)
1708 #endif
1709 profile_after_prologue (file);
1712 static void
1713 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1715 if (!targetm.profile_before_prologue () && crtl->profile)
1716 profile_function (file);
1719 static void
1720 profile_function (FILE *file ATTRIBUTE_UNUSED)
1722 #ifndef NO_PROFILE_COUNTERS
1723 # define NO_PROFILE_COUNTERS 0
1724 #endif
1725 #ifdef ASM_OUTPUT_REG_PUSH
1726 rtx sval = NULL, chain = NULL;
1728 if (cfun->returns_struct)
1729 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1730 true);
1731 if (cfun->static_chain_decl)
1732 chain = targetm.calls.static_chain (current_function_decl, true);
1733 #endif /* ASM_OUTPUT_REG_PUSH */
1735 if (! NO_PROFILE_COUNTERS)
1737 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1738 switch_to_section (data_section);
1739 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1740 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1741 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1744 switch_to_section (current_function_section ());
1746 #ifdef ASM_OUTPUT_REG_PUSH
1747 if (sval && REG_P (sval))
1748 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1749 if (chain && REG_P (chain))
1750 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1751 #endif
1753 FUNCTION_PROFILER (file, current_function_funcdef_no);
1755 #ifdef ASM_OUTPUT_REG_PUSH
1756 if (chain && REG_P (chain))
1757 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1758 if (sval && REG_P (sval))
1759 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1760 #endif
1763 /* Output assembler code for the end of a function.
1764 For clarity, args are same as those of `final_start_function'
1765 even though not all of them are needed. */
1767 void
1768 final_end_function (void)
1770 app_disable ();
1772 if (!DECL_IGNORED_P (current_function_decl))
1773 debug_hooks->end_function (high_function_linenum);
1775 /* Finally, output the function epilogue:
1776 code to restore the stack frame and return to the caller. */
1777 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1779 /* And debug output. */
1780 if (!DECL_IGNORED_P (current_function_decl))
1781 debug_hooks->end_epilogue (last_linenum, last_filename);
1783 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1784 && dwarf2out_do_frame ())
1785 dwarf2out_end_epilogue (last_linenum, last_filename);
1789 /* Dumper helper for basic block information. FILE is the assembly
1790 output file, and INSN is the instruction being emitted. */
1792 static void
1793 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1794 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1796 basic_block bb;
1798 if (!flag_debug_asm)
1799 return;
1801 if (INSN_UID (insn) < bb_map_size
1802 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1804 edge e;
1805 edge_iterator ei;
1807 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1808 if (bb->frequency)
1809 fprintf (file, " freq:%d", bb->frequency);
1810 if (bb->count)
1811 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1812 bb->count);
1813 fprintf (file, " seq:%d", (*bb_seqn)++);
1814 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1815 FOR_EACH_EDGE (e, ei, bb->preds)
1817 dump_edge_info (file, e, TDF_DETAILS, 0);
1819 fprintf (file, "\n");
1821 if (INSN_UID (insn) < bb_map_size
1822 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1824 edge e;
1825 edge_iterator ei;
1827 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1828 FOR_EACH_EDGE (e, ei, bb->succs)
1830 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1832 fprintf (file, "\n");
1836 /* Output assembler code for some insns: all or part of a function.
1837 For description of args, see `final_start_function', above. */
1839 void
1840 final (rtx first, FILE *file, int optimize_p)
1842 rtx insn, next;
1843 int seen = 0;
1845 /* Used for -dA dump. */
1846 basic_block *start_to_bb = NULL;
1847 basic_block *end_to_bb = NULL;
1848 int bb_map_size = 0;
1849 int bb_seqn = 0;
1851 last_ignored_compare = 0;
1853 #ifdef HAVE_cc0
1854 for (insn = first; insn; insn = NEXT_INSN (insn))
1856 /* If CC tracking across branches is enabled, record the insn which
1857 jumps to each branch only reached from one place. */
1858 if (optimize_p && JUMP_P (insn))
1860 rtx lab = JUMP_LABEL (insn);
1861 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1863 LABEL_REFS (lab) = insn;
1867 #endif
1869 init_recog ();
1871 CC_STATUS_INIT;
1873 if (flag_debug_asm)
1875 basic_block bb;
1877 bb_map_size = get_max_uid () + 1;
1878 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1879 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1881 /* There is no cfg for a thunk. */
1882 if (!cfun->is_thunk)
1883 FOR_EACH_BB_REVERSE (bb)
1885 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1886 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1890 /* Output the insns. */
1891 for (insn = first; insn;)
1893 if (HAVE_ATTR_length)
1895 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1897 /* This can be triggered by bugs elsewhere in the compiler if
1898 new insns are created after init_insn_lengths is called. */
1899 gcc_assert (NOTE_P (insn));
1900 insn_current_address = -1;
1902 else
1903 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1906 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1907 bb_map_size, &bb_seqn);
1908 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1911 if (flag_debug_asm)
1913 free (start_to_bb);
1914 free (end_to_bb);
1917 /* Remove CFI notes, to avoid compare-debug failures. */
1918 for (insn = first; insn; insn = next)
1920 next = NEXT_INSN (insn);
1921 if (NOTE_P (insn)
1922 && (NOTE_KIND (insn) == NOTE_INSN_CFI
1923 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
1924 delete_insn (insn);
1928 const char *
1929 get_insn_template (int code, rtx insn)
1931 switch (insn_data[code].output_format)
1933 case INSN_OUTPUT_FORMAT_SINGLE:
1934 return insn_data[code].output.single;
1935 case INSN_OUTPUT_FORMAT_MULTI:
1936 return insn_data[code].output.multi[which_alternative];
1937 case INSN_OUTPUT_FORMAT_FUNCTION:
1938 gcc_assert (insn);
1939 return (*insn_data[code].output.function) (recog_data.operand, insn);
1941 default:
1942 gcc_unreachable ();
1946 /* Emit the appropriate declaration for an alternate-entry-point
1947 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1948 LABEL_KIND != LABEL_NORMAL.
1950 The case fall-through in this function is intentional. */
1951 static void
1952 output_alternate_entry_point (FILE *file, rtx insn)
1954 const char *name = LABEL_NAME (insn);
1956 switch (LABEL_KIND (insn))
1958 case LABEL_WEAK_ENTRY:
1959 #ifdef ASM_WEAKEN_LABEL
1960 ASM_WEAKEN_LABEL (file, name);
1961 #endif
1962 case LABEL_GLOBAL_ENTRY:
1963 targetm.asm_out.globalize_label (file, name);
1964 case LABEL_STATIC_ENTRY:
1965 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1966 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1967 #endif
1968 ASM_OUTPUT_LABEL (file, name);
1969 break;
1971 case LABEL_NORMAL:
1972 default:
1973 gcc_unreachable ();
1977 /* Given a CALL_INSN, find and return the nested CALL. */
1978 static rtx
1979 call_from_call_insn (rtx insn)
1981 rtx x;
1982 gcc_assert (CALL_P (insn));
1983 x = PATTERN (insn);
1985 while (GET_CODE (x) != CALL)
1987 switch (GET_CODE (x))
1989 default:
1990 gcc_unreachable ();
1991 case COND_EXEC:
1992 x = COND_EXEC_CODE (x);
1993 break;
1994 case PARALLEL:
1995 x = XVECEXP (x, 0, 0);
1996 break;
1997 case SET:
1998 x = XEXP (x, 1);
1999 break;
2002 return x;
2005 /* The final scan for one insn, INSN.
2006 Args are same as in `final', except that INSN
2007 is the insn being scanned.
2008 Value returned is the next insn to be scanned.
2010 NOPEEPHOLES is the flag to disallow peephole processing (currently
2011 used for within delayed branch sequence output).
2013 SEEN is used to track the end of the prologue, for emitting
2014 debug information. We force the emission of a line note after
2015 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2016 at the beginning of the second basic block, whichever comes
2017 first. */
2020 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2021 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2023 #ifdef HAVE_cc0
2024 rtx set;
2025 #endif
2026 rtx next;
2028 insn_counter++;
2030 /* Ignore deleted insns. These can occur when we split insns (due to a
2031 template of "#") while not optimizing. */
2032 if (INSN_DELETED_P (insn))
2033 return NEXT_INSN (insn);
2035 switch (GET_CODE (insn))
2037 case NOTE:
2038 switch (NOTE_KIND (insn))
2040 case NOTE_INSN_DELETED:
2041 break;
2043 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2044 in_cold_section_p = !in_cold_section_p;
2046 if (dwarf2out_do_frame ())
2047 dwarf2out_switch_text_section ();
2048 else if (!DECL_IGNORED_P (current_function_decl))
2049 debug_hooks->switch_text_section ();
2051 switch_to_section (current_function_section ());
2052 targetm.asm_out.function_switched_text_sections (asm_out_file,
2053 current_function_decl,
2054 in_cold_section_p);
2055 break;
2057 case NOTE_INSN_BASIC_BLOCK:
2058 if (targetm.asm_out.unwind_emit)
2059 targetm.asm_out.unwind_emit (asm_out_file, insn);
2061 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
2063 *seen |= SEEN_EMITTED;
2064 force_source_line = true;
2066 else
2067 *seen |= SEEN_BB;
2069 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2071 break;
2073 case NOTE_INSN_EH_REGION_BEG:
2074 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2075 NOTE_EH_HANDLER (insn));
2076 break;
2078 case NOTE_INSN_EH_REGION_END:
2079 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2080 NOTE_EH_HANDLER (insn));
2081 break;
2083 case NOTE_INSN_PROLOGUE_END:
2084 targetm.asm_out.function_end_prologue (file);
2085 profile_after_prologue (file);
2087 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2089 *seen |= SEEN_EMITTED;
2090 force_source_line = true;
2092 else
2093 *seen |= SEEN_NOTE;
2095 break;
2097 case NOTE_INSN_EPILOGUE_BEG:
2098 if (!DECL_IGNORED_P (current_function_decl))
2099 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2100 targetm.asm_out.function_begin_epilogue (file);
2101 break;
2103 case NOTE_INSN_CFI:
2104 dwarf2out_emit_cfi (NOTE_CFI (insn));
2105 break;
2107 case NOTE_INSN_CFI_LABEL:
2108 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2109 NOTE_LABEL_NUMBER (insn));
2110 break;
2112 case NOTE_INSN_FUNCTION_BEG:
2113 app_disable ();
2114 if (!DECL_IGNORED_P (current_function_decl))
2115 debug_hooks->end_prologue (last_linenum, last_filename);
2117 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2119 *seen |= SEEN_EMITTED;
2120 force_source_line = true;
2122 else
2123 *seen |= SEEN_NOTE;
2125 break;
2127 case NOTE_INSN_BLOCK_BEG:
2128 if (debug_info_level == DINFO_LEVEL_NORMAL
2129 || debug_info_level == DINFO_LEVEL_VERBOSE
2130 || write_symbols == DWARF2_DEBUG
2131 || write_symbols == VMS_AND_DWARF2_DEBUG
2132 || write_symbols == VMS_DEBUG)
2134 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2136 app_disable ();
2137 ++block_depth;
2138 high_block_linenum = last_linenum;
2140 /* Output debugging info about the symbol-block beginning. */
2141 if (!DECL_IGNORED_P (current_function_decl))
2142 debug_hooks->begin_block (last_linenum, n);
2144 /* Mark this block as output. */
2145 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2147 if (write_symbols == DBX_DEBUG
2148 || write_symbols == SDB_DEBUG)
2150 location_t *locus_ptr
2151 = block_nonartificial_location (NOTE_BLOCK (insn));
2153 if (locus_ptr != NULL)
2155 override_filename = LOCATION_FILE (*locus_ptr);
2156 override_linenum = LOCATION_LINE (*locus_ptr);
2159 break;
2161 case NOTE_INSN_BLOCK_END:
2162 if (debug_info_level == DINFO_LEVEL_NORMAL
2163 || debug_info_level == DINFO_LEVEL_VERBOSE
2164 || write_symbols == DWARF2_DEBUG
2165 || write_symbols == VMS_AND_DWARF2_DEBUG
2166 || write_symbols == VMS_DEBUG)
2168 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2170 app_disable ();
2172 /* End of a symbol-block. */
2173 --block_depth;
2174 gcc_assert (block_depth >= 0);
2176 if (!DECL_IGNORED_P (current_function_decl))
2177 debug_hooks->end_block (high_block_linenum, n);
2179 if (write_symbols == DBX_DEBUG
2180 || write_symbols == SDB_DEBUG)
2182 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2183 location_t *locus_ptr
2184 = block_nonartificial_location (outer_block);
2186 if (locus_ptr != NULL)
2188 override_filename = LOCATION_FILE (*locus_ptr);
2189 override_linenum = LOCATION_LINE (*locus_ptr);
2191 else
2193 override_filename = NULL;
2194 override_linenum = 0;
2197 break;
2199 case NOTE_INSN_DELETED_LABEL:
2200 /* Emit the label. We may have deleted the CODE_LABEL because
2201 the label could be proved to be unreachable, though still
2202 referenced (in the form of having its address taken). */
2203 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2204 break;
2206 case NOTE_INSN_DELETED_DEBUG_LABEL:
2207 /* Similarly, but we need to use a different namespace for it. */
2208 if (CODE_LABEL_NUMBER (insn) != -1)
2209 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2210 break;
2212 case NOTE_INSN_VAR_LOCATION:
2213 case NOTE_INSN_CALL_ARG_LOCATION:
2214 if (!DECL_IGNORED_P (current_function_decl))
2215 debug_hooks->var_location (insn);
2216 break;
2218 default:
2219 gcc_unreachable ();
2220 break;
2222 break;
2224 case BARRIER:
2225 break;
2227 case CODE_LABEL:
2228 /* The target port might emit labels in the output function for
2229 some insn, e.g. sh.c output_branchy_insn. */
2230 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2232 int align = LABEL_TO_ALIGNMENT (insn);
2233 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2234 int max_skip = LABEL_TO_MAX_SKIP (insn);
2235 #endif
2237 if (align && NEXT_INSN (insn))
2239 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2240 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2241 #else
2242 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2243 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2244 #else
2245 ASM_OUTPUT_ALIGN (file, align);
2246 #endif
2247 #endif
2250 CC_STATUS_INIT;
2252 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2253 debug_hooks->label (insn);
2255 app_disable ();
2257 next = next_nonnote_insn (insn);
2258 /* If this label is followed by a jump-table, make sure we put
2259 the label in the read-only section. Also possibly write the
2260 label and jump table together. */
2261 if (next != 0 && JUMP_TABLE_DATA_P (next))
2263 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2264 /* In this case, the case vector is being moved by the
2265 target, so don't output the label at all. Leave that
2266 to the back end macros. */
2267 #else
2268 if (! JUMP_TABLES_IN_TEXT_SECTION)
2270 int log_align;
2272 switch_to_section (targetm.asm_out.function_rodata_section
2273 (current_function_decl));
2275 #ifdef ADDR_VEC_ALIGN
2276 log_align = ADDR_VEC_ALIGN (next);
2277 #else
2278 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2279 #endif
2280 ASM_OUTPUT_ALIGN (file, log_align);
2282 else
2283 switch_to_section (current_function_section ());
2285 #ifdef ASM_OUTPUT_CASE_LABEL
2286 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2287 next);
2288 #else
2289 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2290 #endif
2291 #endif
2292 break;
2294 if (LABEL_ALT_ENTRY_P (insn))
2295 output_alternate_entry_point (file, insn);
2296 else
2297 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2298 break;
2300 default:
2302 rtx body = PATTERN (insn);
2303 int insn_code_number;
2304 const char *templ;
2305 bool is_stmt;
2307 /* Reset this early so it is correct for ASM statements. */
2308 current_insn_predicate = NULL_RTX;
2310 /* An INSN, JUMP_INSN or CALL_INSN.
2311 First check for special kinds that recog doesn't recognize. */
2313 if (GET_CODE (body) == USE /* These are just declarations. */
2314 || GET_CODE (body) == CLOBBER)
2315 break;
2317 #ifdef HAVE_cc0
2319 /* If there is a REG_CC_SETTER note on this insn, it means that
2320 the setting of the condition code was done in the delay slot
2321 of the insn that branched here. So recover the cc status
2322 from the insn that set it. */
2324 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2325 if (note)
2327 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2328 cc_prev_status = cc_status;
2331 #endif
2333 /* Detect insns that are really jump-tables
2334 and output them as such. */
2336 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2338 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2339 int vlen, idx;
2340 #endif
2342 if (! JUMP_TABLES_IN_TEXT_SECTION)
2343 switch_to_section (targetm.asm_out.function_rodata_section
2344 (current_function_decl));
2345 else
2346 switch_to_section (current_function_section ());
2348 app_disable ();
2350 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2351 if (GET_CODE (body) == ADDR_VEC)
2353 #ifdef ASM_OUTPUT_ADDR_VEC
2354 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2355 #else
2356 gcc_unreachable ();
2357 #endif
2359 else
2361 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2362 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2363 #else
2364 gcc_unreachable ();
2365 #endif
2367 #else
2368 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2369 for (idx = 0; idx < vlen; idx++)
2371 if (GET_CODE (body) == ADDR_VEC)
2373 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2374 ASM_OUTPUT_ADDR_VEC_ELT
2375 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2376 #else
2377 gcc_unreachable ();
2378 #endif
2380 else
2382 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2383 ASM_OUTPUT_ADDR_DIFF_ELT
2384 (file,
2385 body,
2386 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2387 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2388 #else
2389 gcc_unreachable ();
2390 #endif
2393 #ifdef ASM_OUTPUT_CASE_END
2394 ASM_OUTPUT_CASE_END (file,
2395 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2396 insn);
2397 #endif
2398 #endif
2400 switch_to_section (current_function_section ());
2402 break;
2404 /* Output this line note if it is the first or the last line
2405 note in a row. */
2406 if (!DECL_IGNORED_P (current_function_decl)
2407 && notice_source_line (insn, &is_stmt))
2408 (*debug_hooks->source_line) (last_linenum, last_filename,
2409 last_discriminator, is_stmt);
2411 if (GET_CODE (body) == ASM_INPUT)
2413 const char *string = XSTR (body, 0);
2415 /* There's no telling what that did to the condition codes. */
2416 CC_STATUS_INIT;
2418 if (string[0])
2420 expanded_location loc;
2422 app_enable ();
2423 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2424 if (*loc.file && loc.line)
2425 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2426 ASM_COMMENT_START, loc.line, loc.file);
2427 fprintf (asm_out_file, "\t%s\n", string);
2428 #if HAVE_AS_LINE_ZERO
2429 if (*loc.file && loc.line)
2430 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2431 #endif
2433 break;
2436 /* Detect `asm' construct with operands. */
2437 if (asm_noperands (body) >= 0)
2439 unsigned int noperands = asm_noperands (body);
2440 rtx *ops = XALLOCAVEC (rtx, noperands);
2441 const char *string;
2442 location_t loc;
2443 expanded_location expanded;
2445 /* There's no telling what that did to the condition codes. */
2446 CC_STATUS_INIT;
2448 /* Get out the operand values. */
2449 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2450 /* Inhibit dying on what would otherwise be compiler bugs. */
2451 insn_noperands = noperands;
2452 this_is_asm_operands = insn;
2453 expanded = expand_location (loc);
2455 #ifdef FINAL_PRESCAN_INSN
2456 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2457 #endif
2459 /* Output the insn using them. */
2460 if (string[0])
2462 app_enable ();
2463 if (expanded.file && expanded.line)
2464 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2465 ASM_COMMENT_START, expanded.line, expanded.file);
2466 output_asm_insn (string, ops);
2467 #if HAVE_AS_LINE_ZERO
2468 if (expanded.file && expanded.line)
2469 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2470 #endif
2473 if (targetm.asm_out.final_postscan_insn)
2474 targetm.asm_out.final_postscan_insn (file, insn, ops,
2475 insn_noperands);
2477 this_is_asm_operands = 0;
2478 break;
2481 app_disable ();
2483 if (GET_CODE (body) == SEQUENCE)
2485 /* A delayed-branch sequence */
2486 int i;
2488 final_sequence = body;
2490 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2491 force the restoration of a comparison that was previously
2492 thought unnecessary. If that happens, cancel this sequence
2493 and cause that insn to be restored. */
2495 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2496 if (next != XVECEXP (body, 0, 1))
2498 final_sequence = 0;
2499 return next;
2502 for (i = 1; i < XVECLEN (body, 0); i++)
2504 rtx insn = XVECEXP (body, 0, i);
2505 rtx next = NEXT_INSN (insn);
2506 /* We loop in case any instruction in a delay slot gets
2507 split. */
2509 insn = final_scan_insn (insn, file, 0, 1, seen);
2510 while (insn != next);
2512 #ifdef DBR_OUTPUT_SEQEND
2513 DBR_OUTPUT_SEQEND (file);
2514 #endif
2515 final_sequence = 0;
2517 /* If the insn requiring the delay slot was a CALL_INSN, the
2518 insns in the delay slot are actually executed before the
2519 called function. Hence we don't preserve any CC-setting
2520 actions in these insns and the CC must be marked as being
2521 clobbered by the function. */
2522 if (CALL_P (XVECEXP (body, 0, 0)))
2524 CC_STATUS_INIT;
2526 break;
2529 /* We have a real machine instruction as rtl. */
2531 body = PATTERN (insn);
2533 #ifdef HAVE_cc0
2534 set = single_set (insn);
2536 /* Check for redundant test and compare instructions
2537 (when the condition codes are already set up as desired).
2538 This is done only when optimizing; if not optimizing,
2539 it should be possible for the user to alter a variable
2540 with the debugger in between statements
2541 and the next statement should reexamine the variable
2542 to compute the condition codes. */
2544 if (optimize_p)
2546 if (set
2547 && GET_CODE (SET_DEST (set)) == CC0
2548 && insn != last_ignored_compare)
2550 rtx src1, src2;
2551 if (GET_CODE (SET_SRC (set)) == SUBREG)
2552 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2554 src1 = SET_SRC (set);
2555 src2 = NULL_RTX;
2556 if (GET_CODE (SET_SRC (set)) == COMPARE)
2558 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2559 XEXP (SET_SRC (set), 0)
2560 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2561 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2562 XEXP (SET_SRC (set), 1)
2563 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2564 if (XEXP (SET_SRC (set), 1)
2565 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2566 src2 = XEXP (SET_SRC (set), 0);
2568 if ((cc_status.value1 != 0
2569 && rtx_equal_p (src1, cc_status.value1))
2570 || (cc_status.value2 != 0
2571 && rtx_equal_p (src1, cc_status.value2))
2572 || (src2 != 0 && cc_status.value1 != 0
2573 && rtx_equal_p (src2, cc_status.value1))
2574 || (src2 != 0 && cc_status.value2 != 0
2575 && rtx_equal_p (src2, cc_status.value2)))
2577 /* Don't delete insn if it has an addressing side-effect. */
2578 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2579 /* or if anything in it is volatile. */
2580 && ! volatile_refs_p (PATTERN (insn)))
2582 /* We don't really delete the insn; just ignore it. */
2583 last_ignored_compare = insn;
2584 break;
2590 /* If this is a conditional branch, maybe modify it
2591 if the cc's are in a nonstandard state
2592 so that it accomplishes the same thing that it would
2593 do straightforwardly if the cc's were set up normally. */
2595 if (cc_status.flags != 0
2596 && JUMP_P (insn)
2597 && GET_CODE (body) == SET
2598 && SET_DEST (body) == pc_rtx
2599 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2600 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2601 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2603 /* This function may alter the contents of its argument
2604 and clear some of the cc_status.flags bits.
2605 It may also return 1 meaning condition now always true
2606 or -1 meaning condition now always false
2607 or 2 meaning condition nontrivial but altered. */
2608 int result = alter_cond (XEXP (SET_SRC (body), 0));
2609 /* If condition now has fixed value, replace the IF_THEN_ELSE
2610 with its then-operand or its else-operand. */
2611 if (result == 1)
2612 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2613 if (result == -1)
2614 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2616 /* The jump is now either unconditional or a no-op.
2617 If it has become a no-op, don't try to output it.
2618 (It would not be recognized.) */
2619 if (SET_SRC (body) == pc_rtx)
2621 delete_insn (insn);
2622 break;
2624 else if (ANY_RETURN_P (SET_SRC (body)))
2625 /* Replace (set (pc) (return)) with (return). */
2626 PATTERN (insn) = body = SET_SRC (body);
2628 /* Rerecognize the instruction if it has changed. */
2629 if (result != 0)
2630 INSN_CODE (insn) = -1;
2633 /* If this is a conditional trap, maybe modify it if the cc's
2634 are in a nonstandard state so that it accomplishes the same
2635 thing that it would do straightforwardly if the cc's were
2636 set up normally. */
2637 if (cc_status.flags != 0
2638 && NONJUMP_INSN_P (insn)
2639 && GET_CODE (body) == TRAP_IF
2640 && COMPARISON_P (TRAP_CONDITION (body))
2641 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2643 /* This function may alter the contents of its argument
2644 and clear some of the cc_status.flags bits.
2645 It may also return 1 meaning condition now always true
2646 or -1 meaning condition now always false
2647 or 2 meaning condition nontrivial but altered. */
2648 int result = alter_cond (TRAP_CONDITION (body));
2650 /* If TRAP_CONDITION has become always false, delete the
2651 instruction. */
2652 if (result == -1)
2654 delete_insn (insn);
2655 break;
2658 /* If TRAP_CONDITION has become always true, replace
2659 TRAP_CONDITION with const_true_rtx. */
2660 if (result == 1)
2661 TRAP_CONDITION (body) = const_true_rtx;
2663 /* Rerecognize the instruction if it has changed. */
2664 if (result != 0)
2665 INSN_CODE (insn) = -1;
2668 /* Make same adjustments to instructions that examine the
2669 condition codes without jumping and instructions that
2670 handle conditional moves (if this machine has either one). */
2672 if (cc_status.flags != 0
2673 && set != 0)
2675 rtx cond_rtx, then_rtx, else_rtx;
2677 if (!JUMP_P (insn)
2678 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2680 cond_rtx = XEXP (SET_SRC (set), 0);
2681 then_rtx = XEXP (SET_SRC (set), 1);
2682 else_rtx = XEXP (SET_SRC (set), 2);
2684 else
2686 cond_rtx = SET_SRC (set);
2687 then_rtx = const_true_rtx;
2688 else_rtx = const0_rtx;
2691 if (COMPARISON_P (cond_rtx)
2692 && XEXP (cond_rtx, 0) == cc0_rtx)
2694 int result;
2695 result = alter_cond (cond_rtx);
2696 if (result == 1)
2697 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2698 else if (result == -1)
2699 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2700 else if (result == 2)
2701 INSN_CODE (insn) = -1;
2702 if (SET_DEST (set) == SET_SRC (set))
2703 delete_insn (insn);
2707 #endif
2709 #ifdef HAVE_peephole
2710 /* Do machine-specific peephole optimizations if desired. */
2712 if (optimize_p && !flag_no_peephole && !nopeepholes)
2714 rtx next = peephole (insn);
2715 /* When peepholing, if there were notes within the peephole,
2716 emit them before the peephole. */
2717 if (next != 0 && next != NEXT_INSN (insn))
2719 rtx note, prev = PREV_INSN (insn);
2721 for (note = NEXT_INSN (insn); note != next;
2722 note = NEXT_INSN (note))
2723 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2725 /* Put the notes in the proper position for a later
2726 rescan. For example, the SH target can do this
2727 when generating a far jump in a delayed branch
2728 sequence. */
2729 note = NEXT_INSN (insn);
2730 PREV_INSN (note) = prev;
2731 NEXT_INSN (prev) = note;
2732 NEXT_INSN (PREV_INSN (next)) = insn;
2733 PREV_INSN (insn) = PREV_INSN (next);
2734 NEXT_INSN (insn) = next;
2735 PREV_INSN (next) = insn;
2738 /* PEEPHOLE might have changed this. */
2739 body = PATTERN (insn);
2741 #endif
2743 /* Try to recognize the instruction.
2744 If successful, verify that the operands satisfy the
2745 constraints for the instruction. Crash if they don't,
2746 since `reload' should have changed them so that they do. */
2748 insn_code_number = recog_memoized (insn);
2749 cleanup_subreg_operands (insn);
2751 /* Dump the insn in the assembly for debugging (-dAP).
2752 If the final dump is requested as slim RTL, dump slim
2753 RTL to the assembly file also. */
2754 if (flag_dump_rtl_in_asm)
2756 print_rtx_head = ASM_COMMENT_START;
2757 if (! (dump_flags & TDF_SLIM))
2758 print_rtl_single (asm_out_file, insn);
2759 else
2760 dump_insn_slim (asm_out_file, insn);
2761 print_rtx_head = "";
2764 if (! constrain_operands_cached (1))
2765 fatal_insn_not_found (insn);
2767 /* Some target machines need to prescan each insn before
2768 it is output. */
2770 #ifdef FINAL_PRESCAN_INSN
2771 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2772 #endif
2774 if (targetm.have_conditional_execution ()
2775 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2776 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2778 #ifdef HAVE_cc0
2779 cc_prev_status = cc_status;
2781 /* Update `cc_status' for this instruction.
2782 The instruction's output routine may change it further.
2783 If the output routine for a jump insn needs to depend
2784 on the cc status, it should look at cc_prev_status. */
2786 NOTICE_UPDATE_CC (body, insn);
2787 #endif
2789 current_output_insn = debug_insn = insn;
2791 /* Find the proper template for this insn. */
2792 templ = get_insn_template (insn_code_number, insn);
2794 /* If the C code returns 0, it means that it is a jump insn
2795 which follows a deleted test insn, and that test insn
2796 needs to be reinserted. */
2797 if (templ == 0)
2799 rtx prev;
2801 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2803 /* We have already processed the notes between the setter and
2804 the user. Make sure we don't process them again, this is
2805 particularly important if one of the notes is a block
2806 scope note or an EH note. */
2807 for (prev = insn;
2808 prev != last_ignored_compare;
2809 prev = PREV_INSN (prev))
2811 if (NOTE_P (prev))
2812 delete_insn (prev); /* Use delete_note. */
2815 return prev;
2818 /* If the template is the string "#", it means that this insn must
2819 be split. */
2820 if (templ[0] == '#' && templ[1] == '\0')
2822 rtx new_rtx = try_split (body, insn, 0);
2824 /* If we didn't split the insn, go away. */
2825 if (new_rtx == insn && PATTERN (new_rtx) == body)
2826 fatal_insn ("could not split insn", insn);
2828 /* If we have a length attribute, this instruction should have
2829 been split in shorten_branches, to ensure that we would have
2830 valid length info for the splitees. */
2831 gcc_assert (!HAVE_ATTR_length);
2833 return new_rtx;
2836 /* ??? This will put the directives in the wrong place if
2837 get_insn_template outputs assembly directly. However, calling it
2838 before get_insn_template breaks if the insn is split.
2839 if (targetm.asm_out.unwind_emit_before_insn
2840 && targetm.asm_out.unwind_emit)
2841 targetm.asm_out.unwind_emit (asm_out_file, insn);
2843 if (CALL_P (insn))
2845 rtx x = call_from_call_insn (insn);
2846 x = XEXP (x, 0);
2847 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2849 tree t;
2850 x = XEXP (x, 0);
2851 t = SYMBOL_REF_DECL (x);
2852 if (t)
2853 assemble_external (t);
2855 if (!DECL_IGNORED_P (current_function_decl))
2856 debug_hooks->var_location (insn);
2859 /* Output assembler code from the template. */
2860 output_asm_insn (templ, recog_data.operand);
2862 /* Some target machines need to postscan each insn after
2863 it is output. */
2864 if (targetm.asm_out.final_postscan_insn)
2865 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2866 recog_data.n_operands);
2868 if (!targetm.asm_out.unwind_emit_before_insn
2869 && targetm.asm_out.unwind_emit)
2870 targetm.asm_out.unwind_emit (asm_out_file, insn);
2872 current_output_insn = debug_insn = 0;
2875 return NEXT_INSN (insn);
2878 /* Return whether a source line note needs to be emitted before INSN.
2879 Sets IS_STMT to TRUE if the line should be marked as a possible
2880 breakpoint location. */
2882 static bool
2883 notice_source_line (rtx insn, bool *is_stmt)
2885 const char *filename;
2886 int linenum;
2888 if (override_filename)
2890 filename = override_filename;
2891 linenum = override_linenum;
2893 else
2895 filename = insn_file (insn);
2896 linenum = insn_line (insn);
2899 if (filename == NULL)
2900 return false;
2902 if (force_source_line
2903 || filename != last_filename
2904 || last_linenum != linenum)
2906 force_source_line = false;
2907 last_filename = filename;
2908 last_linenum = linenum;
2909 last_discriminator = discriminator;
2910 *is_stmt = true;
2911 high_block_linenum = MAX (last_linenum, high_block_linenum);
2912 high_function_linenum = MAX (last_linenum, high_function_linenum);
2913 return true;
2916 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2918 /* If the discriminator changed, but the line number did not,
2919 output the line table entry with is_stmt false so the
2920 debugger does not treat this as a breakpoint location. */
2921 last_discriminator = discriminator;
2922 *is_stmt = false;
2923 return true;
2926 return false;
2929 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2930 directly to the desired hard register. */
2932 void
2933 cleanup_subreg_operands (rtx insn)
2935 int i;
2936 bool changed = false;
2937 extract_insn_cached (insn);
2938 for (i = 0; i < recog_data.n_operands; i++)
2940 /* The following test cannot use recog_data.operand when testing
2941 for a SUBREG: the underlying object might have been changed
2942 already if we are inside a match_operator expression that
2943 matches the else clause. Instead we test the underlying
2944 expression directly. */
2945 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2947 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
2948 changed = true;
2950 else if (GET_CODE (recog_data.operand[i]) == PLUS
2951 || GET_CODE (recog_data.operand[i]) == MULT
2952 || MEM_P (recog_data.operand[i]))
2953 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2956 for (i = 0; i < recog_data.n_dups; i++)
2958 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2960 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
2961 changed = true;
2963 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2964 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2965 || MEM_P (*recog_data.dup_loc[i]))
2966 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2968 if (changed)
2969 df_insn_rescan (insn);
2972 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
2973 the thing it is a subreg of. Do it anyway if FINAL_P. */
2976 alter_subreg (rtx *xp, bool final_p)
2978 rtx x = *xp;
2979 rtx y = SUBREG_REG (x);
2981 /* simplify_subreg does not remove subreg from volatile references.
2982 We are required to. */
2983 if (MEM_P (y))
2985 int offset = SUBREG_BYTE (x);
2987 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2988 contains 0 instead of the proper offset. See simplify_subreg. */
2989 if (offset == 0
2990 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2992 int difference = GET_MODE_SIZE (GET_MODE (y))
2993 - GET_MODE_SIZE (GET_MODE (x));
2994 if (WORDS_BIG_ENDIAN)
2995 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2996 if (BYTES_BIG_ENDIAN)
2997 offset += difference % UNITS_PER_WORD;
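/* Worked example (hypothetical big-endian target with UNITS_PER_WORD == 4):
   for (subreg:SI (mem:QI ...) 0) the difference is 1 - 4 = -3; the word
   term above contributes 0 and the byte term contributes -3, so the new
   SImode reference starts 3 bytes before the QImode location and the
   original byte ends up in the low part of the wider access.  */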
3000 if (final_p)
3001 *xp = adjust_address (y, GET_MODE (x), offset);
3002 else
3003 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3005 else
3007 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3008 SUBREG_BYTE (x));
3010 if (new_rtx != 0)
3011 *xp = new_rtx;
3012 else if (final_p && REG_P (y))
3014 /* Simplify_subreg can't handle some REG cases, but we have to. */
3015 unsigned int regno;
3016 HOST_WIDE_INT offset;
3018 regno = subreg_regno (x);
3019 if (subreg_lowpart_p (x))
3020 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3021 else
3022 offset = SUBREG_BYTE (x);
3023 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3027 return *xp;
3030 /* Do alter_subreg on all the SUBREGs contained in X. */
3032 static rtx
3033 walk_alter_subreg (rtx *xp, bool *changed)
3035 rtx x = *xp;
3036 switch (GET_CODE (x))
3038 case PLUS:
3039 case MULT:
3040 case AND:
3041 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3042 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3043 break;
3045 case MEM:
3046 case ZERO_EXTEND:
3047 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3048 break;
3050 case SUBREG:
3051 *changed = true;
3052 return alter_subreg (xp, true);
3054 default:
3055 break;
3058 return *xp;
3061 #ifdef HAVE_cc0
3063 /* Given COND, a condition tested against cc0 (taken from a jump, trap or
3064 conditional-move insn), alter it as required by the bits that are set in cc_status.flags.
3065 Not all of the bits there can be handled at this level in all cases.
3067 The value is normally 0.
3068 1 means that the condition has become always true.
3069 -1 means that the condition has become always false.
3070 2 means that COND has been altered. */
3072 static int
3073 alter_cond (rtx cond)
3075 int value = 0;
3077 if (cc_status.flags & CC_REVERSED)
3079 value = 2;
3080 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3083 if (cc_status.flags & CC_INVERTED)
3085 value = 2;
3086 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3089 if (cc_status.flags & CC_NOT_POSITIVE)
3090 switch (GET_CODE (cond))
3092 case LE:
3093 case LEU:
3094 case GEU:
3095 /* Jump becomes unconditional. */
3096 return 1;
3098 case GT:
3099 case GTU:
3100 case LTU:
3101 /* Jump becomes no-op. */
3102 return -1;
3104 case GE:
3105 PUT_CODE (cond, EQ);
3106 value = 2;
3107 break;
3109 case LT:
3110 PUT_CODE (cond, NE);
3111 value = 2;
3112 break;
3114 default:
3115 break;
3118 if (cc_status.flags & CC_NOT_NEGATIVE)
3119 switch (GET_CODE (cond))
3121 case GE:
3122 case GEU:
3123 /* Jump becomes unconditional. */
3124 return 1;
3126 case LT:
3127 case LTU:
3128 /* Jump becomes no-op. */
3129 return -1;
3131 case LE:
3132 case LEU:
3133 PUT_CODE (cond, EQ);
3134 value = 2;
3135 break;
3137 case GT:
3138 case GTU:
3139 PUT_CODE (cond, NE);
3140 value = 2;
3141 break;
3143 default:
3144 break;
3147 if (cc_status.flags & CC_NO_OVERFLOW)
3148 switch (GET_CODE (cond))
3150 case GEU:
3151 /* Jump becomes unconditional. */
3152 return 1;
3154 case LEU:
3155 PUT_CODE (cond, EQ);
3156 value = 2;
3157 break;
3159 case GTU:
3160 PUT_CODE (cond, NE);
3161 value = 2;
3162 break;
3164 case LTU:
3165 /* Jump becomes no-op. */
3166 return -1;
3168 default:
3169 break;
3172 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3173 switch (GET_CODE (cond))
3175 default:
3176 gcc_unreachable ();
3178 case NE:
3179 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3180 value = 2;
3181 break;
3183 case EQ:
3184 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3185 value = 2;
3186 break;
3189 if (cc_status.flags & CC_NOT_SIGNED)
3190 /* The flags are valid if signed condition operators are converted
3191 to unsigned. */
3192 switch (GET_CODE (cond))
3194 case LE:
3195 PUT_CODE (cond, LEU);
3196 value = 2;
3197 break;
3199 case LT:
3200 PUT_CODE (cond, LTU);
3201 value = 2;
3202 break;
3204 case GT:
3205 PUT_CODE (cond, GTU);
3206 value = 2;
3207 break;
3209 case GE:
3210 PUT_CODE (cond, GEU);
3211 value = 2;
3212 break;
3214 default:
3215 break;
3218 return value;
3220 #endif
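/* Example of the interface above (cc0 targets only, values hypothetical):
   if cc_status.flags contains CC_NO_OVERFLOW and COND is
   (ltu (cc0) (const_int 0)), alter_cond returns -1 and the caller removes
   the never-taken branch; with (leu ...) the code is rewritten to EQ and
   2 is returned so the insn gets re-recognized.  */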
3222 /* Report inconsistency between the assembler template and the operands.
3223 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3225 void
3226 output_operand_lossage (const char *cmsgid, ...)
3228 char *fmt_string;
3229 char *new_message;
3230 const char *pfx_str;
3231 va_list ap;
3233 va_start (ap, cmsgid);
3235 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3236 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3237 vasprintf (&new_message, fmt_string, ap);
3239 if (this_is_asm_operands)
3240 error_for_asm (this_is_asm_operands, "%s", new_message);
3241 else
3242 internal_error ("%s", new_message);
3244 free (fmt_string);
3245 free (new_message);
3246 va_end (ap);
3249 /* Output of assembler code from a template, and its subroutines. */
3251 /* Annotate the assembly with a comment describing the pattern and
3252 alternative used. */
3254 static void
3255 output_asm_name (void)
3257 if (debug_insn)
3259 int num = INSN_CODE (debug_insn);
3260 fprintf (asm_out_file, "\t%s %d\t%s",
3261 ASM_COMMENT_START, INSN_UID (debug_insn),
3262 insn_data[num].name);
3263 if (insn_data[num].n_alternatives > 1)
3264 fprintf (asm_out_file, "/%d", which_alternative + 1);
3266 if (HAVE_ATTR_length)
3267 fprintf (asm_out_file, "\t[length = %d]",
3268 get_attr_length (debug_insn));
3270 /* Clear this so only the first assembler insn
3271 of any rtl insn will get the special comment for -dp. */
3272 debug_insn = 0;
3276 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3277 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3278 corresponds to the address of the object and 0 if to the object. */
3280 static tree
3281 get_mem_expr_from_op (rtx op, int *paddressp)
3283 tree expr;
3284 int inner_addressp;
3286 *paddressp = 0;
3288 if (REG_P (op))
3289 return REG_EXPR (op);
3290 else if (!MEM_P (op))
3291 return 0;
3293 if (MEM_EXPR (op) != 0)
3294 return MEM_EXPR (op);
3296 /* Otherwise we have an address, so indicate it and look at the address. */
3297 *paddressp = 1;
3298 op = XEXP (op, 0);
3300 /* First check if we have a decl for the address, then look at the right side
3301 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3302 But don't allow the address to itself be indirect. */
3303 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3304 return expr;
3305 else if (GET_CODE (op) == PLUS
3306 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3307 return expr;
3309 while (UNARY_P (op)
3310 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3311 op = XEXP (op, 0);
3313 expr = get_mem_expr_from_op (op, &inner_addressp);
3314 return inner_addressp ? 0 : expr;
3317 /* Output operand names for assembler instructions. OPERANDS is the
3318 operand vector, OPORDER is the order to write the operands, and NOPS
3319 is the number of operands to write. */
3321 static void
3322 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3324 int wrote = 0;
3325 int i;
3327 for (i = 0; i < nops; i++)
3329 int addressp;
3330 rtx op = operands[oporder[i]];
3331 tree expr = get_mem_expr_from_op (op, &addressp);
3333 fprintf (asm_out_file, "%c%s",
3334 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3335 wrote = 1;
3336 if (expr)
3338 fprintf (asm_out_file, "%s",
3339 addressp ? "*" : "");
3340 print_mem_expr (asm_out_file, expr);
3341 wrote = 1;
3343 else if (REG_P (op) && ORIGINAL_REGNO (op)
3344 && ORIGINAL_REGNO (op) != REGNO (op))
3345 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3349 #ifdef ASSEMBLER_DIALECT
3350 /* Helper function to parse assembler dialects in the asm string.
3351 This is called from output_asm_insn and asm_fprintf. */
3352 static const char *
3353 do_assembler_dialects (const char *p, int *dialect)
3355 char c = *(p - 1);
3357 switch (c)
3359 case '{':
3361 int i;
3363 if (*dialect)
3364 output_operand_lossage ("nested assembly dialect alternatives");
3365 else
3366 *dialect = 1;
3368 /* If we want the first dialect, do nothing. Otherwise, skip
3369 DIALECT_NUMBER of strings ending with '|'. */
3370 for (i = 0; i < dialect_number; i++)
3372 while (*p && *p != '}' && *p++ != '|')
3374 if (*p == '}')
3375 break;
3378 if (*p == '\0')
3379 output_operand_lossage ("unterminated assembly dialect alternative");
3381 break;
3383 case '|':
3384 if (*dialect)
3386 /* Skip to close brace. */
3389 if (*p == '\0')
3391 output_operand_lossage ("unterminated assembly dialect alternative");
3392 break;
3395 while (*p++ != '}');
3396 *dialect = 0;
3398 else
3399 putc (c, asm_out_file);
3400 break;
3402 case '}':
3403 if (! *dialect)
3404 putc (c, asm_out_file);
3405 *dialect = 0;
3406 break;
3407 default:
3408 gcc_unreachable ();
3411 return p;
3413 #endif
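/* Illustrative sketch, assuming a port that defines ASSEMBLER_DIALECT with
   two dialects (e.g. AT&T vs. Intel operand order on x86-like targets):
   for the template fragment "{%1, %0|%0, %1}", dialect 0 keeps "%1, %0"
   and dialect 1 keeps "%0, %1"; the text for the other dialect and the
   braces themselves are skipped.  */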
3415 /* Output text from TEMPLATE to the assembler output file,
3416 obeying %-directions to substitute operands taken from
3417 the vector OPERANDS.
3419 %N (for N a digit) means print operand N in usual manner.
3420 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3421 and print the label name with no punctuation.
3422 %cN means require operand N to be a constant
3423 and print the constant expression with no punctuation.
3424 %aN means expect operand N to be a memory address
3425 (not a memory reference!) and print a reference
3426 to that address.
3427 %nN means expect operand N to be a constant
3428 and print a constant expression for minus the value
3429 of the operand, with no other punctuation. */
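/* For example (hypothetical machine-description templates): "add %0,%1,%2"
   substitutes the three operands in the usual way, "b %l0" prints operand 0
   as a bare label name, and "addi %0,%1,%n2" prints the negated value of
   constant operand 2.  */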
3431 void
3432 output_asm_insn (const char *templ, rtx *operands)
3434 const char *p;
3435 int c;
3436 #ifdef ASSEMBLER_DIALECT
3437 int dialect = 0;
3438 #endif
3439 int oporder[MAX_RECOG_OPERANDS];
3440 char opoutput[MAX_RECOG_OPERANDS];
3441 int ops = 0;
3443 /* An insn may return a null string template
3444 in a case where no assembler code is needed. */
3445 if (*templ == 0)
3446 return;
3448 memset (opoutput, 0, sizeof opoutput);
3449 p = templ;
3450 putc ('\t', asm_out_file);
3452 #ifdef ASM_OUTPUT_OPCODE
3453 ASM_OUTPUT_OPCODE (asm_out_file, p);
3454 #endif
3456 while ((c = *p++))
3457 switch (c)
3459 case '\n':
3460 if (flag_verbose_asm)
3461 output_asm_operand_names (operands, oporder, ops);
3462 if (flag_print_asm_name)
3463 output_asm_name ();
3465 ops = 0;
3466 memset (opoutput, 0, sizeof opoutput);
3468 putc (c, asm_out_file);
3469 #ifdef ASM_OUTPUT_OPCODE
3470 while ((c = *p) == '\t')
3472 putc (c, asm_out_file);
3473 p++;
3475 ASM_OUTPUT_OPCODE (asm_out_file, p);
3476 #endif
3477 break;
3479 #ifdef ASSEMBLER_DIALECT
3480 case '{':
3481 case '}':
3482 case '|':
3483 p = do_assembler_dialects (p, &dialect);
3484 break;
3485 #endif
3487 case '%':
3488 /* %% outputs a single %. */
3489 if (*p == '%')
3491 p++;
3492 putc (c, asm_out_file);
3494 /* %= outputs a number which is unique to each insn in the entire
3495 compilation. This is useful for making local labels that are
3496 referred to more than once in a given insn. */
3497 else if (*p == '=')
3499 p++;
3500 fprintf (asm_out_file, "%d", insn_counter);
3502 /* % followed by a letter and some digits
3503 outputs an operand in a special way depending on the letter.
3504 Letters `acln' are implemented directly.
3505 Other letters are passed to `output_operand' so that
3506 the TARGET_PRINT_OPERAND hook can define them. */
3507 else if (ISALPHA (*p))
3509 int letter = *p++;
3510 unsigned long opnum;
3511 char *endptr;
3513 opnum = strtoul (p, &endptr, 10);
3515 if (endptr == p)
3516 output_operand_lossage ("operand number missing "
3517 "after %%-letter");
3518 else if (this_is_asm_operands && opnum >= insn_noperands)
3519 output_operand_lossage ("operand number out of range");
3520 else if (letter == 'l')
3521 output_asm_label (operands[opnum]);
3522 else if (letter == 'a')
3523 output_address (operands[opnum]);
3524 else if (letter == 'c')
3526 if (CONSTANT_ADDRESS_P (operands[opnum]))
3527 output_addr_const (asm_out_file, operands[opnum]);
3528 else
3529 output_operand (operands[opnum], 'c');
3531 else if (letter == 'n')
3533 if (CONST_INT_P (operands[opnum]))
3534 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3535 - INTVAL (operands[opnum]));
3536 else
3538 putc ('-', asm_out_file);
3539 output_addr_const (asm_out_file, operands[opnum]);
3542 else
3543 output_operand (operands[opnum], letter);
3545 if (!opoutput[opnum])
3546 oporder[ops++] = opnum;
3547 opoutput[opnum] = 1;
3549 p = endptr;
3550 c = *p;
3552 /* % followed by a digit outputs an operand the default way. */
3553 else if (ISDIGIT (*p))
3555 unsigned long opnum;
3556 char *endptr;
3558 opnum = strtoul (p, &endptr, 10);
3559 if (this_is_asm_operands && opnum >= insn_noperands)
3560 output_operand_lossage ("operand number out of range");
3561 else
3562 output_operand (operands[opnum], 0);
3564 if (!opoutput[opnum])
3565 oporder[ops++] = opnum;
3566 opoutput[opnum] = 1;
3568 p = endptr;
3569 c = *p;
3571 /* % followed by punctuation: output something for that
3572 punctuation character alone, with no operand. The
3573 TARGET_PRINT_OPERAND hook decides what is actually done. */
3574 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3575 output_operand (NULL_RTX, *p++);
3576 else
3577 output_operand_lossage ("invalid %%-code");
3578 break;
3580 default:
3581 putc (c, asm_out_file);
3584 /* Write out the variable names for operands, if we know them. */
3585 if (flag_verbose_asm)
3586 output_asm_operand_names (operands, oporder, ops);
3587 if (flag_print_asm_name)
3588 output_asm_name ();
3590 putc ('\n', asm_out_file);
3593 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3595 void
3596 output_asm_label (rtx x)
3598 char buf[256];
3600 if (GET_CODE (x) == LABEL_REF)
3601 x = XEXP (x, 0);
3602 if (LABEL_P (x)
3603 || (NOTE_P (x)
3604 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3605 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3606 else
3607 output_operand_lossage ("'%%l' operand isn't a label");
3609 assemble_name (asm_out_file, buf);
3612 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3613 output_operand. Marks SYMBOL_REFs as referenced through use of
3614 assemble_external. */
3616 static int
3617 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3619 rtx x = *xp;
3621 /* If we have a used symbol, we may have to emit assembly
3622 annotations corresponding to whether the symbol is external, weak
3623 or has non-default visibility. */
3624 if (GET_CODE (x) == SYMBOL_REF)
3626 tree t;
3628 t = SYMBOL_REF_DECL (x);
3629 if (t)
3630 assemble_external (t);
3632 return -1;
3635 return 0;
3638 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3640 void
3641 mark_symbol_refs_as_used (rtx x)
3643 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3646 /* Print operand X using machine-dependent assembler syntax.
3647 CODE is a non-digit that preceded the operand-number in the % spec,
3648 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3649 between the % and the digits.
3650 When CODE is a non-letter, X is 0.
3652 The meanings of the letters are machine-dependent and controlled
3653 by TARGET_PRINT_OPERAND. */
3655 void
3656 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3658 if (x && GET_CODE (x) == SUBREG)
3659 x = alter_subreg (&x, true);
3661 /* X must not be a pseudo reg. */
3662 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3664 targetm.asm_out.print_operand (asm_out_file, x, code);
3666 if (x == NULL_RTX)
3667 return;
3669 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3672 /* Print a memory reference operand for address X using
3673 machine-dependent assembler syntax. */
3675 void
3676 output_address (rtx x)
3678 bool changed = false;
3679 walk_alter_subreg (&x, &changed);
3680 targetm.asm_out.print_operand_address (asm_out_file, x);
3683 /* Print an integer constant expression in assembler syntax.
3684 Addition and subtraction are the only arithmetic
3685 that may appear in these expressions. */
3687 void
3688 output_addr_const (FILE *file, rtx x)
3690 char buf[256];
3692 restart:
3693 switch (GET_CODE (x))
3695 case PC:
3696 putc ('.', file);
3697 break;
3699 case SYMBOL_REF:
3700 if (SYMBOL_REF_DECL (x))
3701 assemble_external (SYMBOL_REF_DECL (x));
3702 #ifdef ASM_OUTPUT_SYMBOL_REF
3703 ASM_OUTPUT_SYMBOL_REF (file, x);
3704 #else
3705 assemble_name (file, XSTR (x, 0));
3706 #endif
3707 break;
3709 case LABEL_REF:
3710 x = XEXP (x, 0);
3711 /* Fall through. */
3712 case CODE_LABEL:
3713 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3714 #ifdef ASM_OUTPUT_LABEL_REF
3715 ASM_OUTPUT_LABEL_REF (file, buf);
3716 #else
3717 assemble_name (file, buf);
3718 #endif
3719 break;
3721 case CONST_INT:
3722 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3723 break;
3725 case CONST:
3726 /* This used to output parentheses around the expression,
3727 but that does not work on the 386 (either ATT or BSD assembler). */
3728 output_addr_const (file, XEXP (x, 0));
3729 break;
3731 case CONST_DOUBLE:
3732 if (GET_MODE (x) == VOIDmode)
3734 /* We can use %d if the number is one word and positive. */
3735 if (CONST_DOUBLE_HIGH (x))
3736 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3737 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3738 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3739 else if (CONST_DOUBLE_LOW (x) < 0)
3740 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3741 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3742 else
3743 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3745 else
3746 /* We can't handle floating point constants;
3747 PRINT_OPERAND must handle them. */
3748 output_operand_lossage ("floating constant misused");
3749 break;
3751 case CONST_FIXED:
3752 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3753 break;
3755 case PLUS:
3756 /* Some assemblers need integer constants to appear last (e.g. masm). */
3757 if (CONST_INT_P (XEXP (x, 0)))
3759 output_addr_const (file, XEXP (x, 1));
3760 if (INTVAL (XEXP (x, 0)) >= 0)
3761 fprintf (file, "+");
3762 output_addr_const (file, XEXP (x, 0));
3764 else
3766 output_addr_const (file, XEXP (x, 0));
3767 if (!CONST_INT_P (XEXP (x, 1))
3768 || INTVAL (XEXP (x, 1)) >= 0)
3769 fprintf (file, "+");
3770 output_addr_const (file, XEXP (x, 1));
3772 break;
3774 case MINUS:
3775 /* Avoid outputting things like x-x or x+5-x,
3776 since some assemblers can't handle that. */
3777 x = simplify_subtraction (x);
3778 if (GET_CODE (x) != MINUS)
3779 goto restart;
3781 output_addr_const (file, XEXP (x, 0));
3782 fprintf (file, "-");
3783 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3784 || GET_CODE (XEXP (x, 1)) == PC
3785 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3786 output_addr_const (file, XEXP (x, 1));
3787 else
3789 fputs (targetm.asm_out.open_paren, file);
3790 output_addr_const (file, XEXP (x, 1));
3791 fputs (targetm.asm_out.close_paren, file);
3793 break;
3795 case ZERO_EXTEND:
3796 case SIGN_EXTEND:
3797 case SUBREG:
3798 case TRUNCATE:
3799 output_addr_const (file, XEXP (x, 0));
3800 break;
3802 default:
3803 if (targetm.asm_out.output_addr_const_extra (file, x))
3804 break;
3806 output_operand_lossage ("invalid expression as operand");
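/* For example (modulo any target-specific symbol decoration):
   (const (plus (symbol_ref "x") (const_int 4))) prints as "x+4",
   (plus (const_int 4) (symbol_ref "x")) also prints as "x+4" because the
   integer constant is emitted last, and a negative addend such as
   (plus (symbol_ref "x") (const_int -4)) prints as "x-4".  */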
3810 /* Output a quoted string. */
3812 void
3813 output_quoted_string (FILE *asm_file, const char *string)
3815 #ifdef OUTPUT_QUOTED_STRING
3816 OUTPUT_QUOTED_STRING (asm_file, string);
3817 #else
3818 char c;
3820 putc ('\"', asm_file);
3821 while ((c = *string++) != 0)
3823 if (ISPRINT (c))
3825 if (c == '\"' || c == '\\')
3826 putc ('\\', asm_file);
3827 putc (c, asm_file);
3829 else
3830 fprintf (asm_file, "\\%03o", (unsigned char) c);
3832 putc ('\"', asm_file);
3833 #endif
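/* For example, with the fallback implementation above,
   output_quoted_string (f, "a\"b\n") writes "a\"b\012" including the
   surrounding double quotes, since unprintable characters are emitted as
   three-digit octal escapes.  */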
3836 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3838 void
3839 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3841 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3842 if (value == 0)
3843 putc ('0', f);
3844 else
3846 char *p = buf + sizeof (buf);
3848 *--p = "0123456789abcdef"[value % 16];
3849 while ((value /= 16) != 0);
3850 *--p = 'x';
3851 *--p = '0';
3852 fwrite (p, 1, buf + sizeof (buf) - p, f);
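/* For example, fprint_whex (f, 0) writes just "0", while
   fprint_whex (f, 0x1a2b) writes "0x1a2b".  */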
3856 /* Internal function that prints an unsigned long in decimal in reverse.
3857 The output string IS NOT null-terminated. */
3859 static int
3860 sprint_ul_rev (char *s, unsigned long value)
3862 int i = 0;
3865 s[i] = "0123456789"[value % 10];
3866 value /= 10;
3867 i++;
3868 /* alternate version, without modulo */
3869 /* oldval = value; */
3870 /* value /= 10; */
3871 /* s[i] = "0123456789" [oldval - 10*value]; */
3872 /* i++ */
3874 while (value != 0);
3875 return i;
3878 /* Write an unsigned long as decimal to a file, fast. */
3880 void
3881 fprint_ul (FILE *f, unsigned long value)
3883 /* python says: len(str(2**64)) == 20 */
3884 char s[20];
3885 int i;
3887 i = sprint_ul_rev (s, value);
3889 /* It's probably too small to bother with string reversal and fputs. */
3892 i--;
3893 putc (s[i], f);
3895 while (i != 0);
3898 /* Write an unsigned long as decimal to a string, fast.
3899 s must be wide enough to not overflow, at least 21 chars.
3900 Returns the length of the string (without terminating '\0'). */
3903 sprint_ul (char *s, unsigned long value)
3905 int len;
3906 char tmp_c;
3907 int i;
3908 int j;
3910 len = sprint_ul_rev (s, value);
3911 s[len] = '\0';
3913 /* Reverse the string. */
3914 i = 0;
3915 j = len - 1;
3916 while (i < j)
3918 tmp_c = s[i];
3919 s[i] = s[j];
3920 s[j] = tmp_c;
3921 i++; j--;
3924 return len;
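/* For example, sprint_ul (buf, 12345) stores "12345" in BUF and returns 5;
   sprint_ul (buf, 0) stores "0" and returns 1.  */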
3927 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3928 %R prints the value of REGISTER_PREFIX.
3929 %L prints the value of LOCAL_LABEL_PREFIX.
3930 %U prints the value of USER_LABEL_PREFIX.
3931 %I prints the value of IMMEDIATE_PREFIX.
3932 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3933 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3935 We handle alternate assembler dialects here, just like output_asm_insn. */
3937 void
3938 asm_fprintf (FILE *file, const char *p, ...)
3940 char buf[10];
3941 char *q, c;
3942 #ifdef ASSEMBLER_DIALECT
3943 int dialect = 0;
3944 #endif
3945 va_list argptr;
3947 va_start (argptr, p);
3949 buf[0] = '%';
3951 while ((c = *p++))
3952 switch (c)
3954 #ifdef ASSEMBLER_DIALECT
3955 case '{':
3956 case '}':
3957 case '|':
3958 p = do_assembler_dialects (p, &dialect);
3959 break;
3960 #endif
3962 case '%':
3963 c = *p++;
3964 q = &buf[1];
3965 while (strchr ("-+ #0", c))
3967 *q++ = c;
3968 c = *p++;
3970 while (ISDIGIT (c) || c == '.')
3972 *q++ = c;
3973 c = *p++;
3975 switch (c)
3977 case '%':
3978 putc ('%', file);
3979 break;
3981 case 'd': case 'i': case 'u':
3982 case 'x': case 'X': case 'o':
3983 case 'c':
3984 *q++ = c;
3985 *q = 0;
3986 fprintf (file, buf, va_arg (argptr, int));
3987 break;
3989 case 'w':
3990 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3991 'o' cases, but we do not check for those cases. It
3992 means that the value is a HOST_WIDE_INT, which may be
3993 either `long' or `long long'. */
3994 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3995 q += strlen (HOST_WIDE_INT_PRINT);
3996 *q++ = *p++;
3997 *q = 0;
3998 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3999 break;
4001 case 'l':
4002 *q++ = c;
4003 #ifdef HAVE_LONG_LONG
4004 if (*p == 'l')
4006 *q++ = *p++;
4007 *q++ = *p++;
4008 *q = 0;
4009 fprintf (file, buf, va_arg (argptr, long long));
4011 else
4012 #endif
4014 *q++ = *p++;
4015 *q = 0;
4016 fprintf (file, buf, va_arg (argptr, long));
4019 break;
4021 case 's':
4022 *q++ = c;
4023 *q = 0;
4024 fprintf (file, buf, va_arg (argptr, char *));
4025 break;
4027 case 'O':
4028 #ifdef ASM_OUTPUT_OPCODE
4029 ASM_OUTPUT_OPCODE (asm_out_file, p);
4030 #endif
4031 break;
4033 case 'R':
4034 #ifdef REGISTER_PREFIX
4035 fprintf (file, "%s", REGISTER_PREFIX);
4036 #endif
4037 break;
4039 case 'I':
4040 #ifdef IMMEDIATE_PREFIX
4041 fprintf (file, "%s", IMMEDIATE_PREFIX);
4042 #endif
4043 break;
4045 case 'L':
4046 #ifdef LOCAL_LABEL_PREFIX
4047 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4048 #endif
4049 break;
4051 case 'U':
4052 fputs (user_label_prefix, file);
4053 break;
4055 #ifdef ASM_FPRINTF_EXTENSIONS
4056 /* Uppercase letters are reserved for general use by asm_fprintf
4057 and so are not available to target specific code. In order to
4058 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4059 they are defined here. As they get turned into real extensions
4060 to asm_fprintf they should be removed from this list. */
4061 case 'A': case 'B': case 'C': case 'D': case 'E':
4062 case 'F': case 'G': case 'H': case 'J': case 'K':
4063 case 'M': case 'N': case 'P': case 'Q': case 'S':
4064 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4065 break;
4067 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4068 #endif
4069 default:
4070 gcc_unreachable ();
4072 break;
4074 default:
4075 putc (c, file);
4077 va_end (argptr);
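/* Usage sketch (hypothetical port code, assuming the port defines
   REGISTER_PREFIX as "%" and IMMEDIATE_PREFIX as "#"):

     asm_fprintf (file, "\tmov\t%R%s, %I%wd\n",
                  reg_names[regno], (HOST_WIDE_INT) disp);

   might emit something like "mov %r3, #16" (tab-indented); %wd formats a
   HOST_WIDE_INT regardless of whether that type is `long' or `long long'.  */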
4080 /* Return nonzero if this function has no function calls. */
4083 leaf_function_p (void)
4085 rtx insn;
4087 if (crtl->profile || profile_arc_flag)
4088 return 0;
4090 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4092 if (CALL_P (insn)
4093 && ! SIBLING_CALL_P (insn))
4094 return 0;
4095 if (NONJUMP_INSN_P (insn)
4096 && GET_CODE (PATTERN (insn)) == SEQUENCE
4097 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4098 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4099 return 0;
4102 return 1;
4105 /* Return 1 if the branch is a forward branch.
4106 Uses insn_shuid array, so it works only in the final pass. May be used by
4107 output templates to add branch prediction hints.
4110 final_forward_branch_p (rtx insn)
4112 int insn_id, label_id;
4114 gcc_assert (uid_shuid);
4115 insn_id = INSN_SHUID (insn);
4116 label_id = INSN_SHUID (JUMP_LABEL (insn));
4117 /* We've hit some insns that do not have id information available. */
4118 gcc_assert (insn_id && label_id);
4119 return insn_id < label_id;
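/* Usage sketch (hypothetical output template, SPARC-like hint syntax):
   a conditional-branch pattern's C output code could select a static
   prediction hint with

     return final_forward_branch_p (insn) ? "bne,pn\t%l0" : "bne,pt\t%l0";

   treating backward branches (likely loop edges) as predicted taken.  */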
4122 /* On some machines, a function with no call insns
4123 can run faster if it doesn't create its own register window.
4124 When output, the leaf function should use only the "output"
4125 registers. Ordinarily, the function would be compiled to use
4126 the "input" registers to find its arguments; it is a candidate
4127 for leaf treatment if it uses only the "input" registers.
4128 Leaf function treatment means renumbering so the function
4129 uses the "output" registers instead. */
4131 #ifdef LEAF_REGISTERS
4133 /* Return 1 if this function uses only the registers that can be
4134 safely renumbered. */
4137 only_leaf_regs_used (void)
4139 int i;
4140 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4142 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4143 if ((df_regs_ever_live_p (i) || global_regs[i])
4144 && ! permitted_reg_in_leaf_functions[i])
4145 return 0;
4147 if (crtl->uses_pic_offset_table
4148 && pic_offset_table_rtx != 0
4149 && REG_P (pic_offset_table_rtx)
4150 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4151 return 0;
4153 return 1;
4156 /* Scan all instructions and renumber all registers into those
4157 available in leaf functions. */
4159 static void
4160 leaf_renumber_regs (rtx first)
4162 rtx insn;
4164 /* Renumber only the actual patterns.
4165 The reg-notes can contain frame pointer refs,
4166 and renumbering them could crash, and should not be needed. */
4167 for (insn = first; insn; insn = NEXT_INSN (insn))
4168 if (INSN_P (insn))
4169 leaf_renumber_regs_insn (PATTERN (insn));
4172 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4173 available in leaf functions. */
4175 void
4176 leaf_renumber_regs_insn (rtx in_rtx)
4178 int i, j;
4179 const char *format_ptr;
4181 if (in_rtx == 0)
4182 return;
4184 /* Renumber all input-registers into output-registers.
4185 renumbered_regs would be 1 for an output-register; the actual
4186 remapping is done via LEAF_REG_REMAP below. */
4188 if (REG_P (in_rtx))
4190 int newreg;
4192 /* Don't renumber the same reg twice. */
4193 if (in_rtx->used)
4194 return;
4196 newreg = REGNO (in_rtx);
4197 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4198 to reach here as part of a REG_NOTE. */
4199 if (newreg >= FIRST_PSEUDO_REGISTER)
4201 in_rtx->used = 1;
4202 return;
4204 newreg = LEAF_REG_REMAP (newreg);
4205 gcc_assert (newreg >= 0);
4206 df_set_regs_ever_live (REGNO (in_rtx), false);
4207 df_set_regs_ever_live (newreg, true);
4208 SET_REGNO (in_rtx, newreg);
4209 in_rtx->used = 1;
4212 if (INSN_P (in_rtx))
4214 /* Inside a SEQUENCE, we find insns.
4215 Renumber just the patterns of these insns,
4216 just as we do for the top-level insns. */
4217 leaf_renumber_regs_insn (PATTERN (in_rtx));
4218 return;
4221 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4223 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4224 switch (*format_ptr++)
4226 case 'e':
4227 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4228 break;
4230 case 'E':
4231 if (NULL != XVEC (in_rtx, i))
4233 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4234 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4236 break;
4238 case 'S':
4239 case 's':
4240 case '0':
4241 case 'i':
4242 case 'w':
4243 case 'n':
4244 case 'u':
4245 break;
4247 default:
4248 gcc_unreachable ();
4251 #endif
4253 /* Turn the RTL into assembly. */
4254 static unsigned int
4255 rest_of_handle_final (void)
4257 rtx x;
4258 const char *fnname;
4260 /* Get the function's name, as described by its RTL. This may be
4261 different from the DECL_NAME name used in the source file. */
4263 x = DECL_RTL (current_function_decl);
4264 gcc_assert (MEM_P (x));
4265 x = XEXP (x, 0);
4266 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4267 fnname = XSTR (x, 0);
4269 assemble_start_function (current_function_decl, fnname);
4270 final_start_function (get_insns (), asm_out_file, optimize);
4271 final (get_insns (), asm_out_file, optimize);
4272 final_end_function ();
4274 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4275 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4276 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4277 output_function_exception_table (fnname);
4279 assemble_end_function (current_function_decl, fnname);
4281 user_defined_section_attribute = false;
4283 /* Free up reg info memory. */
4284 free_reg_info ();
4286 if (! quiet_flag)
4287 fflush (asm_out_file);
4289 /* Write DBX symbols if requested. */
4291 /* Note that for those inline functions where we don't initially
4292 know for certain that we will be generating an out-of-line copy,
4293 the first invocation of this routine (rest_of_compilation) will
4294 skip over this code by doing a `goto exit_rest_of_compilation;'.
4295 Later on, wrapup_global_declarations will (indirectly) call
4296 rest_of_compilation again for those inline functions that need
4297 to have out-of-line copies generated. During that call, we
4298 *will* be routed past here. */
4300 timevar_push (TV_SYMOUT);
4301 if (!DECL_IGNORED_P (current_function_decl))
4302 debug_hooks->function_decl (current_function_decl);
4303 timevar_pop (TV_SYMOUT);
4305 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4306 DECL_INITIAL (current_function_decl) = error_mark_node;
4308 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4309 && targetm.have_ctors_dtors)
4310 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4311 decl_init_priority_lookup
4312 (current_function_decl));
4313 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4314 && targetm.have_ctors_dtors)
4315 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4316 decl_fini_priority_lookup
4317 (current_function_decl));
4318 return 0;
4321 struct rtl_opt_pass pass_final =
4324 RTL_PASS,
4325 "final", /* name */
4326 OPTGROUP_NONE, /* optinfo_flags */
4327 NULL, /* gate */
4328 rest_of_handle_final, /* execute */
4329 NULL, /* sub */
4330 NULL, /* next */
4331 0, /* static_pass_number */
4332 TV_FINAL, /* tv_id */
4333 0, /* properties_required */
4334 0, /* properties_provided */
4335 0, /* properties_destroyed */
4336 0, /* todo_flags_start */
4337 TODO_ggc_collect /* todo_flags_finish */
4342 static unsigned int
4343 rest_of_handle_shorten_branches (void)
4345 /* Shorten branches. */
4346 shorten_branches (get_insns ());
4347 return 0;
4350 struct rtl_opt_pass pass_shorten_branches =
4353 RTL_PASS,
4354 "shorten", /* name */
4355 OPTGROUP_NONE, /* optinfo_flags */
4356 NULL, /* gate */
4357 rest_of_handle_shorten_branches, /* execute */
4358 NULL, /* sub */
4359 NULL, /* next */
4360 0, /* static_pass_number */
4361 TV_SHORTEN_BRANCH, /* tv_id */
4362 0, /* properties_required */
4363 0, /* properties_provided */
4364 0, /* properties_destroyed */
4365 0, /* todo_flags_start */
4366 0 /* todo_flags_finish */
4371 static unsigned int
4372 rest_of_clean_state (void)
4374 rtx insn, next;
4375 FILE *final_output = NULL;
4376 int save_unnumbered = flag_dump_unnumbered;
4377 int save_noaddr = flag_dump_noaddr;
4379 if (flag_dump_final_insns)
4381 final_output = fopen (flag_dump_final_insns, "a");
4382 if (!final_output)
4384 error ("could not open final insn dump file %qs: %m",
4385 flag_dump_final_insns);
4386 flag_dump_final_insns = NULL;
4388 else
4390 flag_dump_noaddr = flag_dump_unnumbered = 1;
4391 if (flag_compare_debug_opt || flag_compare_debug)
4392 dump_flags |= TDF_NOUID;
4393 dump_function_header (final_output, current_function_decl,
4394 dump_flags);
4395 final_insns_dump_p = true;
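/* Normalize insn UIDs so the dump does not depend on how UIDs happened
   to be assigned (they differ, for instance, between the -g and -g0
   compiles that -fcompare-debug compares): labels keep their label
   number, every other insn gets UID 0.  */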
4397 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4398 if (LABEL_P (insn))
4399 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4400 else
4402 if (NOTE_P (insn))
4403 set_block_for_insn (insn, NULL);
4404 INSN_UID (insn) = 0;
4409 /* It is very important to decompose the RTL instruction chain here:
4410 debug information keeps pointing into CODE_LABEL insns inside the function
4411 body. If these remain pointing to the other insns, we end up preserving the
4412 whole RTL chain and its attached detailed debug info in memory. */
4413 for (insn = get_insns (); insn; insn = next)
4415 next = NEXT_INSN (insn);
4416 NEXT_INSN (insn) = NULL;
4417 PREV_INSN (insn) = NULL;
4419 if (final_output
4420 && (!NOTE_P (insn) ||
4421 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4422 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4423 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4424 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4425 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4426 print_rtl_single (final_output, insn);
4429 if (final_output)
4431 flag_dump_noaddr = save_noaddr;
4432 flag_dump_unnumbered = save_unnumbered;
4433 final_insns_dump_p = false;
4435 if (fclose (final_output))
4437 error ("could not close final insn dump file %qs: %m",
4438 flag_dump_final_insns);
4439 flag_dump_final_insns = NULL;
4443 /* In case the function was not output,
4444 don't leave any temporary anonymous types
4445 queued up for sdb output. */
4446 #ifdef SDB_DEBUGGING_INFO
4447 if (write_symbols == SDB_DEBUG)
4448 sdbout_types (NULL_TREE);
4449 #endif
4451 flag_rerun_cse_after_global_opts = 0;
4452 reload_completed = 0;
4453 epilogue_completed = 0;
4454 #ifdef STACK_REGS
4455 regstack_completed = 0;
4456 #endif
4458 /* Clear out the insn_length contents now that they are no
4459 longer valid. */
4460 init_insn_lengths ();
4462 /* Show no temporary slots allocated. */
4463 init_temp_slots ();
4465 free_bb_for_insn ();
4467 delete_tree_ssa ();
4469 /* We can reduce the stack alignment at call sites only when we are sure that
4470 the function body just produced will actually be used in the final
4471 executable. */
4472 if (decl_binds_to_current_def_p (current_function_decl))
4474 unsigned int pref = crtl->preferred_stack_boundary;
4475 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4476 pref = crtl->stack_alignment_needed;
4477 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4478 = pref;
4481 /* Make sure volatile mem refs aren't considered valid operands for
4482 arithmetic insns. We must call this here if this is a nested inline
4483 function, since the above code leaves us in the init_recog state,
4484 and the function context push/pop code does not save/restore volatile_ok.
4486 ??? Maybe it isn't necessary for expand_start_function to call this
4487 anymore if we do it here? */
4489 init_recog_no_volatile ();
4491 /* We're done with this function. Free up memory if we can. */
4492 free_after_parsing (cfun);
4493 free_after_compilation (cfun);
4494 return 0;
4497 struct rtl_opt_pass pass_clean_state =
4500 RTL_PASS,
4501 "*clean_state", /* name */
4502 OPTGROUP_NONE, /* optinfo_flags */
4503 NULL, /* gate */
4504 rest_of_clean_state, /* execute */
4505 NULL, /* sub */
4506 NULL, /* next */
4507 0, /* static_pass_number */
4508 TV_FINAL, /* tv_id */
4509 0, /* properties_required */
4510 0, /* properties_provided */
4511 PROP_rtl, /* properties_destroyed */
4512 0, /* todo_flags_start */
4513 0 /* todo_flags_finish */