1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
43 The code for the function prologue and epilogue are generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
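/* Illustrative sketch (not part of this file): a driver of this pass uses
   the three entry points named above roughly as follows, with FIRST the
   first insn of the function's RTL and FILE the assembler output stream;
   the call site and argument values are assumptions for illustration only.

       final_start_function (first, file, optimize);
       final (first, file, optimize);
       final_end_function ();  */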
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74 #include "tree-pass.h"
75 #include "timevar.h"
76 #include "cgraph.h"
77 #include "coverage.h"
78 #include "df.h"
79 #include "vecprim.h"
80 #include "ggc.h"
81 #include "cfgloop.h"
82 #include "params.h"
84 #ifdef XCOFF_DEBUGGING_INFO
85 #include "xcoffout.h" /* Needed for external data
86 declarations for e.g. AIX 4.x. */
87 #endif
89 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
90 #include "dwarf2out.h"
91 #endif
93 #ifdef DBX_DEBUGGING_INFO
94 #include "dbxout.h"
95 #endif
97 #ifdef SDB_DEBUGGING_INFO
98 #include "sdbout.h"
99 #endif
101 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
102 null default for it to save conditionalization later. */
103 #ifndef CC_STATUS_INIT
104 #define CC_STATUS_INIT
105 #endif
107 /* How to start an assembler comment. */
108 #ifndef ASM_COMMENT_START
109 #define ASM_COMMENT_START ";#"
110 #endif
112 /* Is the given character a logical line separator for the assembler? */
113 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
114 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
115 #endif
117 #ifndef JUMP_TABLES_IN_TEXT_SECTION
118 #define JUMP_TABLES_IN_TEXT_SECTION 0
119 #endif
121 /* Bitflags used by final_scan_insn. */
122 #define SEEN_BB 1
123 #define SEEN_NOTE 2
124 #define SEEN_EMITTED 4
126 /* Last insn processed by final_scan_insn. */
127 static rtx debug_insn;
128 rtx current_output_insn;
130 /* Line number of last NOTE. */
131 static int last_linenum;
133 /* Highest line number in current block. */
134 static int high_block_linenum;
136 /* Likewise for function. */
137 static int high_function_linenum;
139 /* Filename of last NOTE. */
140 static const char *last_filename;
142 /* Override filename and line number. */
143 static const char *override_filename;
144 static int override_linenum;
146 /* Whether to force emission of a line note before the next insn. */
147 static bool force_source_line = false;
149 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
151 /* Nonzero while outputting an `asm' with operands.
152 This means that inconsistencies are the user's fault, so don't die.
153 The precise value is the insn being output, to pass to error_for_asm. */
154 rtx this_is_asm_operands;
156 /* Number of operands of this insn, for an `asm' with operands. */
157 static unsigned int insn_noperands;
159 /* Compare optimization flag. */
161 static rtx last_ignored_compare = 0;
163 /* Assign a unique number to each insn that is output.
164 This can be used to generate unique local labels. */
166 static int insn_counter = 0;
168 #ifdef HAVE_cc0
169 /* This variable contains machine-dependent flags (defined in tm.h)
170 set and examined by output routines
171 that describe how to interpret the condition codes properly. */
173 CC_STATUS cc_status;
175 /* During output of an insn, this contains a copy of cc_status
176 from before the insn. */
178 CC_STATUS cc_prev_status;
179 #endif
181 /* Nonzero means current function must be given a frame pointer.
182 Initialized in function.c to 0. Set only in reload1.c as per
183 the needs of the function. */
185 int frame_pointer_needed;
187 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
189 static int block_depth;
191 /* Nonzero if have enabled APP processing of our assembler output. */
193 static int app_on;
195 /* If we are outputting an insn sequence, this contains the sequence rtx.
196 Zero otherwise. */
198 rtx final_sequence;
200 #ifdef ASSEMBLER_DIALECT
202 /* Number of the assembler dialect to use, starting at 0. */
203 static int dialect_number;
204 #endif
206 #ifdef HAVE_conditional_execution
207 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
208 rtx current_insn_predicate;
209 #endif
211 #ifdef HAVE_ATTR_length
212 static int asm_insn_count (rtx);
213 #endif
214 static void profile_function (FILE *);
215 static void profile_after_prologue (FILE *);
216 static bool notice_source_line (rtx);
217 static rtx walk_alter_subreg (rtx *, bool *);
218 static void output_asm_name (void);
219 static void output_alternate_entry_point (FILE *, rtx);
220 static tree get_mem_expr_from_op (rtx, int *);
221 static void output_asm_operand_names (rtx *, int *, int);
222 static void output_operand (rtx, int);
223 #ifdef LEAF_REGISTERS
224 static void leaf_renumber_regs (rtx);
225 #endif
226 #ifdef HAVE_cc0
227 static int alter_cond (rtx);
228 #endif
229 #ifndef ADDR_VEC_ALIGN
230 static int final_addr_vec_align (rtx);
231 #endif
232 #ifdef HAVE_ATTR_length
233 static int align_fuzz (rtx, rtx, int, unsigned);
234 #endif
236 /* Initialize data in final at the beginning of a compilation. */
238 void
239 init_final (const char *filename ATTRIBUTE_UNUSED)
241 app_on = 0;
242 final_sequence = 0;
244 #ifdef ASSEMBLER_DIALECT
245 dialect_number = ASSEMBLER_DIALECT;
246 #endif
249 /* Default target function prologue and epilogue assembler output.
251 If not overridden for epilogue code, then the function body itself
252 contains return instructions wherever needed. */
253 void
254 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
255 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
259 /* Default target hook that outputs nothing to a stream. */
260 void
261 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
265 /* Enable APP processing of subsequent output.
266 Used before the output from an `asm' statement. */
268 void
269 app_enable (void)
271 if (! app_on)
273 fputs (ASM_APP_ON, asm_out_file);
274 app_on = 1;
278 /* Disable APP processing of subsequent output.
279 Called from varasm.c before most kinds of output. */
281 void
282 app_disable (void)
284 if (app_on)
286 fputs (ASM_APP_OFF, asm_out_file);
287 app_on = 0;
291 /* Return the number of slots filled in the current
292 delayed branch sequence (we don't count the insn needing the
293 delay slot). Zero if not in a delayed branch sequence. */
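/* Illustrative example (not from the original source): if final_sequence
   holds a SEQUENCE of a branch followed by two delay-slot insns,
   XVECLEN (final_sequence, 0) is 3 and this function returns 2.  */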
295 #ifdef DELAY_SLOTS
297 dbr_sequence_length (void)
299 if (final_sequence != 0)
300 return XVECLEN (final_sequence, 0) - 1;
301 else
302 return 0;
304 #endif
306 /* The next two pages contain routines used to compute the length of an insn
307 and to shorten branches. */
309 /* Arrays for insn lengths, and addresses. The latter is referenced by
310 `insn_current_length'. */
312 static int *insn_lengths;
314 VEC(int,heap) *insn_addresses_;
316 /* Max uid for which the above arrays are valid. */
317 static int insn_lengths_max_uid;
319 /* Address of insn being processed. Used by `insn_current_length'. */
320 int insn_current_address;
322 /* Address of insn being processed in previous iteration. */
323 int insn_last_address;
325 /* Known invariant alignment of insn being processed. */
326 int insn_current_align;
328 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
329 gives the next following alignment insn that increases the known
330 alignment, or NULL_RTX if there is no such insn.
331 For any alignment obtained this way, we can again index uid_align with
332 its uid to obtain the next following align that in turn increases the
333 alignment, till we reach NULL_RTX; the sequence obtained this way
334 for each insn we'll call the alignment chain of this insn in the following
335 comments. */
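/* Illustrative sketch (an addition, not part of the original comment): once
   uid_align is filled in, the alignment chain of INSN can be walked by
   re-indexing the array until NULL_RTX is reached, e.g.

       for (a = uid_align[INSN_UID (insn)]; a; a = uid_align[INSN_UID (a)])
         ... examine LABEL_TO_ALIGNMENT (a) ...

   which is exactly the traversal align_fuzz performs below.  */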
337 struct label_alignment
339 short alignment;
340 short max_skip;
343 static rtx *uid_align;
344 static int *uid_shuid;
345 static struct label_alignment *label_align;
347 /* Indicate that branch shortening hasn't yet been done. */
349 void
350 init_insn_lengths (void)
352 if (uid_shuid)
354 free (uid_shuid);
355 uid_shuid = 0;
357 if (insn_lengths)
359 free (insn_lengths);
360 insn_lengths = 0;
361 insn_lengths_max_uid = 0;
363 #ifdef HAVE_ATTR_length
364 INSN_ADDRESSES_FREE ();
365 #endif
366 if (uid_align)
368 free (uid_align);
369 uid_align = 0;
373 /* Obtain the current length of an insn. If branch shortening has been done,
374 get its actual length. Otherwise, use FALLBACK_FN to calculate the
375 length. */
376 static inline int
377 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
378 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
380 #ifdef HAVE_ATTR_length
381 rtx body;
382 int i;
383 int length = 0;
385 if (insn_lengths_max_uid > INSN_UID (insn))
386 return insn_lengths[INSN_UID (insn)];
387 else
388 switch (GET_CODE (insn))
390 case NOTE:
391 case BARRIER:
392 case CODE_LABEL:
393 return 0;
395 case CALL_INSN:
396 length = fallback_fn (insn);
397 break;
399 case JUMP_INSN:
400 body = PATTERN (insn);
401 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 /* Alignment is machine-dependent and should be handled by
404 ADDR_VEC_ALIGN. */
406 else
407 length = fallback_fn (insn);
408 break;
410 case INSN:
411 body = PATTERN (insn);
412 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
413 return 0;
415 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
416 length = asm_insn_count (body) * fallback_fn (insn);
417 else if (GET_CODE (body) == SEQUENCE)
418 for (i = 0; i < XVECLEN (body, 0); i++)
419 length += get_attr_length (XVECEXP (body, 0, i));
420 else
421 length = fallback_fn (insn);
422 break;
424 default:
425 break;
428 #ifdef ADJUST_INSN_LENGTH
429 ADJUST_INSN_LENGTH (insn, length);
430 #endif
431 return length;
432 #else /* not HAVE_ATTR_length */
433 return 0;
434 #define insn_default_length 0
435 #define insn_min_length 0
436 #endif /* not HAVE_ATTR_length */
439 /* Obtain the current length of an insn. If branch shortening has been done,
440 get its actual length. Otherwise, get its maximum length. */
442 get_attr_length (rtx insn)
444 return get_attr_length_1 (insn, insn_default_length);
447 /* Obtain the current length of an insn. If branch shortening has been done,
448 get its actual length. Otherwise, get its minimum length. */
450 get_attr_min_length (rtx insn)
452 return get_attr_length_1 (insn, insn_min_length);
455 /* Code to handle alignment inside shorten_branches. */
457 /* Here is an explanation of how the algorithm in align_fuzz can give
458 proper results:
460 Call a sequence of instructions beginning with alignment point X
461 and continuing until the next alignment point `block X'. When `X'
462 is used in an expression, it means the alignment value of the
463 alignment point.
465 Call the distance between the start of the first insn of block X, and
466 the end of the last insn of block X `IX', for the `inner size of X'.
467 This is clearly the sum of the instruction lengths.
469 Likewise with the next alignment-delimited block following X, which we
470 shall call block Y.
472 Call the distance between the start of the first insn of block X, and
473 the start of the first insn of block Y `OX', for the `outer size of X'.
475 The estimated padding is then OX - IX.
477 OX can be safely estimated as
479 if (X >= Y)
480 OX = round_up(IX, Y)
481 else
482 OX = round_up(IX, X) + Y - X
484 Clearly est(IX) >= real(IX), because that only depends on the
485 instruction lengths, and those being overestimated is a given.
487 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
488 we needn't worry about that when thinking about OX.
490 When X >= Y, the alignment provided by Y adds no uncertainty factor
491 for branch ranges starting before X, so we can just round what we have.
492 But when X < Y, we don't know anything about the, so to speak,
493 `middle bits', so we have to assume the worst when aligning up from an
494 address mod X to one mod Y, which is Y - X. */
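/* Worked example (illustrative numbers, not from the original comment):
   with IX = 10, X = 4 and Y = 8 we have X < Y, so
   OX = round_up (10, 4) + 8 - 4 = 16 and up to OX - IX = 6 bytes of padding
   must be assumed; with X = 8 and Y = 4 instead, X >= Y and
   OX = round_up (10, 4) = 12, leaving at most 2 bytes of padding.  */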
496 #ifndef LABEL_ALIGN
497 #define LABEL_ALIGN(LABEL) align_labels_log
498 #endif
500 #ifndef LABEL_ALIGN_MAX_SKIP
501 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
502 #endif
504 #ifndef LOOP_ALIGN
505 #define LOOP_ALIGN(LABEL) align_loops_log
506 #endif
508 #ifndef LOOP_ALIGN_MAX_SKIP
509 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
510 #endif
512 #ifndef LABEL_ALIGN_AFTER_BARRIER
513 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
514 #endif
516 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
517 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
518 #endif
520 #ifndef JUMP_ALIGN
521 #define JUMP_ALIGN(LABEL) align_jumps_log
522 #endif
524 #ifndef JUMP_ALIGN_MAX_SKIP
525 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
526 #endif
528 #ifndef ADDR_VEC_ALIGN
529 static int
530 final_addr_vec_align (rtx addr_vec)
532 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
534 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
535 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
536 return exact_log2 (align);
540 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
541 #endif
543 #ifndef INSN_LENGTH_ALIGNMENT
544 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
545 #endif
547 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
549 static int min_labelno, max_labelno;
551 #define LABEL_TO_ALIGNMENT(LABEL) \
552 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
554 #define LABEL_TO_MAX_SKIP(LABEL) \
555 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
557 /* For the benefit of port specific code do this also as a function. */
560 label_to_alignment (rtx label)
562 return LABEL_TO_ALIGNMENT (label);
565 #ifdef HAVE_ATTR_length
566 /* The differences in addresses
567 between a branch and its target might grow or shrink depending on
568 the alignment the start insn of the range (the branch for a forward
569 branch or the label for a backward branch) starts out on; if these
570 differences are used naively, they can even oscillate infinitely.
571 We therefore want to compute a 'worst case' address difference that
572 is independent of the alignment the start insn of the range ends
573 up on, and that is at least as large as the actual difference.
574 The function align_fuzz calculates the amount we have to add to the
575 naively computed difference, by traversing the part of the alignment
576 chain of the start insn of the range that is in front of the end insn
577 of the range, and considering for each alignment the maximum amount
578 that it might contribute to a size increase.
580 For casesi tables, we also want to know worst case minimum amounts of
581 address difference, in case a machine description wants to introduce
582 some common offset that is added to all offsets in a table.
583 For this purpose, align_fuzz with a growth argument of 0 computes the
584 appropriate adjustment. */
586 /* Compute the maximum delta by which the difference of the addresses of
587 START and END might grow / shrink due to a different address for start
588 which changes the size of alignment insns between START and END.
589 KNOWN_ALIGN_LOG is the alignment known for START.
590 GROWTH should be ~0 if the objective is to compute potential code size
591 increase, and 0 if the objective is to compute potential shrink.
592 The return value is undefined for any other value of GROWTH. */
594 static int
595 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
597 int uid = INSN_UID (start);
598 rtx align_label;
599 int known_align = 1 << known_align_log;
600 int end_shuid = INSN_SHUID (end);
601 int fuzz = 0;
603 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
605 int align_addr, new_align;
607 uid = INSN_UID (align_label);
608 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
609 if (uid_shuid[uid] > end_shuid)
610 break;
611 known_align_log = LABEL_TO_ALIGNMENT (align_label);
612 new_align = 1 << known_align_log;
613 if (new_align < known_align)
614 continue;
615 fuzz += (-align_addr ^ growth) & (new_align - known_align);
616 known_align = new_align;
618 return fuzz;
621 /* Compute a worst-case reference address of a branch so that it
622 can be safely used in the presence of aligned labels. Since the
623 size of the branch itself is unknown, the size of the branch is
624 not included in the range. I.e. for a forward branch, the reference
625 address is the end address of the branch as known from the previous
626 branch shortening pass, minus a value to account for possible size
627 increase due to alignment. For a backward branch, it is the start
628 address of the branch as known from the current pass, plus a value
629 to account for possible size increase due to alignment.
630 NB.: Therefore, the maximum offset allowed for backward branches needs
631 to exclude the branch size. */
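/* Illustrative numbers (assumptions, not from the original comment): if a
   forward branch ended at address 100 on the previous pass and the
   align_fuzz value over the range up to its target is 6, the worst-case
   reference address used for range checks is 100 - 6 = 94; for a backward
   branch starting at address 100 with a fuzz of 6 toward its label, it is
   100 + 6 = 106.  */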
634 insn_current_reference_address (rtx branch)
636 rtx dest, seq;
637 int seq_uid;
639 if (! INSN_ADDRESSES_SET_P ())
640 return 0;
642 seq = NEXT_INSN (PREV_INSN (branch));
643 seq_uid = INSN_UID (seq);
644 if (!JUMP_P (branch))
645 /* This can happen for example on the PA; the objective is to know the
646 offset to address something in front of the start of the function.
647 Thus, we can treat it like a backward branch.
648 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
649 any alignment we'd encounter, so we skip the call to align_fuzz. */
650 return insn_current_address;
651 dest = JUMP_LABEL (branch);
653 /* BRANCH has no proper alignment chain set, so use SEQ.
654 BRANCH also has no INSN_SHUID. */
655 if (INSN_SHUID (seq) < INSN_SHUID (dest))
657 /* Forward branch. */
658 return (insn_last_address + insn_lengths[seq_uid]
659 - align_fuzz (seq, dest, length_unit_log, ~0));
661 else
663 /* Backward branch. */
664 return (insn_current_address
665 + align_fuzz (dest, seq, length_unit_log, ~0));
668 #endif /* HAVE_ATTR_length */
670 /* Compute branch alignments based on frequency information in the
671 CFG. */
673 static unsigned int
674 compute_alignments (void)
676 int log, max_skip, max_log;
677 basic_block bb;
678 int freq_max = 0;
679 int freq_threshold = 0;
681 if (label_align)
683 free (label_align);
684 label_align = 0;
687 max_labelno = max_label_num ();
688 min_labelno = get_first_label_num ();
689 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
691 /* If not optimizing or optimizing for size, don't assign any alignments. */
692 if (! optimize || optimize_size)
693 return 0;
695 if (dump_file)
697 dump_flow_info (dump_file, TDF_DETAILS);
698 flow_loops_dump (dump_file, NULL, 1);
699 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
701 FOR_EACH_BB (bb)
702 if (bb->frequency > freq_max)
703 freq_max = bb->frequency;
704 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
706 if (dump_file)
707 fprintf(dump_file, "freq_max: %i\n",freq_max);
708 FOR_EACH_BB (bb)
710 rtx label = BB_HEAD (bb);
711 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
712 edge e;
713 edge_iterator ei;
715 if (!LABEL_P (label)
716 || probably_never_executed_bb_p (bb))
718 if (dump_file)
719 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
720 bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
721 continue;
723 max_log = LABEL_ALIGN (label);
724 max_skip = LABEL_ALIGN_MAX_SKIP;
726 FOR_EACH_EDGE (e, ei, bb->preds)
728 if (e->flags & EDGE_FALLTHRU)
729 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
730 else
731 branch_frequency += EDGE_FREQUENCY (e);
733 if (dump_file)
735 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
736 bb->index, bb->frequency, bb->loop_father->num,
737 bb->loop_depth,
738 fallthru_frequency, branch_frequency);
739 if (!bb->loop_father->inner && bb->loop_father->num)
740 fprintf (dump_file, " inner_loop");
741 if (bb->loop_father->header == bb)
742 fprintf (dump_file, " loop_header");
743 fprintf (dump_file, "\n");
746 /* There are two purposes to aligning a block with no fallthru incoming edge:
747 1) to avoid fetch stalls when branch destination is near cache boundary
748 2) to improve cache efficiency in case the previous block is not executed
749 (so it does not need to be in the cache).
751 To catch the first case, we align frequently executed blocks.
752 To catch the second, we align blocks that are executed more frequently
753 than the predecessor and the predecessor is likely to not be executed
754 when the function is called. */
756 if (!has_fallthru
757 && (branch_frequency > freq_threshold
758 || (bb->frequency > bb->prev_bb->frequency * 10
759 && (bb->prev_bb->frequency
760 <= ENTRY_BLOCK_PTR->frequency / 2))))
762 log = JUMP_ALIGN (label);
763 if (dump_file)
764 fprintf(dump_file, " jump alignment added.\n");
765 if (max_log < log)
767 max_log = log;
768 max_skip = JUMP_ALIGN_MAX_SKIP;
771 /* In case the block is frequent and reached mostly by non-fallthru edges,
772 align it. It is most likely the first block of a loop. */
773 if (has_fallthru
774 && maybe_hot_bb_p (bb)
775 && branch_frequency + fallthru_frequency > freq_threshold
776 && (branch_frequency
777 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
779 log = LOOP_ALIGN (label);
780 if (dump_file)
781 fprintf(dump_file, " internal loop alignment added.\n");
782 if (max_log < log)
784 max_log = log;
785 max_skip = LOOP_ALIGN_MAX_SKIP;
788 LABEL_TO_ALIGNMENT (label) = max_log;
789 LABEL_TO_MAX_SKIP (label) = max_skip;
792 if (dump_file)
793 loop_optimizer_finalize ();
794 return 0;
797 struct tree_opt_pass pass_compute_alignments =
799 "alignments", /* name */
800 NULL, /* gate */
801 compute_alignments, /* execute */
802 NULL, /* sub */
803 NULL, /* next */
804 0, /* static_pass_number */
805 0, /* tv_id */
806 0, /* properties_required */
807 0, /* properties_provided */
808 0, /* properties_destroyed */
809 0, /* todo_flags_start */
810 TODO_dump_func | TODO_verify_rtl_sharing
811 | TODO_ggc_collect, /* todo_flags_finish */
812 0 /* letter */
816 /* Make a pass over all insns and compute their actual lengths by shortening
817 any branches of variable length if possible. */
819 /* shorten_branches might be called multiple times: for example, the SH
820 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
821 In order to do this, it needs proper length information, which it obtains
822 by calling shorten_branches. This cannot be collapsed with
823 shorten_branches itself into a single pass unless we also want to integrate
824 reorg.c, since the branch splitting exposes new instructions with delay
825 slots. */
827 void
828 shorten_branches (rtx first ATTRIBUTE_UNUSED)
830 rtx insn;
831 int max_uid;
832 int i;
833 int max_log;
834 int max_skip;
835 #ifdef HAVE_ATTR_length
836 #define MAX_CODE_ALIGN 16
837 rtx seq;
838 int something_changed = 1;
839 char *varying_length;
840 rtx body;
841 int uid;
842 rtx align_tab[MAX_CODE_ALIGN];
844 #endif
846 /* Compute maximum UID and allocate label_align / uid_shuid. */
847 max_uid = get_max_uid ();
849 /* Free uid_shuid before reallocating it. */
850 free (uid_shuid);
852 uid_shuid = XNEWVEC (int, max_uid);
854 if (max_labelno != max_label_num ())
856 int old = max_labelno;
857 int n_labels;
858 int n_old_labels;
860 max_labelno = max_label_num ();
862 n_labels = max_labelno - min_labelno + 1;
863 n_old_labels = old - min_labelno + 1;
865 label_align = xrealloc (label_align,
866 n_labels * sizeof (struct label_alignment));
868 /* The range of labels grows monotonically in the function. Failing here
869 means that the initialization of the array got lost. */
870 gcc_assert (n_old_labels <= n_labels);
872 memset (label_align + n_old_labels, 0,
873 (n_labels - n_old_labels) * sizeof (struct label_alignment));
876 /* Initialize label_align and set up uid_shuid to be strictly
877 monotonically rising with insn order. */
878 /* We use max_log here to keep track of the maximum alignment we want to
879 impose on the next CODE_LABEL (or the current one if we are processing
880 the CODE_LABEL itself). */
882 max_log = 0;
883 max_skip = 0;
885 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
887 int log;
889 INSN_SHUID (insn) = i++;
890 if (INSN_P (insn))
891 continue;
893 if (LABEL_P (insn))
895 rtx next;
897 /* Merge in alignments computed by compute_alignments. */
898 log = LABEL_TO_ALIGNMENT (insn);
899 if (max_log < log)
901 max_log = log;
902 max_skip = LABEL_TO_MAX_SKIP (insn);
905 log = LABEL_ALIGN (insn);
906 if (max_log < log)
908 max_log = log;
909 max_skip = LABEL_ALIGN_MAX_SKIP;
911 next = next_nonnote_insn (insn);
912 /* ADDR_VECs only take room if read-only data goes into the text
913 section. */
914 if (JUMP_TABLES_IN_TEXT_SECTION
915 || readonly_data_section == text_section)
916 if (next && JUMP_P (next))
918 rtx nextbody = PATTERN (next);
919 if (GET_CODE (nextbody) == ADDR_VEC
920 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
922 log = ADDR_VEC_ALIGN (next);
923 if (max_log < log)
925 max_log = log;
926 max_skip = LABEL_ALIGN_MAX_SKIP;
930 LABEL_TO_ALIGNMENT (insn) = max_log;
931 LABEL_TO_MAX_SKIP (insn) = max_skip;
932 max_log = 0;
933 max_skip = 0;
935 else if (BARRIER_P (insn))
937 rtx label;
939 for (label = insn; label && ! INSN_P (label);
940 label = NEXT_INSN (label))
941 if (LABEL_P (label))
943 log = LABEL_ALIGN_AFTER_BARRIER (insn);
944 if (max_log < log)
946 max_log = log;
947 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
949 break;
953 #ifdef HAVE_ATTR_length
955 /* Allocate the rest of the arrays. */
956 insn_lengths = XNEWVEC (int, max_uid);
957 insn_lengths_max_uid = max_uid;
958 /* Syntax errors can lead to labels being outside of the main insn stream.
959 Initialize insn_addresses, so that we get reproducible results. */
960 INSN_ADDRESSES_ALLOC (max_uid);
962 varying_length = XCNEWVEC (char, max_uid);
964 /* Initialize uid_align. We scan instructions
965 from end to start, and keep in align_tab[n] the last seen insn
966 that does an alignment of at least n+1, i.e. the successor
967 in the alignment chain for an insn that does / has a known
968 alignment of n. */
969 uid_align = XCNEWVEC (rtx, max_uid);
971 for (i = MAX_CODE_ALIGN; --i >= 0;)
972 align_tab[i] = NULL_RTX;
973 seq = get_last_insn ();
974 for (; seq; seq = PREV_INSN (seq))
976 int uid = INSN_UID (seq);
977 int log;
978 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
979 uid_align[uid] = align_tab[0];
980 if (log)
982 /* Found an alignment label. */
983 uid_align[uid] = align_tab[log];
984 for (i = log - 1; i >= 0; i--)
985 align_tab[i] = seq;
988 #ifdef CASE_VECTOR_SHORTEN_MODE
989 if (optimize)
991 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
992 label fields. */
994 int min_shuid = INSN_SHUID (get_insns ()) - 1;
995 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
996 int rel;
998 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1000 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1001 int len, i, min, max, insn_shuid;
1002 int min_align;
1003 addr_diff_vec_flags flags;
1005 if (!JUMP_P (insn)
1006 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1007 continue;
1008 pat = PATTERN (insn);
1009 len = XVECLEN (pat, 1);
1010 gcc_assert (len > 0);
1011 min_align = MAX_CODE_ALIGN;
1012 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1014 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1015 int shuid = INSN_SHUID (lab);
1016 if (shuid < min)
1018 min = shuid;
1019 min_lab = lab;
1021 if (shuid > max)
1023 max = shuid;
1024 max_lab = lab;
1026 if (min_align > LABEL_TO_ALIGNMENT (lab))
1027 min_align = LABEL_TO_ALIGNMENT (lab);
1029 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1030 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1031 insn_shuid = INSN_SHUID (insn);
1032 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1033 memset (&flags, 0, sizeof (flags));
1034 flags.min_align = min_align;
1035 flags.base_after_vec = rel > insn_shuid;
1036 flags.min_after_vec = min > insn_shuid;
1037 flags.max_after_vec = max > insn_shuid;
1038 flags.min_after_base = min > rel;
1039 flags.max_after_base = max > rel;
1040 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1043 #endif /* CASE_VECTOR_SHORTEN_MODE */
1045 /* Compute initial lengths, addresses, and varying flags for each insn. */
1046 for (insn_current_address = 0, insn = first;
1047 insn != 0;
1048 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1050 uid = INSN_UID (insn);
1052 insn_lengths[uid] = 0;
1054 if (LABEL_P (insn))
1056 int log = LABEL_TO_ALIGNMENT (insn);
1057 if (log)
1059 int align = 1 << log;
1060 int new_address = (insn_current_address + align - 1) & -align;
1061 insn_lengths[uid] = new_address - insn_current_address;
1065 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1067 if (NOTE_P (insn) || BARRIER_P (insn)
1068 || LABEL_P (insn))
1069 continue;
1070 if (INSN_DELETED_P (insn))
1071 continue;
1073 body = PATTERN (insn);
1074 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1076 /* This only takes room if read-only data goes into the text
1077 section. */
1078 if (JUMP_TABLES_IN_TEXT_SECTION
1079 || readonly_data_section == text_section)
1080 insn_lengths[uid] = (XVECLEN (body,
1081 GET_CODE (body) == ADDR_DIFF_VEC)
1082 * GET_MODE_SIZE (GET_MODE (body)));
1083 /* Alignment is handled by ADDR_VEC_ALIGN. */
1085 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1086 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1087 else if (GET_CODE (body) == SEQUENCE)
1089 int i;
1090 int const_delay_slots;
1091 #ifdef DELAY_SLOTS
1092 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1093 #else
1094 const_delay_slots = 0;
1095 #endif
1096 /* Inside a delay slot sequence, we do not do any branch shortening
1097 if the shortening could change the number of delay slots
1098 of the branch. */
1099 for (i = 0; i < XVECLEN (body, 0); i++)
1101 rtx inner_insn = XVECEXP (body, 0, i);
1102 int inner_uid = INSN_UID (inner_insn);
1103 int inner_length;
1105 if (GET_CODE (body) == ASM_INPUT
1106 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1107 inner_length = (asm_insn_count (PATTERN (inner_insn))
1108 * insn_default_length (inner_insn));
1109 else
1110 inner_length = insn_default_length (inner_insn);
1112 insn_lengths[inner_uid] = inner_length;
1113 if (const_delay_slots)
1115 if ((varying_length[inner_uid]
1116 = insn_variable_length_p (inner_insn)) != 0)
1117 varying_length[uid] = 1;
1118 INSN_ADDRESSES (inner_uid) = (insn_current_address
1119 + insn_lengths[uid]);
1121 else
1122 varying_length[inner_uid] = 0;
1123 insn_lengths[uid] += inner_length;
1126 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1128 insn_lengths[uid] = insn_default_length (insn);
1129 varying_length[uid] = insn_variable_length_p (insn);
1132 /* If needed, do any adjustment. */
1133 #ifdef ADJUST_INSN_LENGTH
1134 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1135 if (insn_lengths[uid] < 0)
1136 fatal_insn ("negative insn length", insn);
1137 #endif
1140 /* Now loop over all the insns finding varying length insns. For each,
1141 get the current insn length. If it has changed, reflect the change.
1142 When nothing changes for a full pass, we are done. */
1144 while (something_changed)
1146 something_changed = 0;
1147 insn_current_align = MAX_CODE_ALIGN - 1;
1148 for (insn_current_address = 0, insn = first;
1149 insn != 0;
1150 insn = NEXT_INSN (insn))
1152 int new_length;
1153 #ifdef ADJUST_INSN_LENGTH
1154 int tmp_length;
1155 #endif
1156 int length_align;
1158 uid = INSN_UID (insn);
1160 if (LABEL_P (insn))
1162 int log = LABEL_TO_ALIGNMENT (insn);
1163 if (log > insn_current_align)
1165 int align = 1 << log;
1166 int new_address= (insn_current_address + align - 1) & -align;
1167 insn_lengths[uid] = new_address - insn_current_address;
1168 insn_current_align = log;
1169 insn_current_address = new_address;
1171 else
1172 insn_lengths[uid] = 0;
1173 INSN_ADDRESSES (uid) = insn_current_address;
1174 continue;
1177 length_align = INSN_LENGTH_ALIGNMENT (insn);
1178 if (length_align < insn_current_align)
1179 insn_current_align = length_align;
1181 insn_last_address = INSN_ADDRESSES (uid);
1182 INSN_ADDRESSES (uid) = insn_current_address;
1184 #ifdef CASE_VECTOR_SHORTEN_MODE
1185 if (optimize && JUMP_P (insn)
1186 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1188 rtx body = PATTERN (insn);
1189 int old_length = insn_lengths[uid];
1190 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1191 rtx min_lab = XEXP (XEXP (body, 2), 0);
1192 rtx max_lab = XEXP (XEXP (body, 3), 0);
1193 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1194 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1195 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1196 rtx prev;
1197 int rel_align = 0;
1198 addr_diff_vec_flags flags;
1200 /* Avoid automatic aggregate initialization. */
1201 flags = ADDR_DIFF_VEC_FLAGS (body);
1203 /* Try to find a known alignment for rel_lab. */
1204 for (prev = rel_lab;
1205 prev
1206 && ! insn_lengths[INSN_UID (prev)]
1207 && ! (varying_length[INSN_UID (prev)] & 1);
1208 prev = PREV_INSN (prev))
1209 if (varying_length[INSN_UID (prev)] & 2)
1211 rel_align = LABEL_TO_ALIGNMENT (prev);
1212 break;
1215 /* See the comment on addr_diff_vec_flags in rtl.h for the
1216 meaning of the flags values. base: REL_LAB vec: INSN */
1217 /* Anything after INSN still has addresses from the last
1218 pass; adjust these so that they reflect our current
1219 estimate for this pass. */
1220 if (flags.base_after_vec)
1221 rel_addr += insn_current_address - insn_last_address;
1222 if (flags.min_after_vec)
1223 min_addr += insn_current_address - insn_last_address;
1224 if (flags.max_after_vec)
1225 max_addr += insn_current_address - insn_last_address;
1226 /* We want to know the worst case, i.e. lowest possible value
1227 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1228 its offset is positive, and we have to be wary of code shrink;
1229 otherwise, it is negative, and we have to be wary of code
1230 size increase. */
1231 if (flags.min_after_base)
1233 /* If INSN is between REL_LAB and MIN_LAB, the size
1234 changes we are about to make can change the alignment
1235 within the observed offset, therefore we have to break
1236 it up into two parts that are independent. */
1237 if (! flags.base_after_vec && flags.min_after_vec)
1239 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1240 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1242 else
1243 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1245 else
1247 if (flags.base_after_vec && ! flags.min_after_vec)
1249 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1250 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1252 else
1253 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1255 /* Likewise, determine the highest possible value
1256 for the offset of MAX_LAB. */
1257 if (flags.max_after_base)
1259 if (! flags.base_after_vec && flags.max_after_vec)
1261 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1262 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1264 else
1265 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1267 else
1269 if (flags.base_after_vec && ! flags.max_after_vec)
1271 max_addr += align_fuzz (max_lab, insn, 0, 0);
1272 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1274 else
1275 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1277 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1278 max_addr - rel_addr,
1279 body));
1280 if (JUMP_TABLES_IN_TEXT_SECTION
1281 || readonly_data_section == text_section)
1283 insn_lengths[uid]
1284 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1285 insn_current_address += insn_lengths[uid];
1286 if (insn_lengths[uid] != old_length)
1287 something_changed = 1;
1290 continue;
1292 #endif /* CASE_VECTOR_SHORTEN_MODE */
1294 if (! (varying_length[uid]))
1296 if (NONJUMP_INSN_P (insn)
1297 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1299 int i;
1301 body = PATTERN (insn);
1302 for (i = 0; i < XVECLEN (body, 0); i++)
1304 rtx inner_insn = XVECEXP (body, 0, i);
1305 int inner_uid = INSN_UID (inner_insn);
1307 INSN_ADDRESSES (inner_uid) = insn_current_address;
1309 insn_current_address += insn_lengths[inner_uid];
1312 else
1313 insn_current_address += insn_lengths[uid];
1315 continue;
1318 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1320 int i;
1322 body = PATTERN (insn);
1323 new_length = 0;
1324 for (i = 0; i < XVECLEN (body, 0); i++)
1326 rtx inner_insn = XVECEXP (body, 0, i);
1327 int inner_uid = INSN_UID (inner_insn);
1328 int inner_length;
1330 INSN_ADDRESSES (inner_uid) = insn_current_address;
1332 /* insn_current_length returns 0 for insns with a
1333 non-varying length. */
1334 if (! varying_length[inner_uid])
1335 inner_length = insn_lengths[inner_uid];
1336 else
1337 inner_length = insn_current_length (inner_insn);
1339 if (inner_length != insn_lengths[inner_uid])
1341 insn_lengths[inner_uid] = inner_length;
1342 something_changed = 1;
1344 insn_current_address += insn_lengths[inner_uid];
1345 new_length += inner_length;
1348 else
1350 new_length = insn_current_length (insn);
1351 insn_current_address += new_length;
1354 #ifdef ADJUST_INSN_LENGTH
1355 /* If needed, do any adjustment. */
1356 tmp_length = new_length;
1357 ADJUST_INSN_LENGTH (insn, new_length);
1358 insn_current_address += (new_length - tmp_length);
1359 #endif
1361 if (new_length != insn_lengths[uid])
1363 insn_lengths[uid] = new_length;
1364 something_changed = 1;
1367 /* For a non-optimizing compile, do only a single pass. */
1368 if (!optimize)
1369 break;
1372 free (varying_length);
1374 #endif /* HAVE_ATTR_length */
1377 #ifdef HAVE_ATTR_length
1378 /* Given the body of an INSN known to be generated by an ASM statement, return
1379 the number of machine instructions likely to be generated for this insn.
1380 This is used to compute its length. */
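/* For example (illustrative, assuming the default separator ';'): the
   template "mov r0,r1; add r2,r3\n nop" contains one separator and one
   newline, so asm_insn_count returns 1 + 2 = 3 machine instructions.  */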
1382 static int
1383 asm_insn_count (rtx body)
1385 const char *template;
1386 int count = 1;
1388 if (GET_CODE (body) == ASM_INPUT)
1389 template = XSTR (body, 0);
1390 else
1391 template = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1393 for (; *template; template++)
1394 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template, template)
1395 || *template == '\n')
1396 count++;
1398 return count;
1400 #endif
1402 /* ??? This is probably the wrong place for these. */
1403 /* Structure recording the mapping from source file and directory
1404 names at compile time to those to be embedded in debug
1405 information. */
1406 typedef struct debug_prefix_map
1408 const char *old_prefix;
1409 const char *new_prefix;
1410 size_t old_len;
1411 size_t new_len;
1412 struct debug_prefix_map *next;
1413 } debug_prefix_map;
1415 /* Linked list of such structures. */
1416 debug_prefix_map *debug_prefix_maps;
1419 /* Record a debug file prefix mapping. ARG is the argument to
1420 -fdebug-prefix-map and must be of the form OLD=NEW. */
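/* For instance (hypothetical paths, for illustration only):
   -fdebug-prefix-map=/home/user/build=/usr/src stores old_prefix
   "/home/user/build" and new_prefix "/usr/src", so that remap_debug_filename
   below rewrites "/home/user/build/foo.c" into "/usr/src/foo.c" in the
   emitted debug information.  */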
1422 void
1423 add_debug_prefix_map (const char *arg)
1425 debug_prefix_map *map;
1426 const char *p;
1428 p = strchr (arg, '=');
1429 if (!p)
1431 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1432 return;
1434 map = XNEW (debug_prefix_map);
1435 map->old_prefix = ggc_alloc_string (arg, p - arg);
1436 map->old_len = p - arg;
1437 p++;
1438 map->new_prefix = ggc_strdup (p);
1439 map->new_len = strlen (p);
1440 map->next = debug_prefix_maps;
1441 debug_prefix_maps = map;
1444 /* Perform user-specified mapping of debug filename prefixes. Return
1445 the new name corresponding to FILENAME. */
1447 const char *
1448 remap_debug_filename (const char *filename)
1450 debug_prefix_map *map;
1451 char *s;
1452 const char *name;
1453 size_t name_len;
1455 for (map = debug_prefix_maps; map; map = map->next)
1456 if (strncmp (filename, map->old_prefix, map->old_len) == 0)
1457 break;
1458 if (!map)
1459 return filename;
1460 name = filename + map->old_len;
1461 name_len = strlen (name) + 1;
1462 s = (char *) alloca (name_len + map->new_len);
1463 memcpy (s, map->new_prefix, map->new_len);
1464 memcpy (s + map->new_len, name, name_len);
1465 return ggc_strdup (s);
1468 /* Output assembler code for the start of a function,
1469 and initialize some of the variables in this file
1470 for the new function. The label for the function and associated
1471 assembler pseudo-ops have already been output in `assemble_start_function'.
1473 FIRST is the first insn of the rtl for the function being compiled.
1474 FILE is the file to write assembler code to.
1475 OPTIMIZE is nonzero if we should eliminate redundant
1476 test and compare insns. */
1478 void
1479 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1480 int optimize ATTRIBUTE_UNUSED)
1482 block_depth = 0;
1484 this_is_asm_operands = 0;
1486 last_filename = locator_file (prologue_locator);
1487 last_linenum = locator_line (prologue_locator);
1489 high_block_linenum = high_function_linenum = last_linenum;
1491 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1493 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1494 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1495 dwarf2out_begin_prologue (0, NULL);
1496 #endif
1498 #ifdef LEAF_REG_REMAP
1499 if (current_function_uses_only_leaf_regs)
1500 leaf_renumber_regs (first);
1501 #endif
1503 /* The Sun386i and perhaps other machines don't work right
1504 if the profiling code comes after the prologue. */
1505 #ifdef PROFILE_BEFORE_PROLOGUE
1506 if (current_function_profile)
1507 profile_function (file);
1508 #endif /* PROFILE_BEFORE_PROLOGUE */
1510 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1511 if (dwarf2out_do_frame ())
1512 dwarf2out_frame_debug (NULL_RTX, false);
1513 #endif
1515 /* If debugging, assign block numbers to all of the blocks in this
1516 function. */
1517 if (write_symbols)
1519 reemit_insn_block_notes ();
1520 number_blocks (current_function_decl);
1521 /* We never actually put out begin/end notes for the top-level
1522 block in the function. But, conceptually, that block is
1523 always needed. */
1524 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1527 /* First output the function prologue: code to set up the stack frame. */
1528 targetm.asm_out.function_prologue (file, get_frame_size ());
1530 /* If the machine represents the prologue as RTL, the profiling code must
1531 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1532 #ifdef HAVE_prologue
1533 if (! HAVE_prologue)
1534 #endif
1535 profile_after_prologue (file);
1538 static void
1539 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1541 #ifndef PROFILE_BEFORE_PROLOGUE
1542 if (current_function_profile)
1543 profile_function (file);
1544 #endif /* not PROFILE_BEFORE_PROLOGUE */
1547 static void
1548 profile_function (FILE *file ATTRIBUTE_UNUSED)
1550 #ifndef NO_PROFILE_COUNTERS
1551 # define NO_PROFILE_COUNTERS 0
1552 #endif
1553 #if defined(ASM_OUTPUT_REG_PUSH)
1554 int sval = current_function_returns_struct;
1555 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1556 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1557 int cxt = cfun->static_chain_decl != NULL;
1558 #endif
1559 #endif /* ASM_OUTPUT_REG_PUSH */
1561 if (! NO_PROFILE_COUNTERS)
1563 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1564 switch_to_section (data_section);
1565 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1566 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1567 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1570 switch_to_section (current_function_section ());
1572 #if defined(ASM_OUTPUT_REG_PUSH)
1573 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1575 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1577 #endif
1579 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1580 if (cxt)
1581 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1582 #else
1583 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1584 if (cxt)
1586 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1588 #endif
1589 #endif
1591 FUNCTION_PROFILER (file, current_function_funcdef_no);
1593 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1594 if (cxt)
1595 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1596 #else
1597 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1598 if (cxt)
1600 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1602 #endif
1603 #endif
1605 #if defined(ASM_OUTPUT_REG_PUSH)
1606 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1608 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1610 #endif
1613 /* Output assembler code for the end of a function.
1614 For clarity, args are same as those of `final_start_function'
1615 even though not all of them are needed. */
1617 void
1618 final_end_function (void)
1620 app_disable ();
1622 (*debug_hooks->end_function) (high_function_linenum);
1624 /* Finally, output the function epilogue:
1625 code to restore the stack frame and return to the caller. */
1626 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1628 /* And debug output. */
1629 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1631 #if defined (DWARF2_UNWIND_INFO)
1632 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1633 && dwarf2out_do_frame ())
1634 dwarf2out_end_epilogue (last_linenum, last_filename);
1635 #endif
1638 /* Output assembler code for some insns: all or part of a function.
1639 For description of args, see `final_start_function', above. */
1641 void
1642 final (rtx first, FILE *file, int optimize)
1644 rtx insn;
1645 int max_uid = 0;
1646 int seen = 0;
1648 last_ignored_compare = 0;
1650 for (insn = first; insn; insn = NEXT_INSN (insn))
1652 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1653 max_uid = INSN_UID (insn);
1654 #ifdef HAVE_cc0
1655 /* If CC tracking across branches is enabled, record the insn which
1656 jumps to each branch only reached from one place. */
1657 if (optimize && JUMP_P (insn))
1659 rtx lab = JUMP_LABEL (insn);
1660 if (lab && LABEL_NUSES (lab) == 1)
1662 LABEL_REFS (lab) = insn;
1665 #endif
1668 init_recog ();
1670 CC_STATUS_INIT;
1672 /* Output the insns. */
1673 for (insn = first; insn;)
1675 #ifdef HAVE_ATTR_length
1676 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1678 /* This can be triggered by bugs elsewhere in the compiler if
1679 new insns are created after init_insn_lengths is called. */
1680 gcc_assert (NOTE_P (insn));
1681 insn_current_address = -1;
1683 else
1684 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1685 #endif /* HAVE_ATTR_length */
1687 insn = final_scan_insn (insn, file, optimize, 0, &seen);
1691 const char *
1692 get_insn_template (int code, rtx insn)
1694 switch (insn_data[code].output_format)
1696 case INSN_OUTPUT_FORMAT_SINGLE:
1697 return insn_data[code].output.single;
1698 case INSN_OUTPUT_FORMAT_MULTI:
1699 return insn_data[code].output.multi[which_alternative];
1700 case INSN_OUTPUT_FORMAT_FUNCTION:
1701 gcc_assert (insn);
1702 return (*insn_data[code].output.function) (recog_data.operand, insn);
1704 default:
1705 gcc_unreachable ();
1709 /* Emit the appropriate declaration for an alternate-entry-point
1710 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1711 LABEL_KIND != LABEL_NORMAL.
1713 The case fall-through in this function is intentional. */
1714 static void
1715 output_alternate_entry_point (FILE *file, rtx insn)
1717 const char *name = LABEL_NAME (insn);
1719 switch (LABEL_KIND (insn))
1721 case LABEL_WEAK_ENTRY:
1722 #ifdef ASM_WEAKEN_LABEL
1723 ASM_WEAKEN_LABEL (file, name);
1724 #endif
1725 case LABEL_GLOBAL_ENTRY:
1726 targetm.asm_out.globalize_label (file, name);
1727 case LABEL_STATIC_ENTRY:
1728 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1729 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1730 #endif
1731 ASM_OUTPUT_LABEL (file, name);
1732 break;
1734 case LABEL_NORMAL:
1735 default:
1736 gcc_unreachable ();
1740 /* The final scan for one insn, INSN.
1741 Args are same as in `final', except that INSN
1742 is the insn being scanned.
1743 Value returned is the next insn to be scanned.
1745 NOPEEPHOLES is the flag to disallow peephole processing (currently
1746 used for within delayed branch sequence output).
1748 SEEN is used to track the end of the prologue, for emitting
1749 debug information. We force the emission of a line note after
1750 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1751 at the beginning of the second basic block, whichever comes
1752 first. */
1755 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1756 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1758 #ifdef HAVE_cc0
1759 rtx set;
1760 #endif
1761 rtx next;
1763 insn_counter++;
1765 /* Ignore deleted insns. These can occur when we split insns (due to a
1766 template of "#") while not optimizing. */
1767 if (INSN_DELETED_P (insn))
1768 return NEXT_INSN (insn);
1770 switch (GET_CODE (insn))
1772 case NOTE:
1773 switch (NOTE_KIND (insn))
1775 case NOTE_INSN_DELETED:
1776 break;
1778 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1779 in_cold_section_p = !in_cold_section_p;
1780 (*debug_hooks->switch_text_section) ();
1781 switch_to_section (current_function_section ());
1782 break;
1784 case NOTE_INSN_BASIC_BLOCK:
1785 #ifdef TARGET_UNWIND_INFO
1786 targetm.asm_out.unwind_emit (asm_out_file, insn);
1787 #endif
1789 if (flag_debug_asm)
1790 fprintf (asm_out_file, "\t%s basic block %d\n",
1791 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1793 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1795 *seen |= SEEN_EMITTED;
1796 force_source_line = true;
1798 else
1799 *seen |= SEEN_BB;
1801 break;
1803 case NOTE_INSN_EH_REGION_BEG:
1804 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1805 NOTE_EH_HANDLER (insn));
1806 break;
1808 case NOTE_INSN_EH_REGION_END:
1809 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1810 NOTE_EH_HANDLER (insn));
1811 break;
1813 case NOTE_INSN_PROLOGUE_END:
1814 targetm.asm_out.function_end_prologue (file);
1815 profile_after_prologue (file);
1817 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1819 *seen |= SEEN_EMITTED;
1820 force_source_line = true;
1822 else
1823 *seen |= SEEN_NOTE;
1825 break;
1827 case NOTE_INSN_EPILOGUE_BEG:
1828 targetm.asm_out.function_begin_epilogue (file);
1829 break;
1831 case NOTE_INSN_FUNCTION_BEG:
1832 app_disable ();
1833 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1835 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1837 *seen |= SEEN_EMITTED;
1838 force_source_line = true;
1840 else
1841 *seen |= SEEN_NOTE;
1843 break;
1845 case NOTE_INSN_BLOCK_BEG:
1846 if (debug_info_level == DINFO_LEVEL_NORMAL
1847 || debug_info_level == DINFO_LEVEL_VERBOSE
1848 || write_symbols == DWARF2_DEBUG
1849 || write_symbols == VMS_AND_DWARF2_DEBUG
1850 || write_symbols == VMS_DEBUG)
1852 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1854 app_disable ();
1855 ++block_depth;
1856 high_block_linenum = last_linenum;
1858 /* Output debugging info about the symbol-block beginning. */
1859 (*debug_hooks->begin_block) (last_linenum, n);
1861 /* Mark this block as output. */
1862 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1864 if (write_symbols == DBX_DEBUG
1865 || write_symbols == SDB_DEBUG)
1867 location_t *locus_ptr
1868 = block_nonartificial_location (NOTE_BLOCK (insn));
1870 if (locus_ptr != NULL)
1872 override_filename = LOCATION_FILE (*locus_ptr);
1873 override_linenum = LOCATION_LINE (*locus_ptr);
1876 break;
1878 case NOTE_INSN_BLOCK_END:
1879 if (debug_info_level == DINFO_LEVEL_NORMAL
1880 || debug_info_level == DINFO_LEVEL_VERBOSE
1881 || write_symbols == DWARF2_DEBUG
1882 || write_symbols == VMS_AND_DWARF2_DEBUG
1883 || write_symbols == VMS_DEBUG)
1885 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1887 app_disable ();
1889 /* End of a symbol-block. */
1890 --block_depth;
1891 gcc_assert (block_depth >= 0);
1893 (*debug_hooks->end_block) (high_block_linenum, n);
1895 if (write_symbols == DBX_DEBUG
1896 || write_symbols == SDB_DEBUG)
1898 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
1899 location_t *locus_ptr
1900 = block_nonartificial_location (outer_block);
1902 if (locus_ptr != NULL)
1904 override_filename = LOCATION_FILE (*locus_ptr);
1905 override_linenum = LOCATION_LINE (*locus_ptr);
1907 else
1909 override_filename = NULL;
1910 override_linenum = 0;
1913 break;
1915 case NOTE_INSN_DELETED_LABEL:
1916 /* Emit the label. We may have deleted the CODE_LABEL because
1917 the label could be proved to be unreachable, though still
1918 referenced (in the form of having its address taken). */
1919 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1920 break;
1922 case NOTE_INSN_VAR_LOCATION:
1923 (*debug_hooks->var_location) (insn);
1924 break;
1926 default:
1927 gcc_unreachable ();
1928 break;
1930 break;
1932 case BARRIER:
1933 #if defined (DWARF2_UNWIND_INFO)
1934 if (dwarf2out_do_frame ())
1935 dwarf2out_frame_debug (insn, false);
1936 #endif
1937 break;
1939 case CODE_LABEL:
1940 /* The target port might emit labels in the output function for
1941 some insn, e.g. sh.c output_branchy_insn. */
1942 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1944 int align = LABEL_TO_ALIGNMENT (insn);
1945 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1946 int max_skip = LABEL_TO_MAX_SKIP (insn);
1947 #endif
1949 if (align && NEXT_INSN (insn))
1951 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1952 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1953 #else
1954 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1955 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1956 #else
1957 ASM_OUTPUT_ALIGN (file, align);
1958 #endif
1959 #endif
1962 #ifdef HAVE_cc0
1963 CC_STATUS_INIT;
1964 /* If this label is reached from only one place, set the condition
1965 codes from the instruction just before the branch. */
1967 /* Disabled because some insns set cc_status in the C output code
1968 and NOTICE_UPDATE_CC alone can set incorrect status. */
1969 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1971 rtx jump = LABEL_REFS (insn);
1972 rtx barrier = prev_nonnote_insn (insn);
1973 rtx prev;
1974 /* If the LABEL_REFS field of this label has been set to point
1975 at a branch, the predecessor of the branch is a regular
1976 insn, and that branch is the only way to reach this label,
1977 set the condition codes based on the branch and its
1978 predecessor. */
1979 if (barrier && BARRIER_P (barrier)
1980 && jump && JUMP_P (jump)
1981 && (prev = prev_nonnote_insn (jump))
1982 && NONJUMP_INSN_P (prev))
1984 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1985 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1988 #endif
1990 if (LABEL_NAME (insn))
1991 (*debug_hooks->label) (insn);
1993 if (app_on)
1995 fputs (ASM_APP_OFF, file);
1996 app_on = 0;
1999 next = next_nonnote_insn (insn);
2000 if (next != 0 && JUMP_P (next))
2002 rtx nextbody = PATTERN (next);
2004 /* If this label is followed by a jump-table,
2005 make sure we put the label in the read-only section. Also
2006 possibly write the label and jump table together. */
2008 if (GET_CODE (nextbody) == ADDR_VEC
2009 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
2011 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2012 /* In this case, the case vector is being moved by the
2013 target, so don't output the label at all. Leave that
2014 to the back end macros. */
2015 #else
2016 if (! JUMP_TABLES_IN_TEXT_SECTION)
2018 int log_align;
2020 switch_to_section (targetm.asm_out.function_rodata_section
2021 (current_function_decl));
2023 #ifdef ADDR_VEC_ALIGN
2024 log_align = ADDR_VEC_ALIGN (next);
2025 #else
2026 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2027 #endif
2028 ASM_OUTPUT_ALIGN (file, log_align);
2030 else
2031 switch_to_section (current_function_section ());
2033 #ifdef ASM_OUTPUT_CASE_LABEL
2034 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2035 next);
2036 #else
2037 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2038 #endif
2039 #endif
2040 break;
2043 if (LABEL_ALT_ENTRY_P (insn))
2044 output_alternate_entry_point (file, insn);
2045 else
2046 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2047 break;
2049 default:
2051 rtx body = PATTERN (insn);
2052 int insn_code_number;
2053 const char *template;
2055 #ifdef HAVE_conditional_execution
2056 /* Reset this early so it is correct for ASM statements. */
2057 current_insn_predicate = NULL_RTX;
2058 #endif
2059 /* An INSN, JUMP_INSN or CALL_INSN.
2060 First check for special kinds that recog doesn't recognize. */
2062 if (GET_CODE (body) == USE /* These are just declarations. */
2063 || GET_CODE (body) == CLOBBER)
2064 break;
2066 #ifdef HAVE_cc0
2068 /* If there is a REG_CC_SETTER note on this insn, it means that
2069 the setting of the condition code was done in the delay slot
2070 of the insn that branched here. So recover the cc status
2071 from the insn that set it. */
2073 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2074 if (note)
2076 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2077 cc_prev_status = cc_status;
2080 #endif
2082 /* Detect insns that are really jump-tables
2083 and output them as such. */
2085 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2087 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2088 int vlen, idx;
2089 #endif
2091 if (! JUMP_TABLES_IN_TEXT_SECTION)
2092 switch_to_section (targetm.asm_out.function_rodata_section
2093 (current_function_decl));
2094 else
2095 switch_to_section (current_function_section ());
2097 if (app_on)
2099 fputs (ASM_APP_OFF, file);
2100 app_on = 0;
2103 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2104 if (GET_CODE (body) == ADDR_VEC)
2106 #ifdef ASM_OUTPUT_ADDR_VEC
2107 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2108 #else
2109 gcc_unreachable ();
2110 #endif
2112 else
2114 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2115 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2116 #else
2117 gcc_unreachable ();
2118 #endif
2120 #else
2121 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2122 for (idx = 0; idx < vlen; idx++)
2124 if (GET_CODE (body) == ADDR_VEC)
2126 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2127 ASM_OUTPUT_ADDR_VEC_ELT
2128 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2129 #else
2130 gcc_unreachable ();
2131 #endif
2133 else
2135 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2136 ASM_OUTPUT_ADDR_DIFF_ELT
2137 (file,
2138 body,
2139 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2140 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2141 #else
2142 gcc_unreachable ();
2143 #endif
2146 #ifdef ASM_OUTPUT_CASE_END
2147 ASM_OUTPUT_CASE_END (file,
2148 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2149 insn);
2150 #endif
2151 #endif
2153 switch_to_section (current_function_section ());
2155 break;
2157 /* Output this line note if it is the first or the last line
2158 note in a row. */
2159 if (notice_source_line (insn))
2161 (*debug_hooks->source_line) (last_linenum, last_filename);
2164 if (GET_CODE (body) == ASM_INPUT)
2166 const char *string = XSTR (body, 0);
2168 /* There's no telling what that did to the condition codes. */
2169 CC_STATUS_INIT;
2171 if (string[0])
2173 expanded_location loc;
2175 if (! app_on)
2177 fputs (ASM_APP_ON, file);
2178 app_on = 1;
2180 #ifdef USE_MAPPED_LOCATION
2181 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2182 #else
2183 loc.file = ASM_INPUT_SOURCE_FILE (body);
2184 loc.line = ASM_INPUT_SOURCE_LINE (body);
2185 #endif
2186 if (*loc.file && loc.line)
2187 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2188 ASM_COMMENT_START, loc.line, loc.file);
2189 fprintf (asm_out_file, "\t%s\n", string);
2190 #if HAVE_AS_LINE_ZERO
2191 if (*loc.file && loc.line)
2192 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2193 #endif
2195 break;
2198 /* Detect `asm' construct with operands. */
2199 if (asm_noperands (body) >= 0)
2201 unsigned int noperands = asm_noperands (body);
2202 rtx *ops = alloca (noperands * sizeof (rtx));
2203 const char *string;
2204 location_t loc;
2205 expanded_location expanded;
2207 /* There's no telling what that did to the condition codes. */
2208 CC_STATUS_INIT;
2210 /* Get out the operand values. */
2211 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2212 /* Inhibit dying on what would otherwise be compiler bugs. */
2213 insn_noperands = noperands;
2214 this_is_asm_operands = insn;
2215 expanded = expand_location (loc);
2217 #ifdef FINAL_PRESCAN_INSN
2218 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2219 #endif
2221 /* Output the insn using them. */
2222 if (string[0])
2224 if (! app_on)
2226 fputs (ASM_APP_ON, file);
2227 app_on = 1;
2229 if (expanded.file && expanded.line)
2230 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2231 ASM_COMMENT_START, expanded.line, expanded.file);
2232 output_asm_insn (string, ops);
2233 #if HAVE_AS_LINE_ZERO
2234 if (expanded.file && expanded.line)
2235 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2236 #endif
2239 this_is_asm_operands = 0;
2240 break;
2243 if (app_on)
2245 fputs (ASM_APP_OFF, file);
2246 app_on = 0;
2249 if (GET_CODE (body) == SEQUENCE)
2251 /* A delayed-branch sequence */
2252 int i;
2254 final_sequence = body;
2256 /* Record the delay slots' frame information before the branch.
2257 This is needed for delayed calls: see execute_cfa_program(). */
2258 #if defined (DWARF2_UNWIND_INFO)
2259 if (dwarf2out_do_frame ())
2260 for (i = 1; i < XVECLEN (body, 0); i++)
2261 dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2262 #endif
2264 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2265 force the restoration of a comparison that was previously
2266 thought unnecessary. If that happens, cancel this sequence
2267 and cause that insn to be restored. */
2269 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2270 if (next != XVECEXP (body, 0, 1))
2272 final_sequence = 0;
2273 return next;
2276 for (i = 1; i < XVECLEN (body, 0); i++)
2278 rtx insn = XVECEXP (body, 0, i);
2279 rtx next = NEXT_INSN (insn);
2280 /* We loop in case any instruction in a delay slot gets
2281 split. */
2282 do
2283 insn = final_scan_insn (insn, file, 0, 1, seen);
2284 while (insn != next);
2286 #ifdef DBR_OUTPUT_SEQEND
2287 DBR_OUTPUT_SEQEND (file);
2288 #endif
2289 final_sequence = 0;
2291 /* If the insn requiring the delay slot was a CALL_INSN, the
2292 insns in the delay slot are actually executed before the
2293 called function. Hence we don't preserve any CC-setting
2294 actions in these insns and the CC must be marked as being
2295 clobbered by the function. */
2296 if (CALL_P (XVECEXP (body, 0, 0)))
2298 CC_STATUS_INIT;
2300 break;
2303 /* We have a real machine instruction as rtl. */
2305 body = PATTERN (insn);
2307 #ifdef HAVE_cc0
2308 set = single_set (insn);
2310 /* Check for redundant test and compare instructions
2311 (when the condition codes are already set up as desired).
2312 This is done only when optimizing; if not optimizing,
2313 it should be possible for the user to alter a variable
2314 with the debugger in between statements
2315 and the next statement should reexamine the variable
2316 to compute the condition codes. */
2318 if (optimize)
2320 if (set
2321 && GET_CODE (SET_DEST (set)) == CC0
2322 && insn != last_ignored_compare)
2324 if (GET_CODE (SET_SRC (set)) == SUBREG)
2325 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2326 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2328 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2329 XEXP (SET_SRC (set), 0)
2330 = alter_subreg (&XEXP (SET_SRC (set), 0));
2331 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2332 XEXP (SET_SRC (set), 1)
2333 = alter_subreg (&XEXP (SET_SRC (set), 1));
2335 if ((cc_status.value1 != 0
2336 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2337 || (cc_status.value2 != 0
2338 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2340 /* Don't delete insn if it has an addressing side-effect. */
2341 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2342 /* or if anything in it is volatile. */
2343 && ! volatile_refs_p (PATTERN (insn)))
2345 /* We don't really delete the insn; just ignore it. */
2346 last_ignored_compare = insn;
2347 break;
2352 #endif
2354 #ifdef HAVE_cc0
2355 /* If this is a conditional branch, maybe modify it
2356 if the cc's are in a nonstandard state
2357 so that it accomplishes the same thing that it would
2358 do straightforwardly if the cc's were set up normally. */
2360 if (cc_status.flags != 0
2361 && JUMP_P (insn)
2362 && GET_CODE (body) == SET
2363 && SET_DEST (body) == pc_rtx
2364 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2365 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2366 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2368 /* This function may alter the contents of its argument
2369 and clear some of the cc_status.flags bits.
2370 It may also return 1 meaning condition now always true
2371 or -1 meaning condition now always false
2372 or 2 meaning condition nontrivial but altered. */
2373 int result = alter_cond (XEXP (SET_SRC (body), 0));
2374 /* If condition now has fixed value, replace the IF_THEN_ELSE
2375 with its then-operand or its else-operand. */
2376 if (result == 1)
2377 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2378 if (result == -1)
2379 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2381 /* The jump is now either unconditional or a no-op.
2382 If it has become a no-op, don't try to output it.
2383 (It would not be recognized.) */
2384 if (SET_SRC (body) == pc_rtx)
2386 delete_insn (insn);
2387 break;
2389 else if (GET_CODE (SET_SRC (body)) == RETURN)
2390 /* Replace (set (pc) (return)) with (return). */
2391 PATTERN (insn) = body = SET_SRC (body);
2393 /* Rerecognize the instruction if it has changed. */
2394 if (result != 0)
2395 INSN_CODE (insn) = -1;
2398 /* If this is a conditional trap, maybe modify it if the cc's
2399 are in a nonstandard state so that it accomplishes the same
2400 thing that it would do straightforwardly if the cc's were
2401 set up normally. */
2402 if (cc_status.flags != 0
2403 && NONJUMP_INSN_P (insn)
2404 && GET_CODE (body) == TRAP_IF
2405 && COMPARISON_P (TRAP_CONDITION (body))
2406 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2408 /* This function may alter the contents of its argument
2409 and clear some of the cc_status.flags bits.
2410 It may also return 1 meaning condition now always true
2411 or -1 meaning condition now always false
2412 or 2 meaning condition nontrivial but altered. */
2413 int result = alter_cond (TRAP_CONDITION (body));
2415 /* If TRAP_CONDITION has become always false, delete the
2416 instruction. */
2417 if (result == -1)
2419 delete_insn (insn);
2420 break;
2423 /* If TRAP_CONDITION has become always true, replace
2424 TRAP_CONDITION with const_true_rtx. */
2425 if (result == 1)
2426 TRAP_CONDITION (body) = const_true_rtx;
2428 /* Rerecognize the instruction if it has changed. */
2429 if (result != 0)
2430 INSN_CODE (insn) = -1;
2433 /* Make same adjustments to instructions that examine the
2434 condition codes without jumping and instructions that
2435 handle conditional moves (if this machine has either one). */
2437 if (cc_status.flags != 0
2438 && set != 0)
2440 rtx cond_rtx, then_rtx, else_rtx;
2442 if (!JUMP_P (insn)
2443 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2445 cond_rtx = XEXP (SET_SRC (set), 0);
2446 then_rtx = XEXP (SET_SRC (set), 1);
2447 else_rtx = XEXP (SET_SRC (set), 2);
2449 else
2451 cond_rtx = SET_SRC (set);
2452 then_rtx = const_true_rtx;
2453 else_rtx = const0_rtx;
2456 switch (GET_CODE (cond_rtx))
2458 case GTU:
2459 case GT:
2460 case LTU:
2461 case LT:
2462 case GEU:
2463 case GE:
2464 case LEU:
2465 case LE:
2466 case EQ:
2467 case NE:
2469 int result;
2470 if (XEXP (cond_rtx, 0) != cc0_rtx)
2471 break;
2472 result = alter_cond (cond_rtx);
2473 if (result == 1)
2474 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2475 else if (result == -1)
2476 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2477 else if (result == 2)
2478 INSN_CODE (insn) = -1;
2479 if (SET_DEST (set) == SET_SRC (set))
2480 delete_insn (insn);
2482 break;
2484 default:
2485 break;
2489 #endif
2491 #ifdef HAVE_peephole
2492 /* Do machine-specific peephole optimizations if desired. */
2494 if (optimize && !flag_no_peephole && !nopeepholes)
2496 rtx next = peephole (insn);
2497 /* When peepholing, if there were notes within the peephole,
2498 emit them before the peephole. */
2499 if (next != 0 && next != NEXT_INSN (insn))
2501 rtx note, prev = PREV_INSN (insn);
2503 for (note = NEXT_INSN (insn); note != next;
2504 note = NEXT_INSN (note))
2505 final_scan_insn (note, file, optimize, nopeepholes, seen);
2507 /* Put the notes in the proper position for a later
2508 rescan. For example, the SH target can do this
2509 when generating a far jump in a delayed branch
2510 sequence. */
2511 note = NEXT_INSN (insn);
2512 PREV_INSN (note) = prev;
2513 NEXT_INSN (prev) = note;
2514 NEXT_INSN (PREV_INSN (next)) = insn;
2515 PREV_INSN (insn) = PREV_INSN (next);
2516 NEXT_INSN (insn) = next;
2517 PREV_INSN (next) = insn;
2520 /* PEEPHOLE might have changed this. */
2521 body = PATTERN (insn);
2523 #endif
2525 /* Try to recognize the instruction.
2526 If successful, verify that the operands satisfy the
2527 constraints for the instruction. Crash if they don't,
2528 since `reload' should have changed them so that they do. */
2530 insn_code_number = recog_memoized (insn);
2531 cleanup_subreg_operands (insn);
2533 /* Dump the insn in the assembly for debugging. */
2534 if (flag_dump_rtl_in_asm)
2536 print_rtx_head = ASM_COMMENT_START;
2537 print_rtl_single (asm_out_file, insn);
2538 print_rtx_head = "";
2541 if (! constrain_operands_cached (1))
2542 fatal_insn_not_found (insn);
2544 /* Some target machines need to prescan each insn before
2545 it is output. */
2547 #ifdef FINAL_PRESCAN_INSN
2548 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2549 #endif
2551 #ifdef HAVE_conditional_execution
2552 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2553 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2554 #endif
2556 #ifdef HAVE_cc0
2557 cc_prev_status = cc_status;
2559 /* Update `cc_status' for this instruction.
2560 The instruction's output routine may change it further.
2561 If the output routine for a jump insn needs to depend
2562 on the cc status, it should look at cc_prev_status. */
2564 NOTICE_UPDATE_CC (body, insn);
2565 #endif
2567 current_output_insn = debug_insn = insn;
2569 #if defined (DWARF2_UNWIND_INFO)
2570 if (CALL_P (insn) && dwarf2out_do_frame ())
2571 dwarf2out_frame_debug (insn, false);
2572 #endif
2574 /* Find the proper template for this insn. */
2575 template = get_insn_template (insn_code_number, insn);
2577 /* If the C code returns 0, it means that it is a jump insn
2578 which follows a deleted test insn, and that test insn
2579 needs to be reinserted. */
2580 if (template == 0)
2582 rtx prev;
2584 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2586 /* We have already processed the notes between the setter and
2587 the user. Make sure we don't process them again, this is
2588 particularly important if one of the notes is a block
2589 scope note or an EH note. */
2590 for (prev = insn;
2591 prev != last_ignored_compare;
2592 prev = PREV_INSN (prev))
2594 if (NOTE_P (prev))
2595 delete_insn (prev); /* Use delete_note. */
2598 return prev;
2601 /* If the template is the string "#", it means that this insn must
2602 be split. */
2603 if (template[0] == '#' && template[1] == '\0')
2605 rtx new = try_split (body, insn, 0);
2607 /* If we didn't split the insn, go away. */
2608 if (new == insn && PATTERN (new) == body)
2609 fatal_insn ("could not split insn", insn);
2611 #ifdef HAVE_ATTR_length
2612 /* This instruction should have been split in shorten_branches,
2613 to ensure that we would have valid length info for the
2614 split insns. */
2615 gcc_unreachable ();
2616 #endif
2618 return new;
2621 #ifdef TARGET_UNWIND_INFO
2622 /* ??? This will put the directives in the wrong place if
2623 get_insn_template outputs assembly directly. However, calling it
2624 before get_insn_template breaks if the insn is split. */
2625 targetm.asm_out.unwind_emit (asm_out_file, insn);
2626 #endif
2628 /* Output assembler code from the template. */
2629 output_asm_insn (template, recog_data.operand);
2631 /* If necessary, report the effect that the instruction has on
2632 the unwind info. We've already done this for delay slots
2633 and call instructions. */
2634 #if defined (DWARF2_UNWIND_INFO)
2635 if (final_sequence == 0
2636 #if !defined (HAVE_prologue)
2637 && !ACCUMULATE_OUTGOING_ARGS
2638 #endif
2639 && dwarf2out_do_frame ())
2640 dwarf2out_frame_debug (insn, true);
2641 #endif
2643 current_output_insn = debug_insn = 0;
2646 return NEXT_INSN (insn);
2649 /* Return whether a source line note needs to be emitted before INSN. */
2651 static bool
2652 notice_source_line (rtx insn)
2654 const char *filename;
2655 int linenum;
2657 if (override_filename)
2659 filename = override_filename;
2660 linenum = override_linenum;
2662 else
2664 filename = insn_file (insn);
2665 linenum = insn_line (insn);
2668 if (filename
2669 && (force_source_line
2670 || filename != last_filename
2671 || last_linenum != linenum))
2673 force_source_line = false;
2674 last_filename = filename;
2675 last_linenum = linenum;
2676 high_block_linenum = MAX (last_linenum, high_block_linenum);
2677 high_function_linenum = MAX (last_linenum, high_function_linenum);
2678 return true;
2680 return false;
2683 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2684 directly to the desired hard register. */
2686 void
2687 cleanup_subreg_operands (rtx insn)
2689 int i;
2690 bool changed = false;
2691 extract_insn_cached (insn);
2692 for (i = 0; i < recog_data.n_operands; i++)
2694 /* The following test cannot use recog_data.operand when testing
2695 for a SUBREG: the underlying object might have been changed
2696 already if we are inside a match_operator expression that
2697 matches the else clause. Instead we test the underlying
2698 expression directly. */
2699 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2701 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2702 changed = true;
2704 else if (GET_CODE (recog_data.operand[i]) == PLUS
2705 || GET_CODE (recog_data.operand[i]) == MULT
2706 || MEM_P (recog_data.operand[i]))
2707 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2710 for (i = 0; i < recog_data.n_dups; i++)
2712 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2714 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2715 changed = true;
2717 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2718 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2719 || MEM_P (*recog_data.dup_loc[i]))
2720 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2722 if (changed)
2723 df_insn_rescan (insn);
2726 /* If X is a SUBREG, replace it with a REG or a MEM,
2727 based on the thing it is a subreg of. */
2729 rtx
2730 alter_subreg (rtx *xp)
2732 rtx x = *xp;
2733 rtx y = SUBREG_REG (x);
2735 /* simplify_subreg does not remove subreg from volatile references.
2736 We are required to. */
2737 if (MEM_P (y))
2739 int offset = SUBREG_BYTE (x);
2741 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2742 contains 0 instead of the proper offset. See simplify_subreg. */
2743 if (offset == 0
2744 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2746 int difference = GET_MODE_SIZE (GET_MODE (y))
2747 - GET_MODE_SIZE (GET_MODE (x));
2748 if (WORDS_BIG_ENDIAN)
2749 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2750 if (BYTES_BIG_ENDIAN)
2751 offset += difference % UNITS_PER_WORD;
2754 *xp = adjust_address (y, GET_MODE (x), offset);
2756 else
2758 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2759 SUBREG_BYTE (x));
2761 if (new != 0)
2762 *xp = new;
2763 else if (REG_P (y))
2765 /* Simplify_subreg can't handle some REG cases, but we have to. */
2766 unsigned int regno = subreg_regno (x);
2767 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2771 return *xp;
2774 /* Do alter_subreg on all the SUBREGs contained in X. */
2776 static rtx
2777 walk_alter_subreg (rtx *xp, bool *changed)
2779 rtx x = *xp;
2780 switch (GET_CODE (x))
2782 case PLUS:
2783 case MULT:
2784 case AND:
2785 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2786 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2787 break;
2789 case MEM:
2790 case ZERO_EXTEND:
2791 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2792 break;
2794 case SUBREG:
2795 *changed = true;
2796 return alter_subreg (xp);
2798 default:
2799 break;
2802 return *xp;
2805 #ifdef HAVE_cc0
2807 /* Given BODY, the body of a jump instruction, alter the jump condition
2808 as required by the bits that are set in cc_status.flags.
2809 Not all of the bits there can be handled at this level in all cases.
2811 The value is normally 0.
2812 1 means that the condition has become always true.
2813 -1 means that the condition has become always false.
2814 2 means that COND has been altered. */
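/* For example, with CC_NO_OVERFLOW set in cc_status.flags, a GEU
   condition becomes always true (return value 1), an LTU condition
   becomes always false (-1), and a GTU condition is rewritten to NE
   (return value 2). */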
2816 static int
2817 alter_cond (rtx cond)
2819 int value = 0;
2821 if (cc_status.flags & CC_REVERSED)
2823 value = 2;
2824 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2827 if (cc_status.flags & CC_INVERTED)
2829 value = 2;
2830 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2833 if (cc_status.flags & CC_NOT_POSITIVE)
2834 switch (GET_CODE (cond))
2836 case LE:
2837 case LEU:
2838 case GEU:
2839 /* Jump becomes unconditional. */
2840 return 1;
2842 case GT:
2843 case GTU:
2844 case LTU:
2845 /* Jump becomes no-op. */
2846 return -1;
2848 case GE:
2849 PUT_CODE (cond, EQ);
2850 value = 2;
2851 break;
2853 case LT:
2854 PUT_CODE (cond, NE);
2855 value = 2;
2856 break;
2858 default:
2859 break;
2862 if (cc_status.flags & CC_NOT_NEGATIVE)
2863 switch (GET_CODE (cond))
2865 case GE:
2866 case GEU:
2867 /* Jump becomes unconditional. */
2868 return 1;
2870 case LT:
2871 case LTU:
2872 /* Jump becomes no-op. */
2873 return -1;
2875 case LE:
2876 case LEU:
2877 PUT_CODE (cond, EQ);
2878 value = 2;
2879 break;
2881 case GT:
2882 case GTU:
2883 PUT_CODE (cond, NE);
2884 value = 2;
2885 break;
2887 default:
2888 break;
2891 if (cc_status.flags & CC_NO_OVERFLOW)
2892 switch (GET_CODE (cond))
2894 case GEU:
2895 /* Jump becomes unconditional. */
2896 return 1;
2898 case LEU:
2899 PUT_CODE (cond, EQ);
2900 value = 2;
2901 break;
2903 case GTU:
2904 PUT_CODE (cond, NE);
2905 value = 2;
2906 break;
2908 case LTU:
2909 /* Jump becomes no-op. */
2910 return -1;
2912 default:
2913 break;
2916 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2917 switch (GET_CODE (cond))
2919 default:
2920 gcc_unreachable ();
2922 case NE:
2923 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2924 value = 2;
2925 break;
2927 case EQ:
2928 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2929 value = 2;
2930 break;
2933 if (cc_status.flags & CC_NOT_SIGNED)
2934 /* The flags are valid if signed condition operators are converted
2935 to unsigned. */
2936 switch (GET_CODE (cond))
2938 case LE:
2939 PUT_CODE (cond, LEU);
2940 value = 2;
2941 break;
2943 case LT:
2944 PUT_CODE (cond, LTU);
2945 value = 2;
2946 break;
2948 case GT:
2949 PUT_CODE (cond, GTU);
2950 value = 2;
2951 break;
2953 case GE:
2954 PUT_CODE (cond, GEU);
2955 value = 2;
2956 break;
2958 default:
2959 break;
2962 return value;
2964 #endif
2966 /* Report inconsistency between the assembler template and the operands.
2967 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2969 void
2970 output_operand_lossage (const char *cmsgid, ...)
2972 char *fmt_string;
2973 char *new_message;
2974 const char *pfx_str;
2975 va_list ap;
2977 va_start (ap, cmsgid);
2979 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2980 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2981 vasprintf (&new_message, fmt_string, ap);
2983 if (this_is_asm_operands)
2984 error_for_asm (this_is_asm_operands, "%s", new_message);
2985 else
2986 internal_error ("%s", new_message);
2988 free (fmt_string);
2989 free (new_message);
2990 va_end (ap);
2993 /* Output of assembler code from a template, and its subroutines. */
2995 /* Annotate the assembly with a comment describing the pattern and
2996 alternative used. */
2998 static void
2999 output_asm_name (void)
3001 if (debug_insn)
3003 int num = INSN_CODE (debug_insn);
3004 fprintf (asm_out_file, "\t%s %d\t%s",
3005 ASM_COMMENT_START, INSN_UID (debug_insn),
3006 insn_data[num].name);
3007 if (insn_data[num].n_alternatives > 1)
3008 fprintf (asm_out_file, "/%d", which_alternative + 1);
3009 #ifdef HAVE_ATTR_length
3010 fprintf (asm_out_file, "\t[length = %d]",
3011 get_attr_length (debug_insn));
3012 #endif
3013 /* Clear this so only the first assembler insn
3014 of any rtl insn will get the special comment for -dp. */
3015 debug_insn = 0;
3019 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3020 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3021 corresponds to the address of the object and 0 if to the object. */
3023 static tree
3024 get_mem_expr_from_op (rtx op, int *paddressp)
3026 tree expr;
3027 int inner_addressp;
3029 *paddressp = 0;
3031 if (REG_P (op))
3032 return REG_EXPR (op);
3033 else if (!MEM_P (op))
3034 return 0;
3036 if (MEM_EXPR (op) != 0)
3037 return MEM_EXPR (op);
3039 /* Otherwise we have an address, so indicate it and look at the address. */
3040 *paddressp = 1;
3041 op = XEXP (op, 0);
3043 /* First check if we have a decl for the address, then look at the right side
3044 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3045 But don't allow the address to itself be indirect. */
3046 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3047 return expr;
3048 else if (GET_CODE (op) == PLUS
3049 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3050 return expr;
3052 while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
3053 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3054 op = XEXP (op, 0);
3056 expr = get_mem_expr_from_op (op, &inner_addressp);
3057 return inner_addressp ? 0 : expr;
3060 /* Output operand names for assembler instructions. OPERANDS is the
3061 operand vector, OPORDER is the order to write the operands, and NOPS
3062 is the number of operands to write. */
3064 static void
3065 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3067 int wrote = 0;
3068 int i;
3070 for (i = 0; i < nops; i++)
3072 int addressp;
3073 rtx op = operands[oporder[i]];
3074 tree expr = get_mem_expr_from_op (op, &addressp);
3076 fprintf (asm_out_file, "%c%s",
3077 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3078 wrote = 1;
3079 if (expr)
3081 fprintf (asm_out_file, "%s",
3082 addressp ? "*" : "");
3083 print_mem_expr (asm_out_file, expr);
3084 wrote = 1;
3086 else if (REG_P (op) && ORIGINAL_REGNO (op)
3087 && ORIGINAL_REGNO (op) != REGNO (op))
3088 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3092 /* Output text from TEMPLATE to the assembler output file,
3093 obeying %-directions to substitute operands taken from
3094 the vector OPERANDS.
3096 %N (for N a digit) means print operand N in the usual manner.
3097 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3098 and print the label name with no punctuation.
3099 %cN means require operand N to be a constant
3100 and print the constant expression with no punctuation.
3101 %aN means expect operand N to be a memory address
3102 (not a memory reference!) and print a reference
3103 to that address.
3104 %nN means expect operand N to be a constant
3105 and print a constant expression for minus the value
3106 of the operand, with no other punctuation. */
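/* For example, the template "add %1,%0" prints operands 1 and 0 in the
   usual manner, "%l2" prints the label that operand 2 refers to, and
   "%n1" prints the negated value of constant operand 1. In addition,
   "%%" emits a literal '%' and "%=" emits a number unique to each insn
   in the compilation. */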
3108 void
3109 output_asm_insn (const char *template, rtx *operands)
3111 const char *p;
3112 int c;
3113 #ifdef ASSEMBLER_DIALECT
3114 int dialect = 0;
3115 #endif
3116 int oporder[MAX_RECOG_OPERANDS];
3117 char opoutput[MAX_RECOG_OPERANDS];
3118 int ops = 0;
3120 /* An insn may return a null string template
3121 in a case where no assembler code is needed. */
3122 if (*template == 0)
3123 return;
3125 memset (opoutput, 0, sizeof opoutput);
3126 p = template;
3127 putc ('\t', asm_out_file);
3129 #ifdef ASM_OUTPUT_OPCODE
3130 ASM_OUTPUT_OPCODE (asm_out_file, p);
3131 #endif
3133 while ((c = *p++))
3134 switch (c)
3136 case '\n':
3137 if (flag_verbose_asm)
3138 output_asm_operand_names (operands, oporder, ops);
3139 if (flag_print_asm_name)
3140 output_asm_name ();
3142 ops = 0;
3143 memset (opoutput, 0, sizeof opoutput);
3145 putc (c, asm_out_file);
3146 #ifdef ASM_OUTPUT_OPCODE
3147 while ((c = *p) == '\t')
3149 putc (c, asm_out_file);
3150 p++;
3152 ASM_OUTPUT_OPCODE (asm_out_file, p);
3153 #endif
3154 break;
3156 #ifdef ASSEMBLER_DIALECT
3157 case '{':
3159 int i;
3161 if (dialect)
3162 output_operand_lossage ("nested assembly dialect alternatives");
3163 else
3164 dialect = 1;
3166 /* If we want the first dialect, do nothing. Otherwise, skip
3167 DIALECT_NUMBER of strings ending with '|'. */
3168 for (i = 0; i < dialect_number; i++)
3170 while (*p && *p != '}' && *p++ != '|')
3172 if (*p == '}')
3173 break;
3174 if (*p == '|')
3175 p++;
3178 if (*p == '\0')
3179 output_operand_lossage ("unterminated assembly dialect alternative");
3181 break;
3183 case '|':
3184 if (dialect)
3186 /* Skip to close brace. */
3189 if (*p == '\0')
3191 output_operand_lossage ("unterminated assembly dialect alternative");
3192 break;
3195 while (*p++ != '}');
3196 dialect = 0;
3198 else
3199 putc (c, asm_out_file);
3200 break;
3202 case '}':
3203 if (! dialect)
3204 putc (c, asm_out_file);
3205 dialect = 0;
3206 break;
3207 #endif
3209 case '%':
3210 /* %% outputs a single %. */
3211 if (*p == '%')
3213 p++;
3214 putc (c, asm_out_file);
3216 /* %= outputs a number which is unique to each insn in the entire
3217 compilation. This is useful for making local labels that are
3218 referred to more than once in a given insn. */
3219 else if (*p == '=')
3221 p++;
3222 fprintf (asm_out_file, "%d", insn_counter);
3224 /* % followed by a letter and some digits
3225 outputs an operand in a special way depending on the letter.
3226 Letters `acln' are implemented directly.
3227 Other letters are passed to `output_operand' so that
3228 the PRINT_OPERAND macro can define them. */
3229 else if (ISALPHA (*p))
3231 int letter = *p++;
3232 unsigned long opnum;
3233 char *endptr;
3235 opnum = strtoul (p, &endptr, 10);
3237 if (endptr == p)
3238 output_operand_lossage ("operand number missing "
3239 "after %%-letter");
3240 else if (this_is_asm_operands && opnum >= insn_noperands)
3241 output_operand_lossage ("operand number out of range");
3242 else if (letter == 'l')
3243 output_asm_label (operands[opnum]);
3244 else if (letter == 'a')
3245 output_address (operands[opnum]);
3246 else if (letter == 'c')
3248 if (CONSTANT_ADDRESS_P (operands[opnum]))
3249 output_addr_const (asm_out_file, operands[opnum]);
3250 else
3251 output_operand (operands[opnum], 'c');
3253 else if (letter == 'n')
3255 if (GET_CODE (operands[opnum]) == CONST_INT)
3256 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3257 - INTVAL (operands[opnum]));
3258 else
3260 putc ('-', asm_out_file);
3261 output_addr_const (asm_out_file, operands[opnum]);
3264 else
3265 output_operand (operands[opnum], letter);
3267 if (!opoutput[opnum])
3268 oporder[ops++] = opnum;
3269 opoutput[opnum] = 1;
3271 p = endptr;
3272 c = *p;
3274 /* % followed by a digit outputs an operand the default way. */
3275 else if (ISDIGIT (*p))
3277 unsigned long opnum;
3278 char *endptr;
3280 opnum = strtoul (p, &endptr, 10);
3281 if (this_is_asm_operands && opnum >= insn_noperands)
3282 output_operand_lossage ("operand number out of range");
3283 else
3284 output_operand (operands[opnum], 0);
3286 if (!opoutput[opnum])
3287 oporder[ops++] = opnum;
3288 opoutput[opnum] = 1;
3290 p = endptr;
3291 c = *p;
3293 /* % followed by punctuation: output something for that
3294 punctuation character alone, with no operand.
3295 The PRINT_OPERAND macro decides what is actually done. */
3296 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3297 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3298 output_operand (NULL_RTX, *p++);
3299 #endif
3300 else
3301 output_operand_lossage ("invalid %%-code");
3302 break;
3304 default:
3305 putc (c, asm_out_file);
3308 /* Write out the variable names for operands, if we know them. */
3309 if (flag_verbose_asm)
3310 output_asm_operand_names (operands, oporder, ops);
3311 if (flag_print_asm_name)
3312 output_asm_name ();
3314 putc ('\n', asm_out_file);
3317 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3319 void
3320 output_asm_label (rtx x)
3322 char buf[256];
3324 if (GET_CODE (x) == LABEL_REF)
3325 x = XEXP (x, 0);
3326 if (LABEL_P (x)
3327 || (NOTE_P (x)
3328 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3329 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3330 else
3331 output_operand_lossage ("'%%l' operand isn't a label");
3333 assemble_name (asm_out_file, buf);
3336 /* Print operand X using machine-dependent assembler syntax.
3337 The macro PRINT_OPERAND is defined just to control this function.
3338 CODE is a non-digit that preceded the operand-number in the % spec,
3339 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3340 between the % and the digits.
3341 When CODE is a non-letter, X is 0.
3343 The meanings of the letters are machine-dependent and controlled
3344 by PRINT_OPERAND. */
3346 static void
3347 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3349 if (x && GET_CODE (x) == SUBREG)
3350 x = alter_subreg (&x);
3352 /* X must not be a pseudo reg. */
3353 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3355 PRINT_OPERAND (asm_out_file, x, code);
3358 /* Print a memory reference operand for address X
3359 using machine-dependent assembler syntax.
3360 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3362 void
3363 output_address (rtx x)
3365 bool changed = false;
3366 walk_alter_subreg (&x, &changed);
3367 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3370 /* Print an integer constant expression in assembler syntax.
3371 Addition and subtraction are the only arithmetic
3372 that may appear in these expressions. */
3374 void
3375 output_addr_const (FILE *file, rtx x)
3377 char buf[256];
3379 restart:
3380 switch (GET_CODE (x))
3382 case PC:
3383 putc ('.', file);
3384 break;
3386 case SYMBOL_REF:
3387 if (SYMBOL_REF_DECL (x))
3388 mark_decl_referenced (SYMBOL_REF_DECL (x));
3389 #ifdef ASM_OUTPUT_SYMBOL_REF
3390 ASM_OUTPUT_SYMBOL_REF (file, x);
3391 #else
3392 assemble_name (file, XSTR (x, 0));
3393 #endif
3394 break;
3396 case LABEL_REF:
3397 x = XEXP (x, 0);
3398 /* Fall through. */
3399 case CODE_LABEL:
3400 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3401 #ifdef ASM_OUTPUT_LABEL_REF
3402 ASM_OUTPUT_LABEL_REF (file, buf);
3403 #else
3404 assemble_name (file, buf);
3405 #endif
3406 break;
3408 case CONST_INT:
3409 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3410 break;
3412 case CONST:
3413 /* This used to output parentheses around the expression,
3414 but that does not work on the 386 (either ATT or BSD assembler). */
3415 output_addr_const (file, XEXP (x, 0));
3416 break;
3418 case CONST_DOUBLE:
3419 if (GET_MODE (x) == VOIDmode)
3421 /* We can use %d if the number is one word and positive. */
3422 if (CONST_DOUBLE_HIGH (x))
3423 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3424 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3425 else if (CONST_DOUBLE_LOW (x) < 0)
3426 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3427 else
3428 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3430 else
3431 /* We can't handle floating point constants;
3432 PRINT_OPERAND must handle them. */
3433 output_operand_lossage ("floating constant misused");
3434 break;
3436 case CONST_FIXED:
3437 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_FIXED_VALUE_LOW (x));
3438 break;
3440 case PLUS:
3441 /* Some assemblers need integer constants to appear last (eg masm). */
3442 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3444 output_addr_const (file, XEXP (x, 1));
3445 if (INTVAL (XEXP (x, 0)) >= 0)
3446 fprintf (file, "+");
3447 output_addr_const (file, XEXP (x, 0));
3449 else
3451 output_addr_const (file, XEXP (x, 0));
3452 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3453 || INTVAL (XEXP (x, 1)) >= 0)
3454 fprintf (file, "+");
3455 output_addr_const (file, XEXP (x, 1));
3457 break;
3459 case MINUS:
3460 /* Avoid outputting things like x-x or x+5-x,
3461 since some assemblers can't handle that. */
3462 x = simplify_subtraction (x);
3463 if (GET_CODE (x) != MINUS)
3464 goto restart;
3466 output_addr_const (file, XEXP (x, 0));
3467 fprintf (file, "-");
3468 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3469 || GET_CODE (XEXP (x, 1)) == PC
3470 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3471 output_addr_const (file, XEXP (x, 1));
3472 else
3474 fputs (targetm.asm_out.open_paren, file);
3475 output_addr_const (file, XEXP (x, 1));
3476 fputs (targetm.asm_out.close_paren, file);
3478 break;
3480 case ZERO_EXTEND:
3481 case SIGN_EXTEND:
3482 case SUBREG:
3483 output_addr_const (file, XEXP (x, 0));
3484 break;
3486 default:
3487 #ifdef OUTPUT_ADDR_CONST_EXTRA
3488 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3489 break;
3491 fail:
3492 #endif
3493 output_operand_lossage ("invalid expression as operand");
3497 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3498 %R prints the value of REGISTER_PREFIX.
3499 %L prints the value of LOCAL_LABEL_PREFIX.
3500 %U prints the value of USER_LABEL_PREFIX.
3501 %I prints the value of IMMEDIATE_PREFIX.
3502 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3503 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3505 We handle alternate assembler dialects here, just like output_asm_insn. */
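/* For example, asm_fprintf (file, "\tadd %Rsp,%wd\n", (HOST_WIDE_INT) 8)
   writes "add", the value of REGISTER_PREFIX (if the target defines one),
   "sp," and then the HOST_WIDE_INT value 8, using the printf length
   modifier appropriate for HOST_WIDE_INT. */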
3507 void
3508 asm_fprintf (FILE *file, const char *p, ...)
3510 char buf[10];
3511 char *q, c;
3512 va_list argptr;
3514 va_start (argptr, p);
3516 buf[0] = '%';
3518 while ((c = *p++))
3519 switch (c)
3521 #ifdef ASSEMBLER_DIALECT
3522 case '{':
3524 int i;
3526 /* If we want the first dialect, do nothing. Otherwise, skip
3527 DIALECT_NUMBER of strings ending with '|'. */
3528 for (i = 0; i < dialect_number; i++)
3530 while (*p && *p++ != '|')
3533 if (*p == '|')
3534 p++;
3537 break;
3539 case '|':
3540 /* Skip to close brace. */
3541 while (*p && *p++ != '}')
3543 break;
3545 case '}':
3546 break;
3547 #endif
3549 case '%':
3550 c = *p++;
3551 q = &buf[1];
3552 while (strchr ("-+ #0", c))
3554 *q++ = c;
3555 c = *p++;
3557 while (ISDIGIT (c) || c == '.')
3559 *q++ = c;
3560 c = *p++;
3562 switch (c)
3564 case '%':
3565 putc ('%', file);
3566 break;
3568 case 'd': case 'i': case 'u':
3569 case 'x': case 'X': case 'o':
3570 case 'c':
3571 *q++ = c;
3572 *q = 0;
3573 fprintf (file, buf, va_arg (argptr, int));
3574 break;
3576 case 'w':
3577 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3578 'o' cases, but we do not check for those cases. It
3579 means that the value is a HOST_WIDE_INT, which may be
3580 either `long' or `long long'. */
3581 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3582 q += strlen (HOST_WIDE_INT_PRINT);
3583 *q++ = *p++;
3584 *q = 0;
3585 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3586 break;
3588 case 'l':
3589 *q++ = c;
3590 #ifdef HAVE_LONG_LONG
3591 if (*p == 'l')
3593 *q++ = *p++;
3594 *q++ = *p++;
3595 *q = 0;
3596 fprintf (file, buf, va_arg (argptr, long long));
3598 else
3599 #endif
3601 *q++ = *p++;
3602 *q = 0;
3603 fprintf (file, buf, va_arg (argptr, long));
3606 break;
3608 case 's':
3609 *q++ = c;
3610 *q = 0;
3611 fprintf (file, buf, va_arg (argptr, char *));
3612 break;
3614 case 'O':
3615 #ifdef ASM_OUTPUT_OPCODE
3616 ASM_OUTPUT_OPCODE (asm_out_file, p);
3617 #endif
3618 break;
3620 case 'R':
3621 #ifdef REGISTER_PREFIX
3622 fprintf (file, "%s", REGISTER_PREFIX);
3623 #endif
3624 break;
3626 case 'I':
3627 #ifdef IMMEDIATE_PREFIX
3628 fprintf (file, "%s", IMMEDIATE_PREFIX);
3629 #endif
3630 break;
3632 case 'L':
3633 #ifdef LOCAL_LABEL_PREFIX
3634 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3635 #endif
3636 break;
3638 case 'U':
3639 fputs (user_label_prefix, file);
3640 break;
3642 #ifdef ASM_FPRINTF_EXTENSIONS
3643 /* Uppercase letters are reserved for general use by asm_fprintf
3644 and so are not available to target specific code. In order to
3645 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3646 they are defined here. As they get turned into real extensions
3647 to asm_fprintf they should be removed from this list. */
3648 case 'A': case 'B': case 'C': case 'D': case 'E':
3649 case 'F': case 'G': case 'H': case 'J': case 'K':
3650 case 'M': case 'N': case 'P': case 'Q': case 'S':
3651 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3652 break;
3654 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3655 #endif
3656 default:
3657 gcc_unreachable ();
3659 break;
3661 default:
3662 putc (c, file);
3664 va_end (argptr);
3667 /* Split up a CONST_DOUBLE or integer constant rtx
3668 into two rtx's for single words,
3669 storing in *FIRST the word that comes first in memory in the target
3670 and in *SECOND the other. */
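/* For example, with 32-bit words, a HOST_WIDE_INT of at least 64 bits
   and ! WORDS_BIG_ENDIAN, splitting (const_int 0x100000002) yields
   *FIRST = (const_int 2) and *SECOND = (const_int 1); the two are
   swapped when WORDS_BIG_ENDIAN. */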
3672 void
3673 split_double (rtx value, rtx *first, rtx *second)
3675 if (GET_CODE (value) == CONST_INT)
3677 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3679 /* In this case the CONST_INT holds both target words.
3680 Extract the bits from it into two word-sized pieces.
3681 Sign extend each half to HOST_WIDE_INT. */
3682 unsigned HOST_WIDE_INT low, high;
3683 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3685 /* Set sign_bit to the most significant bit of a word. */
3686 sign_bit = 1;
3687 sign_bit <<= BITS_PER_WORD - 1;
3689 /* Set mask so that all bits of the word are set. We could
3690 have used 1 << BITS_PER_WORD instead of basing the
3691 calculation on sign_bit. However, on machines where
3692 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3693 compiler warning, even though the code would never be
3694 executed. */
3695 mask = sign_bit << 1;
3696 mask--;
3698 /* Set sign_extend as any remaining bits. */
3699 sign_extend = ~mask;
3701 /* Pick the lower word and sign-extend it. */
3702 low = INTVAL (value);
3703 low &= mask;
3704 if (low & sign_bit)
3705 low |= sign_extend;
3707 /* Pick the higher word, shifted to the least significant
3708 bits, and sign-extend it. */
3709 high = INTVAL (value);
3710 high >>= BITS_PER_WORD - 1;
3711 high >>= 1;
3712 high &= mask;
3713 if (high & sign_bit)
3714 high |= sign_extend;
3716 /* Store the words in the target machine order. */
3717 if (WORDS_BIG_ENDIAN)
3719 *first = GEN_INT (high);
3720 *second = GEN_INT (low);
3722 else
3724 *first = GEN_INT (low);
3725 *second = GEN_INT (high);
3728 else
3730 /* The rule for using CONST_INT for a wider mode
3731 is that we regard the value as signed.
3732 So sign-extend it. */
3733 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3734 if (WORDS_BIG_ENDIAN)
3736 *first = high;
3737 *second = value;
3739 else
3741 *first = value;
3742 *second = high;
3746 else if (GET_CODE (value) != CONST_DOUBLE)
3748 if (WORDS_BIG_ENDIAN)
3750 *first = const0_rtx;
3751 *second = value;
3753 else
3755 *first = value;
3756 *second = const0_rtx;
3759 else if (GET_MODE (value) == VOIDmode
3760 /* This is the old way we did CONST_DOUBLE integers. */
3761 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3763 /* In an integer, the words are defined as most and least significant.
3764 So order them by the target's convention. */
3765 if (WORDS_BIG_ENDIAN)
3767 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3768 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3770 else
3772 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3773 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3776 else
3778 REAL_VALUE_TYPE r;
3779 long l[2];
3780 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3782 /* Note, this converts the REAL_VALUE_TYPE to the target's
3783 format, splits up the floating point double and outputs
3784 exactly 32 bits of it into each of l[0] and l[1] --
3785 not necessarily BITS_PER_WORD bits. */
3786 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3788 /* If 32 bits is an entire word for the target, but not for the host,
3789 then sign-extend on the host so that the number will look the same
3790 way on the host that it would on the target. See for instance
3791 simplify_unary_operation. The #if is needed to avoid compiler
3792 warnings. */
3794 #if HOST_BITS_PER_LONG > 32
3795 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3797 if (l[0] & ((long) 1 << 31))
3798 l[0] |= ((long) (-1) << 32);
3799 if (l[1] & ((long) 1 << 31))
3800 l[1] |= ((long) (-1) << 32);
3802 #endif
3804 *first = GEN_INT (l[0]);
3805 *second = GEN_INT (l[1]);
3809 /* Return nonzero if this function has no function calls. */
3811 int
3812 leaf_function_p (void)
3814 rtx insn;
3815 rtx link;
3817 if (current_function_profile || profile_arc_flag)
3818 return 0;
3820 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3822 if (CALL_P (insn)
3823 && ! SIBLING_CALL_P (insn))
3824 return 0;
3825 if (NONJUMP_INSN_P (insn)
3826 && GET_CODE (PATTERN (insn)) == SEQUENCE
3827 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3828 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3829 return 0;
3831 for (link = current_function_epilogue_delay_list;
3832 link;
3833 link = XEXP (link, 1))
3835 insn = XEXP (link, 0);
3837 if (CALL_P (insn)
3838 && ! SIBLING_CALL_P (insn))
3839 return 0;
3840 if (NONJUMP_INSN_P (insn)
3841 && GET_CODE (PATTERN (insn)) == SEQUENCE
3842 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3843 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3844 return 0;
3847 return 1;
3850 /* Return 1 if branch is a forward branch.
3851 Uses insn_shuid array, so it works only in the final pass. May be used by
3852 output templates to add branch prediction hints.
3853 */
3854 int
3855 final_forward_branch_p (rtx insn)
3857 int insn_id, label_id;
3859 gcc_assert (uid_shuid);
3860 insn_id = INSN_SHUID (insn);
3861 label_id = INSN_SHUID (JUMP_LABEL (insn));
3862 /* We've hit some insns that do not have id information available. */
3863 gcc_assert (insn_id && label_id);
3864 return insn_id < label_id;
3867 /* On some machines, a function with no call insns
3868 can run faster if it doesn't create its own register window.
3869 When output, the leaf function should use only the "output"
3870 registers. Ordinarily, the function would be compiled to use
3871 the "input" registers to find its arguments; it is a candidate
3872 for leaf treatment if it uses only the "input" registers.
3873 Leaf function treatment means renumbering so the function
3874 uses the "output" registers instead. */
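/* On a register-window target such as SPARC, for example, this means
   renumbering the incoming-argument registers %i0-%i5 to %o0-%o5 so
   that the function needs no register window of its own. */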
3876 #ifdef LEAF_REGISTERS
3878 /* Return 1 if this function uses only the registers that can be
3879 safely renumbered. */
3881 int
3882 only_leaf_regs_used (void)
3884 int i;
3885 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3887 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3888 if ((df_regs_ever_live_p (i) || global_regs[i])
3889 && ! permitted_reg_in_leaf_functions[i])
3890 return 0;
3892 if (current_function_uses_pic_offset_table
3893 && pic_offset_table_rtx != 0
3894 && REG_P (pic_offset_table_rtx)
3895 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3896 return 0;
3898 return 1;
3901 /* Scan all instructions and renumber all registers into those
3902 available in leaf functions. */
3904 static void
3905 leaf_renumber_regs (rtx first)
3907 rtx insn;
3909 /* Renumber only the actual patterns.
3910 The reg-notes can contain frame pointer refs,
3911 and renumbering them could crash, and should not be needed. */
3912 for (insn = first; insn; insn = NEXT_INSN (insn))
3913 if (INSN_P (insn))
3914 leaf_renumber_regs_insn (PATTERN (insn));
3915 for (insn = current_function_epilogue_delay_list;
3916 insn;
3917 insn = XEXP (insn, 1))
3918 if (INSN_P (XEXP (insn, 0)))
3919 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3922 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3923 available in leaf functions. */
3925 void
3926 leaf_renumber_regs_insn (rtx in_rtx)
3928 int i, j;
3929 const char *format_ptr;
3931 if (in_rtx == 0)
3932 return;
3934 /* Renumber all input-registers into output-registers.
3935 renumbered_regs would be 1 for an output-register;
3936 they */
3938 if (REG_P (in_rtx))
3940 int newreg;
3942 /* Don't renumber the same reg twice. */
3943 if (in_rtx->used)
3944 return;
3946 newreg = REGNO (in_rtx);
3947 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3948 to reach here as part of a REG_NOTE. */
3949 if (newreg >= FIRST_PSEUDO_REGISTER)
3951 in_rtx->used = 1;
3952 return;
3954 newreg = LEAF_REG_REMAP (newreg);
3955 gcc_assert (newreg >= 0);
3956 df_set_regs_ever_live (REGNO (in_rtx), false);
3957 df_set_regs_ever_live (newreg, true);
3958 SET_REGNO (in_rtx, newreg);
3959 in_rtx->used = 1;
3962 if (INSN_P (in_rtx))
3964 /* Inside a SEQUENCE, we find insns.
3965 Renumber just the patterns of these insns,
3966 just as we do for the top-level insns. */
3967 leaf_renumber_regs_insn (PATTERN (in_rtx));
3968 return;
3971 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3973 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3974 switch (*format_ptr++)
3976 case 'e':
3977 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3978 break;
3980 case 'E':
3981 if (NULL != XVEC (in_rtx, i))
3983 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3984 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3986 break;
3988 case 'S':
3989 case 's':
3990 case '0':
3991 case 'i':
3992 case 'w':
3993 case 'n':
3994 case 'u':
3995 break;
3997 default:
3998 gcc_unreachable ();
4001 #endif
4004 /* When -gused is used, emit debug info only for used symbols. But in
4005 addition to the standard intercepted debug_hooks there are some direct
4006 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
4007 Those routines may also be called from a higher level intercepted routine. So
4008 to prevent recording data for an inner call to one of these routines during an intercept,
4009 we maintain an intercept nesting counter (debug_nesting). We only save the
4010 intercepted arguments if the nesting is 1. */
4011 int debug_nesting = 0;
4013 static tree *symbol_queue;
4014 int symbol_queue_index = 0;
4015 static int symbol_queue_size = 0;
4017 /* Generate the symbols for any queued up type symbols we encountered
4018 while generating the type info for some originally used symbol.
4019 This might generate additional entries in the queue. Only when
4020 the nesting depth goes to 0 is this routine called. */
4022 void
4023 debug_flush_symbol_queue (void)
4025 int i;
4027 /* Make sure that additionally queued items are not flushed
4028 prematurely. */
4030 ++debug_nesting;
4032 for (i = 0; i < symbol_queue_index; ++i)
4034 /* If we pushed queued symbols then such symbols must be
4035 output no matter what anyone else says. Specifically,
4036 we need to make sure dbxout_symbol() thinks the symbol was
4037 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
4038 which may be set for outside reasons. */
4039 int saved_tree_used = TREE_USED (symbol_queue[i]);
4040 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
4041 TREE_USED (symbol_queue[i]) = 1;
4042 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
4044 #ifdef DBX_DEBUGGING_INFO
4045 dbxout_symbol (symbol_queue[i], 0);
4046 #endif
4048 TREE_USED (symbol_queue[i]) = saved_tree_used;
4049 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
4052 symbol_queue_index = 0;
4053 --debug_nesting;
4056 /* Queue a type symbol needed as part of the definition of a decl
4057 symbol. These symbols are generated when debug_flush_symbol_queue()
4058 is called. */
4060 void
4061 debug_queue_symbol (tree decl)
4063 if (symbol_queue_index >= symbol_queue_size)
4065 symbol_queue_size += 10;
4066 symbol_queue = xrealloc (symbol_queue,
4067 symbol_queue_size * sizeof (tree));
4070 symbol_queue[symbol_queue_index++] = decl;
4073 /* Free symbol queue. */
4074 void
4075 debug_free_queue (void)
4077 if (symbol_queue)
4079 free (symbol_queue);
4080 symbol_queue = NULL;
4081 symbol_queue_size = 0;
4085 /* Turn the RTL into assembly. */
4086 static unsigned int
4087 rest_of_handle_final (void)
4089 rtx x;
4090 const char *fnname;
4092 /* Get the function's name, as described by its RTL. This may be
4093 different from the DECL_NAME name used in the source file. */
4095 x = DECL_RTL (current_function_decl);
4096 gcc_assert (MEM_P (x));
4097 x = XEXP (x, 0);
4098 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4099 fnname = XSTR (x, 0);
4101 assemble_start_function (current_function_decl, fnname);
4102 final_start_function (get_insns (), asm_out_file, optimize);
4103 final (get_insns (), asm_out_file, optimize);
4104 final_end_function ();
4106 #ifdef TARGET_UNWIND_INFO
4107 /* ??? The IA-64 ".handlerdata" directive must be issued before
4108 the ".endp" directive that closes the procedure descriptor. */
4109 output_function_exception_table (fnname);
4110 #endif
4112 assemble_end_function (current_function_decl, fnname);
4114 #ifndef TARGET_UNWIND_INFO
4115 /* Otherwise, it feels unclean to switch sections in the middle. */
4116 output_function_exception_table (fnname);
4117 #endif
4119 user_defined_section_attribute = false;
4121 /* Free up reg info memory. */
4122 free_reg_info ();
4124 if (! quiet_flag)
4125 fflush (asm_out_file);
4127 /* Write DBX symbols if requested. */
4129 /* Note that for those inline functions where we don't initially
4130 know for certain that we will be generating an out-of-line copy,
4131 the first invocation of this routine (rest_of_compilation) will
4132 skip over this code by doing a `goto exit_rest_of_compilation;'.
4133 Later on, wrapup_global_declarations will (indirectly) call
4134 rest_of_compilation again for those inline functions that need
4135 to have out-of-line copies generated. During that call, we
4136 *will* be routed past here. */
4138 timevar_push (TV_SYMOUT);
4139 (*debug_hooks->function_decl) (current_function_decl);
4140 timevar_pop (TV_SYMOUT);
4141 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4142 && targetm.have_ctors_dtors)
4143 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4144 decl_init_priority_lookup
4145 (current_function_decl));
4146 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4147 && targetm.have_ctors_dtors)
4148 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4149 decl_fini_priority_lookup
4150 (current_function_decl));
4151 return 0;
4154 struct tree_opt_pass pass_final =
4156 NULL, /* name */
4157 NULL, /* gate */
4158 rest_of_handle_final, /* execute */
4159 NULL, /* sub */
4160 NULL, /* next */
4161 0, /* static_pass_number */
4162 TV_FINAL, /* tv_id */
4163 0, /* properties_required */
4164 0, /* properties_provided */
4165 0, /* properties_destroyed */
4166 0, /* todo_flags_start */
4167 TODO_ggc_collect, /* todo_flags_finish */
4168 0 /* letter */
4172 static unsigned int
4173 rest_of_handle_shorten_branches (void)
4175 /* Shorten branches. */
4176 shorten_branches (get_insns ());
4177 return 0;
4180 struct tree_opt_pass pass_shorten_branches =
4182 "shorten", /* name */
4183 NULL, /* gate */
4184 rest_of_handle_shorten_branches, /* execute */
4185 NULL, /* sub */
4186 NULL, /* next */
4187 0, /* static_pass_number */
4188 TV_FINAL, /* tv_id */
4189 0, /* properties_required */
4190 0, /* properties_provided */
4191 0, /* properties_destroyed */
4192 0, /* todo_flags_start */
4193 TODO_dump_func, /* todo_flags_finish */
4194 0 /* letter */
4198 static unsigned int
4199 rest_of_clean_state (void)
4201 rtx insn, next;
4203 /* It is very important to decompose the RTL instruction chain here:
4204 debug information keeps pointing into CODE_LABEL insns inside the function
4205 body. If these remain pointing to the other insns, we end up preserving
4206 the whole RTL chain and its attached detailed debug info in memory. */
4207 for (insn = get_insns (); insn; insn = next)
4209 next = NEXT_INSN (insn);
4210 NEXT_INSN (insn) = NULL;
4211 PREV_INSN (insn) = NULL;
4214 /* In case the function was not output,
4215 don't leave any temporary anonymous types
4216 queued up for sdb output. */
4217 #ifdef SDB_DEBUGGING_INFO
4218 if (write_symbols == SDB_DEBUG)
4219 sdbout_types (NULL_TREE);
4220 #endif
4222 reload_completed = 0;
4223 epilogue_completed = 0;
4224 #ifdef STACK_REGS
4225 regstack_completed = 0;
4226 #endif
4228 /* Clear out the insn_length contents now that they are no
4229 longer valid. */
4230 init_insn_lengths ();
4232 /* Show no temporary slots allocated. */
4233 init_temp_slots ();
4235 free_bb_for_insn ();
4237 if (targetm.binds_local_p (current_function_decl))
4239 int pref = cfun->preferred_stack_boundary;
4240 if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
4241 pref = cfun->stack_alignment_needed;
4242 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4243 = pref;
4246 /* Make sure volatile mem refs aren't considered valid operands for
4247 arithmetic insns. We must call this here if this is a nested inline
4248 function, since the above code leaves us in the init_recog state,
4249 and the function context push/pop code does not save/restore volatile_ok.
4251 ??? Maybe it isn't necessary for expand_start_function to call this
4252 anymore if we do it here? */
4254 init_recog_no_volatile ();
4256 /* We're done with this function. Free up memory if we can. */
4257 free_after_parsing (cfun);
4258 free_after_compilation (cfun);
4259 return 0;
4262 struct tree_opt_pass pass_clean_state =
4264 NULL, /* name */
4265 NULL, /* gate */
4266 rest_of_clean_state, /* execute */
4267 NULL, /* sub */
4268 NULL, /* next */
4269 0, /* static_pass_number */
4270 TV_FINAL, /* tv_id */
4271 0, /* properties_required */
4272 0, /* properties_provided */
4273 PROP_rtl, /* properties_destroyed */
4274 0, /* todo_flags_start */
4275 0, /* todo_flags_finish */
4276 0 /* letter */