1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
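/* Editorial sketch (not part of the original file): the entry points
   described above are used by a caller, e.g. the final pass driver,
   roughly in this order (simplified; debug hooks and error handling
   omitted):

       final_start_function (first, asm_out_file, optimize);
       final (first, asm_out_file, optimize);
       final_end_function ();

   `first' is the first insn of the function's RTL and `asm_out_file' is
   the assembler output stream used throughout this file.  */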
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74 #include "tree-pass.h"
75 #include "timevar.h"
76 #include "cgraph.h"
77 #include "coverage.h"
78 #include "df.h"
79 #include "vecprim.h"
80 #include "ggc.h"
81 #include "cfgloop.h"
82 #include "params.h"
84 #ifdef XCOFF_DEBUGGING_INFO
85 #include "xcoffout.h" /* Needed for external data
86 declarations for e.g. AIX 4.x. */
87 #endif
89 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
90 #include "dwarf2out.h"
91 #endif
93 #ifdef DBX_DEBUGGING_INFO
94 #include "dbxout.h"
95 #endif
97 #ifdef SDB_DEBUGGING_INFO
98 #include "sdbout.h"
99 #endif
101 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
102 null default for it to save conditionalization later. */
103 #ifndef CC_STATUS_INIT
104 #define CC_STATUS_INIT
105 #endif
107 /* How to start an assembler comment. */
108 #ifndef ASM_COMMENT_START
109 #define ASM_COMMENT_START ";#"
110 #endif
112 /* Is the given character a logical line separator for the assembler? */
113 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
114 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
115 #endif
117 #ifndef JUMP_TABLES_IN_TEXT_SECTION
118 #define JUMP_TABLES_IN_TEXT_SECTION 0
119 #endif
121 /* Bitflags used by final_scan_insn. */
122 #define SEEN_BB 1
123 #define SEEN_NOTE 2
124 #define SEEN_EMITTED 4
126 /* Last insn processed by final_scan_insn. */
127 static rtx debug_insn;
128 rtx current_output_insn;
130 /* Line number of last NOTE. */
131 static int last_linenum;
133 /* Last discriminator written to assembly. */
134 static int last_discriminator;
136 /* Discriminator of current block. */
137 static int discriminator;
139 /* Highest line number in current block. */
140 static int high_block_linenum;
142 /* Likewise for function. */
143 static int high_function_linenum;
145 /* Filename of last NOTE. */
146 static const char *last_filename;
148 /* Override filename and line number. */
149 static const char *override_filename;
150 static int override_linenum;
152 /* Whether to force emission of a line note before the next insn. */
153 static bool force_source_line = false;
155 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
157 /* Nonzero while outputting an `asm' with operands.
158 This means that inconsistencies are the user's fault, so don't die.
159 The precise value is the insn being output, to pass to error_for_asm. */
160 rtx this_is_asm_operands;
162 /* Number of operands of this insn, for an `asm' with operands. */
163 static unsigned int insn_noperands;
165 /* Compare optimization flag. */
167 static rtx last_ignored_compare = 0;
169 /* Assign a unique number to each insn that is output.
170 This can be used to generate unique local labels. */
172 static int insn_counter = 0;
174 #ifdef HAVE_cc0
175 /* This variable contains machine-dependent flags (defined in tm.h)
176 set and examined by output routines
177 that describe how to interpret the condition codes properly. */
179 CC_STATUS cc_status;
181 /* During output of an insn, this contains a copy of cc_status
182 from before the insn. */
184 CC_STATUS cc_prev_status;
185 #endif
187 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
189 static int block_depth;
191 /* Nonzero if we have enabled APP processing of our assembler output. */
193 static int app_on;
195 /* If we are outputting an insn sequence, this contains the sequence rtx.
196 Zero otherwise. */
198 rtx final_sequence;
200 #ifdef ASSEMBLER_DIALECT
202 /* Number of the assembler dialect to use, starting at 0. */
203 static int dialect_number;
204 #endif
206 #ifdef HAVE_conditional_execution
207 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
208 rtx current_insn_predicate;
209 #endif
211 #ifdef HAVE_ATTR_length
212 static int asm_insn_count (rtx);
213 #endif
214 static void profile_function (FILE *);
215 static void profile_after_prologue (FILE *);
216 static bool notice_source_line (rtx);
217 static rtx walk_alter_subreg (rtx *, bool *);
218 static void output_asm_name (void);
219 static void output_alternate_entry_point (FILE *, rtx);
220 static tree get_mem_expr_from_op (rtx, int *);
221 static void output_asm_operand_names (rtx *, int *, int);
222 static void output_operand (rtx, int);
223 #ifdef LEAF_REGISTERS
224 static void leaf_renumber_regs (rtx);
225 #endif
226 #ifdef HAVE_cc0
227 static int alter_cond (rtx);
228 #endif
229 #ifndef ADDR_VEC_ALIGN
230 static int final_addr_vec_align (rtx);
231 #endif
232 #ifdef HAVE_ATTR_length
233 static int align_fuzz (rtx, rtx, int, unsigned);
234 #endif
236 /* Initialize data in final at the beginning of a compilation. */
238 void
239 init_final (const char *filename ATTRIBUTE_UNUSED)
241 app_on = 0;
242 final_sequence = 0;
244 #ifdef ASSEMBLER_DIALECT
245 dialect_number = ASSEMBLER_DIALECT;
246 #endif
249 /* Default target function prologue and epilogue assembler output.
251 If not overridden for epilogue code, then the function body itself
252 contains return instructions wherever needed. */
253 void
254 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
255 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
259 /* Default target hook that outputs nothing to a stream. */
260 void
261 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
265 /* Enable APP processing of subsequent output.
266 Used before the output from an `asm' statement. */
268 void
269 app_enable (void)
271 if (! app_on)
273 fputs (ASM_APP_ON, asm_out_file);
274 app_on = 1;
278 /* Disable APP processing of subsequent output.
279 Called from varasm.c before most kinds of output. */
281 void
282 app_disable (void)
284 if (app_on)
286 fputs (ASM_APP_OFF, asm_out_file);
287 app_on = 0;
291 /* Return the number of slots filled in the current
292 delayed branch sequence (we don't count the insn needing the
293 delay slot). Zero if not in a delayed branch sequence. */
295 #ifdef DELAY_SLOTS
297 dbr_sequence_length (void)
299 if (final_sequence != 0)
300 return XVECLEN (final_sequence, 0) - 1;
301 else
302 return 0;
304 #endif
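/* Editorial note (not part of the original file): in a filled delay-slot
   SEQUENCE such as (sequence [branch slot1 slot2]), XVECLEN is 3, so
   dbr_sequence_length returns 2 -- the branch itself is not counted.  */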
306 /* The next two pages contain routines used to compute the length of an insn
307 and to shorten branches. */
309 /* Arrays for insn lengths, and addresses. The latter is referenced by
310 `insn_current_length'. */
312 static int *insn_lengths;
314 VEC(int,heap) *insn_addresses_;
316 /* Max uid for which the above arrays are valid. */
317 static int insn_lengths_max_uid;
319 /* Address of insn being processed. Used by `insn_current_length'. */
320 int insn_current_address;
322 /* Address of insn being processed in previous iteration. */
323 int insn_last_address;
325 /* known invariant alignment of insn being processed. */
326 int insn_current_align;
328 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
329 gives the next following alignment insn that increases the known
330 alignment, or NULL_RTX if there is no such insn.
331 For any alignment obtained this way, we can again index uid_align with
332 its uid to obtain the next following align that in turn increases the
333 alignment, until we reach NULL_RTX.  In the following comments, the
334 sequence obtained this way for an insn is called the alignment chain
335 of that insn. */
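#if 0
/* Editorial illustration (not part of the original file): a toy model of
   the alignment chain described above.  Insn uids are plain ints and
   uid_align_tbl[] holds the uid of the next alignment-increasing insn,
   with -1 standing in for NULL_RTX; the names are this note's own, not
   GCC's.  Walking the chain visits every alignment point that can add
   padding between the given insn and the end of the chain.  */
#include <stdio.h>

static void
walk_alignment_chain (const int *uid_align_tbl, int uid)
{
  int a;

  /* Follow successive links until the chain ends.  */
  for (a = uid_align_tbl[uid]; a != -1; a = uid_align_tbl[a])
    printf ("alignment point at uid %d\n", a);
}

int
main (void)
{
  /* uid 0 -> alignment insn at uid 2 -> alignment insn at uid 5 -> end.  */
  static const int uid_align_tbl[6] = { 2, -1, 5, -1, -1, -1 };

  walk_alignment_chain (uid_align_tbl, 0);
  return 0;
}
#endif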
337 struct label_alignment
339 short alignment;
340 short max_skip;
343 static rtx *uid_align;
344 static int *uid_shuid;
345 static struct label_alignment *label_align;
347 /* Indicate that branch shortening hasn't yet been done. */
349 void
350 init_insn_lengths (void)
352 if (uid_shuid)
354 free (uid_shuid);
355 uid_shuid = 0;
357 if (insn_lengths)
359 free (insn_lengths);
360 insn_lengths = 0;
361 insn_lengths_max_uid = 0;
363 #ifdef HAVE_ATTR_length
364 INSN_ADDRESSES_FREE ();
365 #endif
366 if (uid_align)
368 free (uid_align);
369 uid_align = 0;
373 /* Obtain the current length of an insn. If branch shortening has been done,
374 get its actual length. Otherwise, use FALLBACK_FN to calculate the
375 length. */
376 static inline int
377 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
378 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
380 #ifdef HAVE_ATTR_length
381 rtx body;
382 int i;
383 int length = 0;
385 if (insn_lengths_max_uid > INSN_UID (insn))
386 return insn_lengths[INSN_UID (insn)];
387 else
388 switch (GET_CODE (insn))
390 case NOTE:
391 case BARRIER:
392 case CODE_LABEL:
393 return 0;
395 case CALL_INSN:
396 length = fallback_fn (insn);
397 break;
399 case JUMP_INSN:
400 body = PATTERN (insn);
401 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 /* Alignment is machine-dependent and should be handled by
404 ADDR_VEC_ALIGN. */
406 else
407 length = fallback_fn (insn);
408 break;
410 case INSN:
411 body = PATTERN (insn);
412 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
413 return 0;
415 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
416 length = asm_insn_count (body) * fallback_fn (insn);
417 else if (GET_CODE (body) == SEQUENCE)
418 for (i = 0; i < XVECLEN (body, 0); i++)
419 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
420 else
421 length = fallback_fn (insn);
422 break;
424 default:
425 break;
428 #ifdef ADJUST_INSN_LENGTH
429 ADJUST_INSN_LENGTH (insn, length);
430 #endif
431 return length;
432 #else /* not HAVE_ATTR_length */
433 return 0;
434 #define insn_default_length 0
435 #define insn_min_length 0
436 #endif /* not HAVE_ATTR_length */
439 /* Obtain the current length of an insn. If branch shortening has been done,
440 get its actual length. Otherwise, get its maximum length. */
442 get_attr_length (rtx insn)
444 return get_attr_length_1 (insn, insn_default_length);
447 /* Obtain the current length of an insn. If branch shortening has been done,
448 get its actual length. Otherwise, get its minimum length. */
450 get_attr_min_length (rtx insn)
452 return get_attr_length_1 (insn, insn_min_length);
455 /* Code to handle alignment inside shorten_branches. */
457 /* Here is an explanation of how the algorithm in align_fuzz can give
458 proper results:
460 Call a sequence of instructions beginning with alignment point X
461 and continuing until the next alignment point `block X'. When `X'
462 is used in an expression, it means the alignment value of the
463 alignment point.
465 Call the distance between the start of the first insn of block X, and
466 the end of the last insn of block X `IX', for the `inner size of X'.
467 This is clearly the sum of the instruction lengths.
469 Likewise with the next alignment-delimited block following X, which we
470 shall call block Y.
472 Call the distance between the start of the first insn of block X, and
473 the start of the first insn of block Y `OX', for the `outer size of X'.
475 The estimated padding is then OX - IX.
477 OX can be safely estimated as
479 if (X >= Y)
480 OX = round_up(IX, Y)
481 else
482 OX = round_up(IX, X) + Y - X
484 Clearly est(IX) >= real(IX), because that only depends on the
485 instruction lengths, and those being overestimated is a given.
487 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
488 we needn't worry about that when thinking about OX.
490 When X >= Y, the alignment provided by Y adds no uncertainty factor
491 for branch ranges starting before X, so we can just round what we have.
492 But when X < Y, we don't know anything about the, so to speak,
493 `middle bits', so we have to assume the worst when aligning up from an
494 address mod X to one mod Y, which is Y - X. */
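#if 0
/* Editorial illustration (not part of the original file): the OX estimate
   from the comment above worked with concrete numbers; the function names
   are this note's own.  X and Y are the (power-of-two) alignment values of
   the two alignment points and IX is the inner size of block X.  */
#include <stdio.h>

static int
round_up (int v, int align)
{
  /* Round V up to the next multiple of the power-of-two ALIGN.  */
  return (v + align - 1) & -align;
}

static int
outer_size_estimate (int ix, int x, int y)
{
  if (x >= y)
    return round_up (ix, y);
  else
    return round_up (ix, x) + y - x;
}

int
main (void)
{
  /* IX = 10, X = 4, Y = 16: OX = round_up (10, 4) + 16 - 4 = 12 + 12 = 24,
     so the estimated padding OX - IX is 14 bytes.  */
  printf ("OX = %d\n", outer_size_estimate (10, 4, 16));
  return 0;
}
#endif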
496 #ifndef LABEL_ALIGN
497 #define LABEL_ALIGN(LABEL) align_labels_log
498 #endif
500 #ifndef LABEL_ALIGN_MAX_SKIP
501 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
502 #endif
504 #ifndef LOOP_ALIGN
505 #define LOOP_ALIGN(LABEL) align_loops_log
506 #endif
508 #ifndef LOOP_ALIGN_MAX_SKIP
509 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
510 #endif
512 #ifndef LABEL_ALIGN_AFTER_BARRIER
513 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
514 #endif
516 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
517 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
518 #endif
520 #ifndef JUMP_ALIGN
521 #define JUMP_ALIGN(LABEL) align_jumps_log
522 #endif
524 #ifndef JUMP_ALIGN_MAX_SKIP
525 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
526 #endif
528 #ifndef ADDR_VEC_ALIGN
529 static int
530 final_addr_vec_align (rtx addr_vec)
532 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
534 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
535 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
536 return exact_log2 (align);
540 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
541 #endif
543 #ifndef INSN_LENGTH_ALIGNMENT
544 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
545 #endif
547 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
549 static int min_labelno, max_labelno;
551 #define LABEL_TO_ALIGNMENT(LABEL) \
552 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
554 #define LABEL_TO_MAX_SKIP(LABEL) \
555 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
557 /* For the benefit of port specific code do this also as a function. */
560 label_to_alignment (rtx label)
562 if (CODE_LABEL_NUMBER (label) <= max_labelno)
563 return LABEL_TO_ALIGNMENT (label);
564 return 0;
568 label_to_max_skip (rtx label)
570 if (CODE_LABEL_NUMBER (label) <= max_labelno)
571 return LABEL_TO_MAX_SKIP (label);
572 return 0;
575 #ifdef HAVE_ATTR_length
576 /* The differences in addresses
577 between a branch and its target might grow or shrink depending on
578 the alignment the start insn of the range (the branch for a forward
579 branch or the label for a backward branch) starts out on; if these
580 differences are used naively, they can even oscillate infinitely.
581 We therefore want to compute a 'worst case' address difference that
582 is independent of the alignment the start insn of the range ends
583 up on, and that is at least as large as the actual difference.
584 The function align_fuzz calculates the amount we have to add to the
585 naively computed difference, by traversing the part of the alignment
586 chain of the start insn of the range that is in front of the end insn
587 of the range, and considering for each alignment the maximum amount
588 that it might contribute to a size increase.
590 For casesi tables, we also want to know worst case minimum amounts of
591 address difference, in case a machine description wants to introduce
592 some common offset that is added to all offsets in a table.
593 For this purpose, align_fuzz with a growth argument of 0 computes the
594 appropriate adjustment. */
596 /* Compute the maximum delta by which the difference of the addresses of
597 START and END might grow / shrink due to a different address for start
598 which changes the size of alignment insns between START and END.
599 KNOWN_ALIGN_LOG is the alignment known for START.
600 GROWTH should be ~0 if the objective is to compute potential code size
601 increase, and 0 if the objective is to compute potential shrink.
602 The return value is undefined for any other value of GROWTH. */
604 static int
605 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
607 int uid = INSN_UID (start);
608 rtx align_label;
609 int known_align = 1 << known_align_log;
610 int end_shuid = INSN_SHUID (end);
611 int fuzz = 0;
613 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
615 int align_addr, new_align;
617 uid = INSN_UID (align_label);
618 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
619 if (uid_shuid[uid] > end_shuid)
620 break;
621 known_align_log = LABEL_TO_ALIGNMENT (align_label);
622 new_align = 1 << known_align_log;
623 if (new_align < known_align)
624 continue;
625 fuzz += (-align_addr ^ growth) & (new_align - known_align);
626 known_align = new_align;
628 return fuzz;
631 /* Compute a worst-case reference address of a branch so that it
632 can be safely used in the presence of aligned labels. Since the
633 size of the branch itself is unknown, the size of the branch is
634 not included in the range. I.e. for a forward branch, the reference
635 address is the end address of the branch as known from the previous
636 branch shortening pass, minus a value to account for possible size
637 increase due to alignment. For a backward branch, it is the start
638 address of the branch as known from the current pass, plus a value
639 to account for possible size increase due to alignment.
640 NB.: Therefore, the maximum offset allowed for backward branches needs
641 to exclude the branch size. */
644 insn_current_reference_address (rtx branch)
646 rtx dest, seq;
647 int seq_uid;
649 if (! INSN_ADDRESSES_SET_P ())
650 return 0;
652 seq = NEXT_INSN (PREV_INSN (branch));
653 seq_uid = INSN_UID (seq);
654 if (!JUMP_P (branch))
655 /* This can happen for example on the PA; the objective is to know the
656 offset to address something in front of the start of the function.
657 Thus, we can treat it like a backward branch.
658 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
659 any alignment we'd encounter, so we skip the call to align_fuzz. */
660 return insn_current_address;
661 dest = JUMP_LABEL (branch);
663 /* BRANCH has no proper alignment chain set, so use SEQ.
664 BRANCH also has no INSN_SHUID. */
665 if (INSN_SHUID (seq) < INSN_SHUID (dest))
667 /* Forward branch. */
668 return (insn_last_address + insn_lengths[seq_uid]
669 - align_fuzz (seq, dest, length_unit_log, ~0));
671 else
673 /* Backward branch. */
674 return (insn_current_address
675 + align_fuzz (dest, seq, length_unit_log, ~0));
678 #endif /* HAVE_ATTR_length */
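/* Editorial note (not part of the original file): as a concrete instance of
   the forward-branch case above, if the previous pass placed the branch at
   address 100 with a length of 4, and align_fuzz reports that alignment
   padding between the branch and its target can grow by at most 2 bytes,
   the worst-case reference address is 100 + 4 - 2 = 102, i.e. the branch
   is treated as 2 bytes farther from its (later) target than the naive
   addresses suggest.  */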
680 /* Compute branch alignments based on frequency information in the
681 CFG. */
683 unsigned int
684 compute_alignments (void)
686 int log, max_skip, max_log;
687 basic_block bb;
688 int freq_max = 0;
689 int freq_threshold = 0;
691 if (label_align)
693 free (label_align);
694 label_align = 0;
697 max_labelno = max_label_num ();
698 min_labelno = get_first_label_num ();
699 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
701 /* If not optimizing or optimizing for size, don't assign any alignments. */
702 if (! optimize || optimize_function_for_size_p (cfun))
703 return 0;
705 if (dump_file)
707 dump_flow_info (dump_file, TDF_DETAILS);
708 flow_loops_dump (dump_file, NULL, 1);
709 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
711 FOR_EACH_BB (bb)
712 if (bb->frequency > freq_max)
713 freq_max = bb->frequency;
714 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
716 if (dump_file)
717 fprintf(dump_file, "freq_max: %i\n",freq_max);
718 FOR_EACH_BB (bb)
720 rtx label = BB_HEAD (bb);
721 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
722 edge e;
723 edge_iterator ei;
725 if (!LABEL_P (label)
726 || optimize_bb_for_size_p (bb))
728 if (dump_file)
729 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
730 bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
731 continue;
733 max_log = LABEL_ALIGN (label);
734 max_skip = LABEL_ALIGN_MAX_SKIP;
736 FOR_EACH_EDGE (e, ei, bb->preds)
738 if (e->flags & EDGE_FALLTHRU)
739 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
740 else
741 branch_frequency += EDGE_FREQUENCY (e);
743 if (dump_file)
745 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
746 bb->index, bb->frequency, bb->loop_father->num,
747 bb->loop_depth,
748 fallthru_frequency, branch_frequency);
749 if (!bb->loop_father->inner && bb->loop_father->num)
750 fprintf (dump_file, " inner_loop");
751 if (bb->loop_father->header == bb)
752 fprintf (dump_file, " loop_header");
753 fprintf (dump_file, "\n");
756 /* There are two purposes for aligning a block with no incoming fallthru edge:
757 1) to avoid fetch stalls when the branch destination is near a cache boundary
758 2) to improve cache efficiency in case the previous block is not executed
759 (so it does not need to be in the cache).
761 To catch the first case, we align frequently executed blocks.
762 To catch the second, we align blocks that are executed more frequently
763 than the predecessor, when the predecessor is unlikely to be executed
764 when the function is called. */
766 if (!has_fallthru
767 && (branch_frequency > freq_threshold
768 || (bb->frequency > bb->prev_bb->frequency * 10
769 && (bb->prev_bb->frequency
770 <= ENTRY_BLOCK_PTR->frequency / 2))))
772 log = JUMP_ALIGN (label);
773 if (dump_file)
774 fprintf(dump_file, " jump alignment added.\n");
775 if (max_log < log)
777 max_log = log;
778 max_skip = JUMP_ALIGN_MAX_SKIP;
781 /* In case the block is frequent and reached mostly by non-fallthru edges,
782 align it. It is most likely the first block of a loop. */
783 if (has_fallthru
784 && optimize_bb_for_speed_p (bb)
785 && branch_frequency + fallthru_frequency > freq_threshold
786 && (branch_frequency
787 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
789 log = LOOP_ALIGN (label);
790 if (dump_file)
791 fprintf(dump_file, " internal loop alignment added.\n");
792 if (max_log < log)
794 max_log = log;
795 max_skip = LOOP_ALIGN_MAX_SKIP;
798 LABEL_TO_ALIGNMENT (label) = max_log;
799 LABEL_TO_MAX_SKIP (label) = max_skip;
802 if (dump_file)
804 loop_optimizer_finalize ();
805 free_dominance_info (CDI_DOMINATORS);
807 return 0;
810 struct rtl_opt_pass pass_compute_alignments =
813 RTL_PASS,
814 "alignments", /* name */
815 NULL, /* gate */
816 compute_alignments, /* execute */
817 NULL, /* sub */
818 NULL, /* next */
819 0, /* static_pass_number */
820 TV_NONE, /* tv_id */
821 0, /* properties_required */
822 0, /* properties_provided */
823 0, /* properties_destroyed */
824 0, /* todo_flags_start */
825 TODO_dump_func | TODO_verify_rtl_sharing
826 | TODO_ggc_collect /* todo_flags_finish */
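#if 0
/* Editorial illustration (not part of the original file): the first arm of
   the alignment test in compute_alignments above, reduced to standalone
   arithmetic.  The value 100 for the align-threshold parameter is only an
   example; the real value comes from PARAM_VALUE (PARAM_ALIGN_THRESHOLD).  */
#include <stdio.h>

int
main (void)
{
  int freq_max = 10000;        /* frequency of the hottest block */
  int align_threshold = 100;   /* example parameter value */
  int freq_threshold = freq_max / align_threshold;   /* = 100 */

  int branch_frequency = 250;  /* summed frequency of incoming branch edges */
  int has_fallthru = 0;        /* no incoming fallthru edge */

  if (!has_fallthru && branch_frequency > freq_threshold)
    printf ("label would get JUMP_ALIGN alignment\n");
  return 0;
}
#endif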
831 /* Make a pass over all insns and compute their actual lengths by shortening
832 any branches of variable length if possible. */
834 /* shorten_branches might be called multiple times: for example, the SH
835 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
836 In order to do this, it needs proper length information, which it obtains
837 by calling shorten_branches. This cannot be collapsed with
838 shorten_branches itself into a single pass unless we also want to integrate
839 reorg.c, since the branch splitting exposes new instructions with delay
840 slots. */
842 void
843 shorten_branches (rtx first ATTRIBUTE_UNUSED)
845 rtx insn;
846 int max_uid;
847 int i;
848 int max_log;
849 int max_skip;
850 #ifdef HAVE_ATTR_length
851 #define MAX_CODE_ALIGN 16
852 rtx seq;
853 int something_changed = 1;
854 char *varying_length;
855 rtx body;
856 int uid;
857 rtx align_tab[MAX_CODE_ALIGN];
859 #endif
861 /* Compute maximum UID and allocate label_align / uid_shuid. */
862 max_uid = get_max_uid ();
864 /* Free uid_shuid before reallocating it. */
865 free (uid_shuid);
867 uid_shuid = XNEWVEC (int, max_uid);
869 if (max_labelno != max_label_num ())
871 int old = max_labelno;
872 int n_labels;
873 int n_old_labels;
875 max_labelno = max_label_num ();
877 n_labels = max_labelno - min_labelno + 1;
878 n_old_labels = old - min_labelno + 1;
880 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
882 /* The range of labels grows monotonically in the function. Failing here
883 means that the initialization of the array got lost. */
884 gcc_assert (n_old_labels <= n_labels);
886 memset (label_align + n_old_labels, 0,
887 (n_labels - n_old_labels) * sizeof (struct label_alignment));
890 /* Initialize label_align and set up uid_shuid to be strictly
891 monotonically rising with insn order. */
892 /* We use max_log here to keep track of the maximum alignment we want to
893 impose on the next CODE_LABEL (or the current one if we are processing
894 the CODE_LABEL itself). */
896 max_log = 0;
897 max_skip = 0;
899 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
901 int log;
903 INSN_SHUID (insn) = i++;
904 if (INSN_P (insn))
905 continue;
907 if (LABEL_P (insn))
909 rtx next;
910 bool next_is_jumptable;
912 /* Merge in alignments computed by compute_alignments. */
913 log = LABEL_TO_ALIGNMENT (insn);
914 if (max_log < log)
916 max_log = log;
917 max_skip = LABEL_TO_MAX_SKIP (insn);
920 next = next_nonnote_insn (insn);
921 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
922 if (!next_is_jumptable)
924 log = LABEL_ALIGN (insn);
925 if (max_log < log)
927 max_log = log;
928 max_skip = LABEL_ALIGN_MAX_SKIP;
931 /* ADDR_VECs only take room if read-only data goes into the text
932 section. */
933 if ((JUMP_TABLES_IN_TEXT_SECTION
934 || readonly_data_section == text_section)
935 && next_is_jumptable)
937 log = ADDR_VEC_ALIGN (next);
938 if (max_log < log)
940 max_log = log;
941 max_skip = LABEL_ALIGN_MAX_SKIP;
944 LABEL_TO_ALIGNMENT (insn) = max_log;
945 LABEL_TO_MAX_SKIP (insn) = max_skip;
946 max_log = 0;
947 max_skip = 0;
949 else if (BARRIER_P (insn))
951 rtx label;
953 for (label = insn; label && ! INSN_P (label);
954 label = NEXT_INSN (label))
955 if (LABEL_P (label))
957 log = LABEL_ALIGN_AFTER_BARRIER (insn);
958 if (max_log < log)
960 max_log = log;
961 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
963 break;
967 #ifdef HAVE_ATTR_length
969 /* Allocate the rest of the arrays. */
970 insn_lengths = XNEWVEC (int, max_uid);
971 insn_lengths_max_uid = max_uid;
972 /* Syntax errors can lead to labels being outside of the main insn stream.
973 Initialize insn_addresses, so that we get reproducible results. */
974 INSN_ADDRESSES_ALLOC (max_uid);
976 varying_length = XCNEWVEC (char, max_uid);
978 /* Initialize uid_align. We scan instructions
979 from end to start, and keep in align_tab[n] the last seen insn
980 that does an alignment of at least n+1, i.e. the successor
981 in the alignment chain for an insn that does / has a known
982 alignment of n. */
983 uid_align = XCNEWVEC (rtx, max_uid);
985 for (i = MAX_CODE_ALIGN; --i >= 0;)
986 align_tab[i] = NULL_RTX;
987 seq = get_last_insn ();
988 for (; seq; seq = PREV_INSN (seq))
990 int uid = INSN_UID (seq);
991 int log;
992 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
993 uid_align[uid] = align_tab[0];
994 if (log)
996 /* Found an alignment label. */
997 uid_align[uid] = align_tab[log];
998 for (i = log - 1; i >= 0; i--)
999 align_tab[i] = seq;
1002 #ifdef CASE_VECTOR_SHORTEN_MODE
1003 if (optimize)
1005 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1006 label fields. */
1008 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1009 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1010 int rel;
1012 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1014 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1015 int len, i, min, max, insn_shuid;
1016 int min_align;
1017 addr_diff_vec_flags flags;
1019 if (!JUMP_P (insn)
1020 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1021 continue;
1022 pat = PATTERN (insn);
1023 len = XVECLEN (pat, 1);
1024 gcc_assert (len > 0);
1025 min_align = MAX_CODE_ALIGN;
1026 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1028 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1029 int shuid = INSN_SHUID (lab);
1030 if (shuid < min)
1032 min = shuid;
1033 min_lab = lab;
1035 if (shuid > max)
1037 max = shuid;
1038 max_lab = lab;
1040 if (min_align > LABEL_TO_ALIGNMENT (lab))
1041 min_align = LABEL_TO_ALIGNMENT (lab);
1043 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1044 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1045 insn_shuid = INSN_SHUID (insn);
1046 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1047 memset (&flags, 0, sizeof (flags));
1048 flags.min_align = min_align;
1049 flags.base_after_vec = rel > insn_shuid;
1050 flags.min_after_vec = min > insn_shuid;
1051 flags.max_after_vec = max > insn_shuid;
1052 flags.min_after_base = min > rel;
1053 flags.max_after_base = max > rel;
1054 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1057 #endif /* CASE_VECTOR_SHORTEN_MODE */
1059 /* Compute initial lengths, addresses, and varying flags for each insn. */
1060 for (insn_current_address = 0, insn = first;
1061 insn != 0;
1062 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1064 uid = INSN_UID (insn);
1066 insn_lengths[uid] = 0;
1068 if (LABEL_P (insn))
1070 int log = LABEL_TO_ALIGNMENT (insn);
1071 if (log)
1073 int align = 1 << log;
1074 int new_address = (insn_current_address + align - 1) & -align;
1075 insn_lengths[uid] = new_address - insn_current_address;
1079 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1081 if (NOTE_P (insn) || BARRIER_P (insn)
1082 || LABEL_P (insn))
1083 continue;
1084 if (INSN_DELETED_P (insn))
1085 continue;
1087 body = PATTERN (insn);
1088 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1090 /* This only takes room if read-only data goes into the text
1091 section. */
1092 if (JUMP_TABLES_IN_TEXT_SECTION
1093 || readonly_data_section == text_section)
1094 insn_lengths[uid] = (XVECLEN (body,
1095 GET_CODE (body) == ADDR_DIFF_VEC)
1096 * GET_MODE_SIZE (GET_MODE (body)));
1097 /* Alignment is handled by ADDR_VEC_ALIGN. */
1099 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1100 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1101 else if (GET_CODE (body) == SEQUENCE)
1103 int i;
1104 int const_delay_slots;
1105 #ifdef DELAY_SLOTS
1106 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1107 #else
1108 const_delay_slots = 0;
1109 #endif
1110 /* Inside a delay slot sequence, we do not do any branch shortening
1111 if the shortening could change the number of delay slots
1112 of the branch. */
1113 for (i = 0; i < XVECLEN (body, 0); i++)
1115 rtx inner_insn = XVECEXP (body, 0, i);
1116 int inner_uid = INSN_UID (inner_insn);
1117 int inner_length;
1119 if (GET_CODE (body) == ASM_INPUT
1120 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1121 inner_length = (asm_insn_count (PATTERN (inner_insn))
1122 * insn_default_length (inner_insn));
1123 else
1124 inner_length = insn_default_length (inner_insn);
1126 insn_lengths[inner_uid] = inner_length;
1127 if (const_delay_slots)
1129 if ((varying_length[inner_uid]
1130 = insn_variable_length_p (inner_insn)) != 0)
1131 varying_length[uid] = 1;
1132 INSN_ADDRESSES (inner_uid) = (insn_current_address
1133 + insn_lengths[uid]);
1135 else
1136 varying_length[inner_uid] = 0;
1137 insn_lengths[uid] += inner_length;
1140 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1142 insn_lengths[uid] = insn_default_length (insn);
1143 varying_length[uid] = insn_variable_length_p (insn);
1146 /* If needed, do any adjustment. */
1147 #ifdef ADJUST_INSN_LENGTH
1148 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1149 if (insn_lengths[uid] < 0)
1150 fatal_insn ("negative insn length", insn);
1151 #endif
1154 /* Now loop over all the insns finding varying length insns. For each,
1155 get the current insn length. If it has changed, reflect the change.
1156 When nothing changes for a full pass, we are done. */
1158 while (something_changed)
1160 something_changed = 0;
1161 insn_current_align = MAX_CODE_ALIGN - 1;
1162 for (insn_current_address = 0, insn = first;
1163 insn != 0;
1164 insn = NEXT_INSN (insn))
1166 int new_length;
1167 #ifdef ADJUST_INSN_LENGTH
1168 int tmp_length;
1169 #endif
1170 int length_align;
1172 uid = INSN_UID (insn);
1174 if (LABEL_P (insn))
1176 int log = LABEL_TO_ALIGNMENT (insn);
1177 if (log > insn_current_align)
1179 int align = 1 << log;
1180 int new_address= (insn_current_address + align - 1) & -align;
1181 insn_lengths[uid] = new_address - insn_current_address;
1182 insn_current_align = log;
1183 insn_current_address = new_address;
1185 else
1186 insn_lengths[uid] = 0;
1187 INSN_ADDRESSES (uid) = insn_current_address;
1188 continue;
1191 length_align = INSN_LENGTH_ALIGNMENT (insn);
1192 if (length_align < insn_current_align)
1193 insn_current_align = length_align;
1195 insn_last_address = INSN_ADDRESSES (uid);
1196 INSN_ADDRESSES (uid) = insn_current_address;
1198 #ifdef CASE_VECTOR_SHORTEN_MODE
1199 if (optimize && JUMP_P (insn)
1200 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1202 rtx body = PATTERN (insn);
1203 int old_length = insn_lengths[uid];
1204 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1205 rtx min_lab = XEXP (XEXP (body, 2), 0);
1206 rtx max_lab = XEXP (XEXP (body, 3), 0);
1207 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1208 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1209 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1210 rtx prev;
1211 int rel_align = 0;
1212 addr_diff_vec_flags flags;
1214 /* Avoid automatic aggregate initialization. */
1215 flags = ADDR_DIFF_VEC_FLAGS (body);
1217 /* Try to find a known alignment for rel_lab. */
1218 for (prev = rel_lab;
1219 prev
1220 && ! insn_lengths[INSN_UID (prev)]
1221 && ! (varying_length[INSN_UID (prev)] & 1);
1222 prev = PREV_INSN (prev))
1223 if (varying_length[INSN_UID (prev)] & 2)
1225 rel_align = LABEL_TO_ALIGNMENT (prev);
1226 break;
1229 /* See the comment on addr_diff_vec_flags in rtl.h for the
1230 meaning of the flags values. base: REL_LAB vec: INSN */
1231 /* Anything after INSN still has addresses from the last
1232 pass; adjust these so that they reflect our current
1233 estimate for this pass. */
1234 if (flags.base_after_vec)
1235 rel_addr += insn_current_address - insn_last_address;
1236 if (flags.min_after_vec)
1237 min_addr += insn_current_address - insn_last_address;
1238 if (flags.max_after_vec)
1239 max_addr += insn_current_address - insn_last_address;
1240 /* We want to know the worst case, i.e. lowest possible value
1241 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1242 its offset is positive, and we have to be wary of code shrink;
1243 otherwise, it is negative, and we have to be wary of code
1244 size increase. */
1245 if (flags.min_after_base)
1247 /* If INSN is between REL_LAB and MIN_LAB, the size
1248 changes we are about to make can change the alignment
1249 within the observed offset, therefore we have to break
1250 it up into two parts that are independent. */
1251 if (! flags.base_after_vec && flags.min_after_vec)
1253 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1254 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1256 else
1257 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1259 else
1261 if (flags.base_after_vec && ! flags.min_after_vec)
1263 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1264 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1266 else
1267 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1269 /* Likewise, determine the highest possible value
1270 for the offset of MAX_LAB. */
1271 if (flags.max_after_base)
1273 if (! flags.base_after_vec && flags.max_after_vec)
1275 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1276 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1278 else
1279 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1281 else
1283 if (flags.base_after_vec && ! flags.max_after_vec)
1285 max_addr += align_fuzz (max_lab, insn, 0, 0);
1286 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1288 else
1289 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1291 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1292 max_addr - rel_addr,
1293 body));
1294 if (JUMP_TABLES_IN_TEXT_SECTION
1295 || readonly_data_section == text_section)
1297 insn_lengths[uid]
1298 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1299 insn_current_address += insn_lengths[uid];
1300 if (insn_lengths[uid] != old_length)
1301 something_changed = 1;
1304 continue;
1306 #endif /* CASE_VECTOR_SHORTEN_MODE */
1308 if (! (varying_length[uid]))
1310 if (NONJUMP_INSN_P (insn)
1311 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1313 int i;
1315 body = PATTERN (insn);
1316 for (i = 0; i < XVECLEN (body, 0); i++)
1318 rtx inner_insn = XVECEXP (body, 0, i);
1319 int inner_uid = INSN_UID (inner_insn);
1321 INSN_ADDRESSES (inner_uid) = insn_current_address;
1323 insn_current_address += insn_lengths[inner_uid];
1326 else
1327 insn_current_address += insn_lengths[uid];
1329 continue;
1332 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1334 int i;
1336 body = PATTERN (insn);
1337 new_length = 0;
1338 for (i = 0; i < XVECLEN (body, 0); i++)
1340 rtx inner_insn = XVECEXP (body, 0, i);
1341 int inner_uid = INSN_UID (inner_insn);
1342 int inner_length;
1344 INSN_ADDRESSES (inner_uid) = insn_current_address;
1346 /* insn_current_length returns 0 for insns with a
1347 non-varying length. */
1348 if (! varying_length[inner_uid])
1349 inner_length = insn_lengths[inner_uid];
1350 else
1351 inner_length = insn_current_length (inner_insn);
1353 if (inner_length != insn_lengths[inner_uid])
1355 insn_lengths[inner_uid] = inner_length;
1356 something_changed = 1;
1358 insn_current_address += insn_lengths[inner_uid];
1359 new_length += inner_length;
1362 else
1364 new_length = insn_current_length (insn);
1365 insn_current_address += new_length;
1368 #ifdef ADJUST_INSN_LENGTH
1369 /* If needed, do any adjustment. */
1370 tmp_length = new_length;
1371 ADJUST_INSN_LENGTH (insn, new_length);
1372 insn_current_address += (new_length - tmp_length);
1373 #endif
1375 if (new_length != insn_lengths[uid])
1377 insn_lengths[uid] = new_length;
1378 something_changed = 1;
1381 /* For a non-optimizing compile, do only a single pass. */
1382 if (!optimize)
1383 break;
1386 free (varying_length);
1388 #endif /* HAVE_ATTR_length */
1391 #ifdef HAVE_ATTR_length
1392 /* Given the body of an INSN known to be generated by an ASM statement, return
1393 the number of machine instructions likely to be generated for this insn.
1394 This is used to compute its length. */
1396 static int
1397 asm_insn_count (rtx body)
1399 const char *templ;
1400 int count = 1;
1402 if (GET_CODE (body) == ASM_INPUT)
1403 templ = XSTR (body, 0);
1404 else
1405 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1407 if (!*templ)
1408 return 0;
1410 for (; *templ; templ++)
1411 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1412 || *templ == '\n')
1413 count++;
1415 return count;
1417 #endif
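#if 0
/* Editorial illustration (not part of the original file): the counting rule
   used by asm_insn_count, with the default separator ';' standing in for
   IS_ASM_LOGICAL_LINE_SEPARATOR; toy_asm_insn_count is this note's own
   name.  An empty template counts as zero instructions; otherwise each ';'
   or newline starts one more instruction.  */
#include <stdio.h>

static int
toy_asm_insn_count (const char *templ)
{
  int count = 1;

  if (!*templ)
    return 0;

  for (; *templ; templ++)
    if (*templ == ';' || *templ == '\n')
      count++;

  return count;
}

int
main (void)
{
  /* Three machine instructions: "mov", "add" and "nop".  */
  printf ("%d\n", toy_asm_insn_count ("mov r0, r1; add r0, r0, #1\n\tnop"));
  return 0;
}
#endif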
1419 /* ??? This is probably the wrong place for these. */
1420 /* Structure recording the mapping from source file and directory
1421 names at compile time to those to be embedded in debug
1422 information. */
1423 typedef struct debug_prefix_map
1425 const char *old_prefix;
1426 const char *new_prefix;
1427 size_t old_len;
1428 size_t new_len;
1429 struct debug_prefix_map *next;
1430 } debug_prefix_map;
1432 /* Linked list of such structures. */
1433 debug_prefix_map *debug_prefix_maps;
1436 /* Record a debug file prefix mapping. ARG is the argument to
1437 -fdebug-prefix-map and must be of the form OLD=NEW. */
1439 void
1440 add_debug_prefix_map (const char *arg)
1442 debug_prefix_map *map;
1443 const char *p;
1445 p = strchr (arg, '=');
1446 if (!p)
1448 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1449 return;
1451 map = XNEW (debug_prefix_map);
1452 map->old_prefix = ggc_alloc_string (arg, p - arg);
1453 map->old_len = p - arg;
1454 p++;
1455 map->new_prefix = ggc_strdup (p);
1456 map->new_len = strlen (p);
1457 map->next = debug_prefix_maps;
1458 debug_prefix_maps = map;
1461 /* Perform user-specified mapping of debug filename prefixes. Return
1462 the new name corresponding to FILENAME. */
1464 const char *
1465 remap_debug_filename (const char *filename)
1467 debug_prefix_map *map;
1468 char *s;
1469 const char *name;
1470 size_t name_len;
1472 for (map = debug_prefix_maps; map; map = map->next)
1473 if (strncmp (filename, map->old_prefix, map->old_len) == 0)
1474 break;
1475 if (!map)
1476 return filename;
1477 name = filename + map->old_len;
1478 name_len = strlen (name) + 1;
1479 s = (char *) alloca (name_len + map->new_len);
1480 memcpy (s, map->new_prefix, map->new_len);
1481 memcpy (s + map->new_len, name, name_len);
1482 return ggc_strdup (s);
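#if 0
/* Editorial illustration (not part of the original file): what the
   -fdebug-prefix-map=OLD=NEW machinery above does to a file name, using
   plain malloc instead of the GC allocator; toy_remap is this note's own
   name, not GCC's.  With the mapping "/tmp/build/src=/usr/src", the name
   "/tmp/build/src/gcc/final.c" becomes "/usr/src/gcc/final.c".  */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *
toy_remap (const char *filename, const char *old_prefix,
           const char *new_prefix)
{
  size_t old_len = strlen (old_prefix);
  size_t new_len = strlen (new_prefix);
  const char *tail;
  char *s;

  /* No matching prefix: return an unchanged copy.  */
  if (strncmp (filename, old_prefix, old_len) != 0)
    {
      s = (char *) malloc (strlen (filename) + 1);
      strcpy (s, filename);
      return s;
    }

  /* Splice the new prefix onto the unmatched tail.  */
  tail = filename + old_len;
  s = (char *) malloc (new_len + strlen (tail) + 1);
  memcpy (s, new_prefix, new_len);
  strcpy (s + new_len, tail);
  return s;
}

int
main (void)
{
  char *mapped = toy_remap ("/tmp/build/src/gcc/final.c",
                            "/tmp/build/src", "/usr/src");
  printf ("%s\n", mapped);
  free (mapped);
  return 0;
}
#endif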
1485 /* Output assembler code for the start of a function,
1486 and initialize some of the variables in this file
1487 for the new function. The label for the function and associated
1488 assembler pseudo-ops have already been output in `assemble_start_function'.
1490 FIRST is the first insn of the rtl for the function being compiled.
1491 FILE is the file to write assembler code to.
1492 OPTIMIZE is nonzero if we should eliminate redundant
1493 test and compare insns. */
1495 void
1496 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1497 int optimize ATTRIBUTE_UNUSED)
1499 block_depth = 0;
1501 this_is_asm_operands = 0;
1503 last_filename = locator_file (prologue_locator);
1504 last_linenum = locator_line (prologue_locator);
1505 last_discriminator = discriminator = 0;
1507 high_block_linenum = high_function_linenum = last_linenum;
1509 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1511 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1512 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1513 dwarf2out_begin_prologue (0, NULL);
1514 #endif
1516 #ifdef LEAF_REG_REMAP
1517 if (current_function_uses_only_leaf_regs)
1518 leaf_renumber_regs (first);
1519 #endif
1521 /* The Sun386i and perhaps other machines don't work right
1522 if the profiling code comes after the prologue. */
1523 #ifdef PROFILE_BEFORE_PROLOGUE
1524 if (crtl->profile)
1525 profile_function (file);
1526 #endif /* PROFILE_BEFORE_PROLOGUE */
1528 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1529 if (dwarf2out_do_frame ())
1530 dwarf2out_frame_debug (NULL_RTX, false);
1531 #endif
1533 /* If debugging, assign block numbers to all of the blocks in this
1534 function. */
1535 if (write_symbols)
1537 reemit_insn_block_notes ();
1538 number_blocks (current_function_decl);
1539 /* We never actually put out begin/end notes for the top-level
1540 block in the function. But, conceptually, that block is
1541 always needed. */
1542 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1545 if (warn_frame_larger_than
1546 && get_frame_size () > frame_larger_than_size)
1548 /* Issue a warning */
1549 warning (OPT_Wframe_larger_than_,
1550 "the frame size of %wd bytes is larger than %wd bytes",
1551 get_frame_size (), frame_larger_than_size);
1554 /* First output the function prologue: code to set up the stack frame. */
1555 targetm.asm_out.function_prologue (file, get_frame_size ());
1557 /* If the machine represents the prologue as RTL, the profiling code must
1558 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1559 #ifdef HAVE_prologue
1560 if (! HAVE_prologue)
1561 #endif
1562 profile_after_prologue (file);
1565 static void
1566 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1568 #ifndef PROFILE_BEFORE_PROLOGUE
1569 if (crtl->profile)
1570 profile_function (file);
1571 #endif /* not PROFILE_BEFORE_PROLOGUE */
1574 static void
1575 profile_function (FILE *file ATTRIBUTE_UNUSED)
1577 #ifndef NO_PROFILE_COUNTERS
1578 # define NO_PROFILE_COUNTERS 0
1579 #endif
1580 #if defined(ASM_OUTPUT_REG_PUSH)
1581 int sval = cfun->returns_struct;
1582 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1583 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1584 int cxt = cfun->static_chain_decl != NULL;
1585 #endif
1586 #endif /* ASM_OUTPUT_REG_PUSH */
1588 if (! NO_PROFILE_COUNTERS)
1590 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1591 switch_to_section (data_section);
1592 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1593 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1594 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1597 switch_to_section (current_function_section ());
1599 #if defined(ASM_OUTPUT_REG_PUSH)
1600 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1602 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1604 #endif
1606 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1607 if (cxt)
1608 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1609 #else
1610 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1611 if (cxt)
1613 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1615 #endif
1616 #endif
1618 FUNCTION_PROFILER (file, current_function_funcdef_no);
1620 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1621 if (cxt)
1622 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1623 #else
1624 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1625 if (cxt)
1627 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1629 #endif
1630 #endif
1632 #if defined(ASM_OUTPUT_REG_PUSH)
1633 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1635 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1637 #endif
1640 /* Output assembler code for the end of a function.
1641 Unlike `final_start_function', this takes no arguments; the state it
1642 needs was recorded while the function body was being output. */
1644 void
1645 final_end_function (void)
1647 app_disable ();
1649 (*debug_hooks->end_function) (high_function_linenum);
1651 /* Finally, output the function epilogue:
1652 code to restore the stack frame and return to the caller. */
1653 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1655 /* And debug output. */
1656 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1658 #if defined (DWARF2_UNWIND_INFO)
1659 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1660 && dwarf2out_do_frame ())
1661 dwarf2out_end_epilogue (last_linenum, last_filename);
1662 #endif
1665 /* Output assembler code for some insns: all or part of a function.
1666 For description of args, see `final_start_function', above. */
1668 void
1669 final (rtx first, FILE *file, int optimize)
1671 rtx insn;
1672 int max_uid = 0;
1673 int seen = 0;
1675 last_ignored_compare = 0;
1677 for (insn = first; insn; insn = NEXT_INSN (insn))
1679 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1680 max_uid = INSN_UID (insn);
1681 #ifdef HAVE_cc0
1682 /* If CC tracking across branches is enabled, record the insn which
1683 jumps to each branch only reached from one place. */
1684 if (optimize && JUMP_P (insn))
1686 rtx lab = JUMP_LABEL (insn);
1687 if (lab && LABEL_NUSES (lab) == 1)
1689 LABEL_REFS (lab) = insn;
1692 #endif
1695 init_recog ();
1697 CC_STATUS_INIT;
1699 /* Output the insns. */
1700 for (insn = first; insn;)
1702 #ifdef HAVE_ATTR_length
1703 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1705 /* This can be triggered by bugs elsewhere in the compiler if
1706 new insns are created after init_insn_lengths is called. */
1707 gcc_assert (NOTE_P (insn));
1708 insn_current_address = -1;
1710 else
1711 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1712 #endif /* HAVE_ATTR_length */
1714 insn = final_scan_insn (insn, file, optimize, 0, &seen);
1718 const char *
1719 get_insn_template (int code, rtx insn)
1721 switch (insn_data[code].output_format)
1723 case INSN_OUTPUT_FORMAT_SINGLE:
1724 return insn_data[code].output.single;
1725 case INSN_OUTPUT_FORMAT_MULTI:
1726 return insn_data[code].output.multi[which_alternative];
1727 case INSN_OUTPUT_FORMAT_FUNCTION:
1728 gcc_assert (insn);
1729 return (*insn_data[code].output.function) (recog_data.operand, insn);
1731 default:
1732 gcc_unreachable ();
1736 /* Emit the appropriate declaration for an alternate-entry-point
1737 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1738 LABEL_KIND != LABEL_NORMAL.
1740 The case fall-through in this function is intentional. */
1741 static void
1742 output_alternate_entry_point (FILE *file, rtx insn)
1744 const char *name = LABEL_NAME (insn);
1746 switch (LABEL_KIND (insn))
1748 case LABEL_WEAK_ENTRY:
1749 #ifdef ASM_WEAKEN_LABEL
1750 ASM_WEAKEN_LABEL (file, name);
1751 #endif
1752 case LABEL_GLOBAL_ENTRY:
1753 targetm.asm_out.globalize_label (file, name);
1754 case LABEL_STATIC_ENTRY:
1755 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1756 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1757 #endif
1758 ASM_OUTPUT_LABEL (file, name);
1759 break;
1761 case LABEL_NORMAL:
1762 default:
1763 gcc_unreachable ();
1767 /* Given a CALL_INSN, find and return the nested CALL. */
1768 static rtx
1769 call_from_call_insn (rtx insn)
1771 rtx x;
1772 gcc_assert (CALL_P (insn));
1773 x = PATTERN (insn);
1775 while (GET_CODE (x) != CALL)
1777 switch (GET_CODE (x))
1779 default:
1780 gcc_unreachable ();
1781 case COND_EXEC:
1782 x = COND_EXEC_CODE (x);
1783 break;
1784 case PARALLEL:
1785 x = XVECEXP (x, 0, 0);
1786 break;
1787 case SET:
1788 x = XEXP (x, 1);
1789 break;
1792 return x;
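/* Editorial note (not part of the original file): the loop above peels the
   wrappers a CALL may be nested under.  Typical shapes are:

       (call_insn (call (mem ...) ...))                           -- plain call
       (call_insn (set (reg:SI 0) (call (mem ...) ...)))          -- call with a value
       (call_insn (parallel [(set (reg:SI 0) (call ...)) ...]))   -- call inside a PARALLEL
       (call_insn (cond_exec (ne ...) (call ...)))                -- conditionally executed call

   corresponding to the SET, PARALLEL and COND_EXEC cases handled in the
   switch.  */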
1795 /* The final scan for one insn, INSN.
1796 Args are same as in `final', except that INSN
1797 is the insn being scanned.
1798 Value returned is the next insn to be scanned.
1800 NOPEEPHOLES is the flag to disallow peephole processing (currently
1801 used within delayed branch sequence output).
1803 SEEN is used to track the end of the prologue, for emitting
1804 debug information. We force the emission of a line note after
1805 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1806 at the beginning of the second basic block, whichever comes
1807 first. */
1810 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1811 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1813 #ifdef HAVE_cc0
1814 rtx set;
1815 #endif
1816 rtx next;
1818 insn_counter++;
1820 /* Ignore deleted insns. These can occur when we split insns (due to a
1821 template of "#") while not optimizing. */
1822 if (INSN_DELETED_P (insn))
1823 return NEXT_INSN (insn);
1825 switch (GET_CODE (insn))
1827 case NOTE:
1828 switch (NOTE_KIND (insn))
1830 case NOTE_INSN_DELETED:
1831 break;
1833 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1834 in_cold_section_p = !in_cold_section_p;
1835 #ifdef DWARF2_UNWIND_INFO
1836 if (dwarf2out_do_frame ())
1837 dwarf2out_switch_text_section ();
1838 else
1839 #endif
1840 (*debug_hooks->switch_text_section) ();
1842 switch_to_section (current_function_section ());
1843 break;
1845 case NOTE_INSN_BASIC_BLOCK:
1846 #ifdef TARGET_UNWIND_INFO
1847 targetm.asm_out.unwind_emit (asm_out_file, insn);
1848 #endif
1850 if (flag_debug_asm)
1851 fprintf (asm_out_file, "\t%s basic block %d\n",
1852 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1854 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1856 *seen |= SEEN_EMITTED;
1857 force_source_line = true;
1859 else
1860 *seen |= SEEN_BB;
1862 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
1864 break;
1866 case NOTE_INSN_EH_REGION_BEG:
1867 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1868 NOTE_EH_HANDLER (insn));
1869 break;
1871 case NOTE_INSN_EH_REGION_END:
1872 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1873 NOTE_EH_HANDLER (insn));
1874 break;
1876 case NOTE_INSN_PROLOGUE_END:
1877 targetm.asm_out.function_end_prologue (file);
1878 profile_after_prologue (file);
1880 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1882 *seen |= SEEN_EMITTED;
1883 force_source_line = true;
1885 else
1886 *seen |= SEEN_NOTE;
1888 break;
1890 case NOTE_INSN_EPILOGUE_BEG:
1891 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_epilogue)
1892 if (dwarf2out_do_frame ())
1893 dwarf2out_begin_epilogue (insn);
1894 #endif
1895 targetm.asm_out.function_begin_epilogue (file);
1896 break;
1898 case NOTE_INSN_CFA_RESTORE_STATE:
1899 #if defined (DWARF2_UNWIND_INFO)
1900 dwarf2out_frame_debug_restore_state ();
1901 #endif
1902 break;
1904 case NOTE_INSN_FUNCTION_BEG:
1905 app_disable ();
1906 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1908 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1910 *seen |= SEEN_EMITTED;
1911 force_source_line = true;
1913 else
1914 *seen |= SEEN_NOTE;
1916 break;
1918 case NOTE_INSN_BLOCK_BEG:
1919 if (debug_info_level == DINFO_LEVEL_NORMAL
1920 || debug_info_level == DINFO_LEVEL_VERBOSE
1921 || write_symbols == DWARF2_DEBUG
1922 || write_symbols == VMS_AND_DWARF2_DEBUG
1923 || write_symbols == VMS_DEBUG)
1925 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1927 app_disable ();
1928 ++block_depth;
1929 high_block_linenum = last_linenum;
1931 /* Output debugging info about the symbol-block beginning. */
1932 (*debug_hooks->begin_block) (last_linenum, n);
1934 /* Mark this block as output. */
1935 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1937 if (write_symbols == DBX_DEBUG
1938 || write_symbols == SDB_DEBUG)
1940 location_t *locus_ptr
1941 = block_nonartificial_location (NOTE_BLOCK (insn));
1943 if (locus_ptr != NULL)
1945 override_filename = LOCATION_FILE (*locus_ptr);
1946 override_linenum = LOCATION_LINE (*locus_ptr);
1949 break;
1951 case NOTE_INSN_BLOCK_END:
1952 if (debug_info_level == DINFO_LEVEL_NORMAL
1953 || debug_info_level == DINFO_LEVEL_VERBOSE
1954 || write_symbols == DWARF2_DEBUG
1955 || write_symbols == VMS_AND_DWARF2_DEBUG
1956 || write_symbols == VMS_DEBUG)
1958 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1960 app_disable ();
1962 /* End of a symbol-block. */
1963 --block_depth;
1964 gcc_assert (block_depth >= 0);
1966 (*debug_hooks->end_block) (high_block_linenum, n);
1968 if (write_symbols == DBX_DEBUG
1969 || write_symbols == SDB_DEBUG)
1971 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
1972 location_t *locus_ptr
1973 = block_nonartificial_location (outer_block);
1975 if (locus_ptr != NULL)
1977 override_filename = LOCATION_FILE (*locus_ptr);
1978 override_linenum = LOCATION_LINE (*locus_ptr);
1980 else
1982 override_filename = NULL;
1983 override_linenum = 0;
1986 break;
1988 case NOTE_INSN_DELETED_LABEL:
1989 /* Emit the label. We may have deleted the CODE_LABEL because
1990 the label could be proved to be unreachable, though still
1991 referenced (in the form of having its address taken). */
1992 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1993 break;
1995 case NOTE_INSN_VAR_LOCATION:
1996 (*debug_hooks->var_location) (insn);
1997 break;
1999 default:
2000 gcc_unreachable ();
2001 break;
2003 break;
2005 case BARRIER:
2006 #if defined (DWARF2_UNWIND_INFO)
2007 if (dwarf2out_do_frame ())
2008 dwarf2out_frame_debug (insn, false);
2009 #endif
2010 break;
2012 case CODE_LABEL:
2013 /* The target port might emit labels in the output function for
2014 some insn, e.g. sh.c output_branchy_insn. */
2015 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2017 int align = LABEL_TO_ALIGNMENT (insn);
2018 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2019 int max_skip = LABEL_TO_MAX_SKIP (insn);
2020 #endif
2022 if (align && NEXT_INSN (insn))
2024 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2025 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2026 #else
2027 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2028 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2029 #else
2030 ASM_OUTPUT_ALIGN (file, align);
2031 #endif
2032 #endif
2035 #ifdef HAVE_cc0
2036 CC_STATUS_INIT;
2037 #endif
2039 if (LABEL_NAME (insn))
2040 (*debug_hooks->label) (insn);
2042 app_disable ();
2044 next = next_nonnote_insn (insn);
2045 /* If this label is followed by a jump-table, make sure we put
2046 the label in the read-only section. Also possibly write the
2047 label and jump table together. */
2048 if (next != 0 && JUMP_TABLE_DATA_P (next))
2050 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2051 /* In this case, the case vector is being moved by the
2052 target, so don't output the label at all. Leave that
2053 to the back end macros. */
2054 #else
2055 if (! JUMP_TABLES_IN_TEXT_SECTION)
2057 int log_align;
2059 switch_to_section (targetm.asm_out.function_rodata_section
2060 (current_function_decl));
2062 #ifdef ADDR_VEC_ALIGN
2063 log_align = ADDR_VEC_ALIGN (next);
2064 #else
2065 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2066 #endif
2067 ASM_OUTPUT_ALIGN (file, log_align);
2069 else
2070 switch_to_section (current_function_section ());
2072 #ifdef ASM_OUTPUT_CASE_LABEL
2073 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2074 next);
2075 #else
2076 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2077 #endif
2078 #endif
2079 break;
2081 if (LABEL_ALT_ENTRY_P (insn))
2082 output_alternate_entry_point (file, insn);
2083 else
2084 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2085 break;
2087 default:
2089 rtx body = PATTERN (insn);
2090 int insn_code_number;
2091 const char *templ;
2093 #ifdef HAVE_conditional_execution
2094 /* Reset this early so it is correct for ASM statements. */
2095 current_insn_predicate = NULL_RTX;
2096 #endif
2097 /* An INSN, JUMP_INSN or CALL_INSN.
2098 First check for special kinds that recog doesn't recognize. */
2100 if (GET_CODE (body) == USE /* These are just declarations. */
2101 || GET_CODE (body) == CLOBBER)
2102 break;
2104 #ifdef HAVE_cc0
2106 /* If there is a REG_CC_SETTER note on this insn, it means that
2107 the setting of the condition code was done in the delay slot
2108 of the insn that branched here. So recover the cc status
2109 from the insn that set it. */
2111 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2112 if (note)
2114 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2115 cc_prev_status = cc_status;
2118 #endif
2120 /* Detect insns that are really jump-tables
2121 and output them as such. */
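/* As an illustration: the ADDR_DIFF_VEC generated for a C `switch'
   typically ends up as a table of ASM_OUTPUT_ADDR_DIFF_ELT entries,
   i.e. directives along the lines of ".word .L5-.L3" on targets that
   do not define ASM_OUTPUT_ADDR_DIFF_VEC (hypothetical label names).  */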
2123 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2125 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2126 int vlen, idx;
2127 #endif
2129 if (! JUMP_TABLES_IN_TEXT_SECTION)
2130 switch_to_section (targetm.asm_out.function_rodata_section
2131 (current_function_decl));
2132 else
2133 switch_to_section (current_function_section ());
2135 app_disable ();
2137 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2138 if (GET_CODE (body) == ADDR_VEC)
2140 #ifdef ASM_OUTPUT_ADDR_VEC
2141 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2142 #else
2143 gcc_unreachable ();
2144 #endif
2146 else
2148 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2149 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2150 #else
2151 gcc_unreachable ();
2152 #endif
2154 #else
2155 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2156 for (idx = 0; idx < vlen; idx++)
2158 if (GET_CODE (body) == ADDR_VEC)
2160 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2161 ASM_OUTPUT_ADDR_VEC_ELT
2162 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2163 #else
2164 gcc_unreachable ();
2165 #endif
2167 else
2169 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2170 ASM_OUTPUT_ADDR_DIFF_ELT
2171 (file,
2172 body,
2173 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2174 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2175 #else
2176 gcc_unreachable ();
2177 #endif
2180 #ifdef ASM_OUTPUT_CASE_END
2181 ASM_OUTPUT_CASE_END (file,
2182 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2183 insn);
2184 #endif
2185 #endif
2187 switch_to_section (current_function_section ());
2189 break;
2191 /* Output this line note if it is the first or the last line
2192 note in a row. */
2193 if (notice_source_line (insn))
2195 (*debug_hooks->source_line) (last_linenum,
2196 last_filename,
2197 last_discriminator);
2200 if (GET_CODE (body) == ASM_INPUT)
2202 const char *string = XSTR (body, 0);
2204 /* There's no telling what that did to the condition codes. */
2205 CC_STATUS_INIT;
2207 if (string[0])
2209 expanded_location loc;
2211 app_enable ();
2212 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2213 if (*loc.file && loc.line)
2214 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2215 ASM_COMMENT_START, loc.line, loc.file);
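/* With ASM_COMMENT_START == "#" the fprintf just above emits a marker
   such as `# 42 "foo.c" 1' (hypothetical location), which assemblers
   like gas treat as a line-number directive so diagnostics point back
   at the user's asm statement.  */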
2216 fprintf (asm_out_file, "\t%s\n", string);
2217 #if HAVE_AS_LINE_ZERO
2218 if (*loc.file && loc.line)
2219 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2220 #endif
2222 break;
2225 /* Detect `asm' construct with operands. */
2226 if (asm_noperands (body) >= 0)
2228 unsigned int noperands = asm_noperands (body);
2229 rtx *ops = XALLOCAVEC (rtx, noperands);
2230 const char *string;
2231 location_t loc;
2232 expanded_location expanded;
2234 /* There's no telling what that did to the condition codes. */
2235 CC_STATUS_INIT;
2237 /* Get out the operand values. */
2238 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2239 /* Inhibit dying on what would otherwise be compiler bugs. */
2240 insn_noperands = noperands;
2241 this_is_asm_operands = insn;
2242 expanded = expand_location (loc);
2244 #ifdef FINAL_PRESCAN_INSN
2245 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2246 #endif
2248 /* Output the insn using them. */
2249 if (string[0])
2251 app_enable ();
2252 if (expanded.file && expanded.line)
2253 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2254 ASM_COMMENT_START, expanded.line, expanded.file);
2255 output_asm_insn (string, ops);
2256 #if HAVE_AS_LINE_ZERO
2257 if (expanded.file && expanded.line)
2258 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2259 #endif
2262 if (targetm.asm_out.final_postscan_insn)
2263 targetm.asm_out.final_postscan_insn (file, insn, ops,
2264 insn_noperands);
2266 this_is_asm_operands = 0;
2267 break;
2270 app_disable ();
2272 if (GET_CODE (body) == SEQUENCE)
2274 /* A delayed-branch sequence */
2275 int i;
2277 final_sequence = body;
2279 /* Record the delay slots' frame information before the branch.
2280 This is needed for delayed calls: see execute_cfa_program(). */
2281 #if defined (DWARF2_UNWIND_INFO)
2282 if (dwarf2out_do_frame ())
2283 for (i = 1; i < XVECLEN (body, 0); i++)
2284 dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2285 #endif
2287 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2288 force the restoration of a comparison that was previously
2289 thought unnecessary. If that happens, cancel this sequence
2290 and cause that insn to be restored. */
2292 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2293 if (next != XVECEXP (body, 0, 1))
2295 final_sequence = 0;
2296 return next;
2299 for (i = 1; i < XVECLEN (body, 0); i++)
2301 rtx insn = XVECEXP (body, 0, i);
2302 rtx next = NEXT_INSN (insn);
2303 /* We loop in case any instruction in a delay slot gets
2304 split. */
2305 do
2306 insn = final_scan_insn (insn, file, 0, 1, seen);
2307 while (insn != next);
2309 #ifdef DBR_OUTPUT_SEQEND
2310 DBR_OUTPUT_SEQEND (file);
2311 #endif
2312 final_sequence = 0;
2314 /* If the insn requiring the delay slot was a CALL_INSN, the
2315 insns in the delay slot are actually executed before the
2316 called function. Hence we don't preserve any CC-setting
2317 actions in these insns and the CC must be marked as being
2318 clobbered by the function. */
2319 if (CALL_P (XVECEXP (body, 0, 0)))
2321 CC_STATUS_INIT;
2323 break;
2326 /* We have a real machine instruction as rtl. */
2328 body = PATTERN (insn);
2330 #ifdef HAVE_cc0
2331 set = single_set (insn);
2333 /* Check for redundant test and compare instructions
2334 (when the condition codes are already set up as desired).
2335 This is done only when optimizing; if not optimizing,
2336 it should be possible for the user to alter a variable
2337 with the debugger in between statements
2338 and the next statement should reexamine the variable
2339 to compute the condition codes. */
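/* A sketch of the case handled below, on a hypothetical cc0 target:
   if NOTICE_UPDATE_CC recorded that the flags already reflect
   (reg:SI 2) after a preceding move, then a following
   (set (cc0) (reg:SI 2)) compares the same value and is simply
   remembered in last_ignored_compare instead of being output.  */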
2341 if (optimize)
2343 if (set
2344 && GET_CODE (SET_DEST (set)) == CC0
2345 && insn != last_ignored_compare)
2347 rtx src1, src2;
2348 if (GET_CODE (SET_SRC (set)) == SUBREG)
2349 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2351 src1 = SET_SRC (set);
2352 src2 = NULL_RTX;
2353 if (GET_CODE (SET_SRC (set)) == COMPARE)
2355 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2356 XEXP (SET_SRC (set), 0)
2357 = alter_subreg (&XEXP (SET_SRC (set), 0));
2358 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2359 XEXP (SET_SRC (set), 1)
2360 = alter_subreg (&XEXP (SET_SRC (set), 1));
2361 if (XEXP (SET_SRC (set), 1)
2362 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2363 src2 = XEXP (SET_SRC (set), 0);
2365 if ((cc_status.value1 != 0
2366 && rtx_equal_p (src1, cc_status.value1))
2367 || (cc_status.value2 != 0
2368 && rtx_equal_p (src1, cc_status.value2))
2369 || (src2 != 0 && cc_status.value1 != 0
2370 && rtx_equal_p (src2, cc_status.value1))
2371 || (src2 != 0 && cc_status.value2 != 0
2372 && rtx_equal_p (src2, cc_status.value2)))
2374 /* Don't delete insn if it has an addressing side-effect. */
2375 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2376 /* or if anything in it is volatile. */
2377 && ! volatile_refs_p (PATTERN (insn)))
2379 /* We don't really delete the insn; just ignore it. */
2380 last_ignored_compare = insn;
2381 break;
2387 /* If this is a conditional branch, maybe modify it
2388 if the cc's are in a nonstandard state
2389 so that it accomplishes the same thing that it would
2390 do straightforwardly if the cc's were set up normally. */
2392 if (cc_status.flags != 0
2393 && JUMP_P (insn)
2394 && GET_CODE (body) == SET
2395 && SET_DEST (body) == pc_rtx
2396 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2397 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2398 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2400 /* This function may alter the contents of its argument
2401 and clear some of the cc_status.flags bits.
2402 It may also return 1 meaning condition now always true
2403 or -1 meaning condition now always false
2404 or 2 meaning condition nontrivial but altered. */
2405 int result = alter_cond (XEXP (SET_SRC (body), 0));
2406 /* If condition now has fixed value, replace the IF_THEN_ELSE
2407 with its then-operand or its else-operand. */
2408 if (result == 1)
2409 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2410 if (result == -1)
2411 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2413 /* The jump is now either unconditional or a no-op.
2414 If it has become a no-op, don't try to output it.
2415 (It would not be recognized.) */
2416 if (SET_SRC (body) == pc_rtx)
2418 delete_insn (insn);
2419 break;
2421 else if (GET_CODE (SET_SRC (body)) == RETURN)
2422 /* Replace (set (pc) (return)) with (return). */
2423 PATTERN (insn) = body = SET_SRC (body);
2425 /* Rerecognize the instruction if it has changed. */
2426 if (result != 0)
2427 INSN_CODE (insn) = -1;
2430 /* If this is a conditional trap, maybe modify it if the cc's
2431 are in a nonstandard state so that it accomplishes the same
2432 thing that it would do straightforwardly if the cc's were
2433 set up normally. */
2434 if (cc_status.flags != 0
2435 && NONJUMP_INSN_P (insn)
2436 && GET_CODE (body) == TRAP_IF
2437 && COMPARISON_P (TRAP_CONDITION (body))
2438 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2440 /* This function may alter the contents of its argument
2441 and clear some of the cc_status.flags bits.
2442 It may also return 1 meaning condition now always true
2443 or -1 meaning condition now always false
2444 or 2 meaning condition nontrivial but altered. */
2445 int result = alter_cond (TRAP_CONDITION (body));
2447 /* If TRAP_CONDITION has become always false, delete the
2448 instruction. */
2449 if (result == -1)
2451 delete_insn (insn);
2452 break;
2455 /* If TRAP_CONDITION has become always true, replace
2456 TRAP_CONDITION with const_true_rtx. */
2457 if (result == 1)
2458 TRAP_CONDITION (body) = const_true_rtx;
2460 /* Rerecognize the instruction if it has changed. */
2461 if (result != 0)
2462 INSN_CODE (insn) = -1;
2465 /* Make same adjustments to instructions that examine the
2466 condition codes without jumping and instructions that
2467 handle conditional moves (if this machine has either one). */
2469 if (cc_status.flags != 0
2470 && set != 0)
2472 rtx cond_rtx, then_rtx, else_rtx;
2474 if (!JUMP_P (insn)
2475 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2477 cond_rtx = XEXP (SET_SRC (set), 0);
2478 then_rtx = XEXP (SET_SRC (set), 1);
2479 else_rtx = XEXP (SET_SRC (set), 2);
2481 else
2483 cond_rtx = SET_SRC (set);
2484 then_rtx = const_true_rtx;
2485 else_rtx = const0_rtx;
2488 switch (GET_CODE (cond_rtx))
2490 case GTU:
2491 case GT:
2492 case LTU:
2493 case LT:
2494 case GEU:
2495 case GE:
2496 case LEU:
2497 case LE:
2498 case EQ:
2499 case NE:
2501 int result;
2502 if (XEXP (cond_rtx, 0) != cc0_rtx)
2503 break;
2504 result = alter_cond (cond_rtx);
2505 if (result == 1)
2506 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2507 else if (result == -1)
2508 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2509 else if (result == 2)
2510 INSN_CODE (insn) = -1;
2511 if (SET_DEST (set) == SET_SRC (set))
2512 delete_insn (insn);
2514 break;
2516 default:
2517 break;
2521 #endif
2523 #ifdef HAVE_peephole
2524 /* Do machine-specific peephole optimizations if desired. */
2526 if (optimize && !flag_no_peephole && !nopeepholes)
2528 rtx next = peephole (insn);
2529 /* When peepholing, if there were notes within the peephole,
2530 emit them before the peephole. */
2531 if (next != 0 && next != NEXT_INSN (insn))
2533 rtx note, prev = PREV_INSN (insn);
2535 for (note = NEXT_INSN (insn); note != next;
2536 note = NEXT_INSN (note))
2537 final_scan_insn (note, file, optimize, nopeepholes, seen);
2539 /* Put the notes in the proper position for a later
2540 rescan. For example, the SH target can do this
2541 when generating a far jump in a delayed branch
2542 sequence. */
2543 note = NEXT_INSN (insn);
2544 PREV_INSN (note) = prev;
2545 NEXT_INSN (prev) = note;
2546 NEXT_INSN (PREV_INSN (next)) = insn;
2547 PREV_INSN (insn) = PREV_INSN (next);
2548 NEXT_INSN (insn) = next;
2549 PREV_INSN (next) = insn;
2552 /* PEEPHOLE might have changed this. */
2553 body = PATTERN (insn);
2555 #endif
2557 /* Try to recognize the instruction.
2558 If successful, verify that the operands satisfy the
2559 constraints for the instruction. Crash if they don't,
2560 since `reload' should have changed them so that they do. */
2562 insn_code_number = recog_memoized (insn);
2563 cleanup_subreg_operands (insn);
2565 /* Dump the insn in the assembly for debugging. */
2566 if (flag_dump_rtl_in_asm)
2568 print_rtx_head = ASM_COMMENT_START;
2569 print_rtl_single (asm_out_file, insn);
2570 print_rtx_head = "";
2573 if (! constrain_operands_cached (1))
2574 fatal_insn_not_found (insn);
2576 /* Some target machines need to prescan each insn before
2577 it is output. */
2579 #ifdef FINAL_PRESCAN_INSN
2580 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2581 #endif
2583 #ifdef HAVE_conditional_execution
2584 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2585 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2586 #endif
2588 #ifdef HAVE_cc0
2589 cc_prev_status = cc_status;
2591 /* Update `cc_status' for this instruction.
2592 The instruction's output routine may change it further.
2593 If the output routine for a jump insn needs to depend
2594 on the cc status, it should look at cc_prev_status. */
2596 NOTICE_UPDATE_CC (body, insn);
2597 #endif
2599 current_output_insn = debug_insn = insn;
2601 #if defined (DWARF2_UNWIND_INFO)
2602 if (CALL_P (insn) && dwarf2out_do_frame ())
2603 dwarf2out_frame_debug (insn, false);
2604 #endif
2606 /* Find the proper template for this insn. */
2607 templ = get_insn_template (insn_code_number, insn);
2609 /* If the C code returns 0, it means that it is a jump insn
2610 which follows a deleted test insn, and that test insn
2611 needs to be reinserted. */
2612 if (templ == 0)
2614 rtx prev;
2616 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2618 /* We have already processed the notes between the setter and
2619 the user. Make sure we don't process them again, this is
2620 particularly important if one of the notes is a block
2621 scope note or an EH note. */
2622 for (prev = insn;
2623 prev != last_ignored_compare;
2624 prev = PREV_INSN (prev))
2626 if (NOTE_P (prev))
2627 delete_insn (prev); /* Use delete_note. */
2630 return prev;
2633 /* If the template is the string "#", it means that this insn must
2634 be split. */
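/* (A target typically returns "#" for something like a multi-word
   move whose define_split breaks it into word-sized moves; by this
   point such an insn should normally have been split already.)  */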
2635 if (templ[0] == '#' && templ[1] == '\0')
2637 rtx new_rtx = try_split (body, insn, 0);
2639 /* If we didn't split the insn, go away. */
2640 if (new_rtx == insn && PATTERN (new_rtx) == body)
2641 fatal_insn ("could not split insn", insn);
2643 #ifdef HAVE_ATTR_length
2644 /* This instruction should have been split in shorten_branches,
2645 to ensure that we would have valid length info for the
2646 splitees. */
2647 gcc_unreachable ();
2648 #endif
2650 return new_rtx;
2653 #ifdef TARGET_UNWIND_INFO
2654 /* ??? This will put the directives in the wrong place if
2655 get_insn_template outputs assembly directly.  However, calling it
2656 before get_insn_template breaks if the insn is split. */
2657 targetm.asm_out.unwind_emit (asm_out_file, insn);
2658 #endif
2660 if (CALL_P (insn))
2662 rtx x = call_from_call_insn (insn);
2663 x = XEXP (x, 0);
2664 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2666 tree t;
2667 x = XEXP (x, 0);
2668 t = SYMBOL_REF_DECL (x);
2669 if (t)
2670 assemble_external (t);
2674 /* Output assembler code from the template. */
2675 output_asm_insn (templ, recog_data.operand);
2677 /* Some target machines need to postscan each insn after
2678 it is output. */
2679 if (targetm.asm_out.final_postscan_insn)
2680 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2681 recog_data.n_operands);
2683 /* If necessary, report the effect that the instruction has on
2684 the unwind info. We've already done this for delay slots
2685 and call instructions. */
2686 #if defined (DWARF2_UNWIND_INFO)
2687 if (final_sequence == 0
2688 #if !defined (HAVE_prologue)
2689 && !ACCUMULATE_OUTGOING_ARGS
2690 #endif
2691 && dwarf2out_do_frame ())
2692 dwarf2out_frame_debug (insn, true);
2693 #endif
2695 current_output_insn = debug_insn = 0;
2698 return NEXT_INSN (insn);
2701 /* Return whether a source line note needs to be emitted before INSN. */
2703 static bool
2704 notice_source_line (rtx insn)
2706 const char *filename;
2707 int linenum;
2709 if (override_filename)
2711 filename = override_filename;
2712 linenum = override_linenum;
2714 else
2716 filename = insn_file (insn);
2717 linenum = insn_line (insn);
2720 if (filename
2721 && (force_source_line
2722 || filename != last_filename
2723 || last_linenum != linenum
2724 || last_discriminator != discriminator))
2726 force_source_line = false;
2727 last_filename = filename;
2728 last_linenum = linenum;
2729 last_discriminator = discriminator;
2730 high_block_linenum = MAX (last_linenum, high_block_linenum);
2731 high_function_linenum = MAX (last_linenum, high_function_linenum);
2732 return true;
2734 return false;
2737 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2738 directly to the desired hard register. */
2740 void
2741 cleanup_subreg_operands (rtx insn)
2743 int i;
2744 bool changed = false;
2745 extract_insn_cached (insn);
2746 for (i = 0; i < recog_data.n_operands; i++)
2748 /* The following test cannot use recog_data.operand when testing
2749 for a SUBREG: the underlying object might have been changed
2750 already if we are inside a match_operator expression that
2751 matches the else clause. Instead we test the underlying
2752 expression directly. */
2753 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2755 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2756 changed = true;
2758 else if (GET_CODE (recog_data.operand[i]) == PLUS
2759 || GET_CODE (recog_data.operand[i]) == MULT
2760 || MEM_P (recog_data.operand[i]))
2761 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2764 for (i = 0; i < recog_data.n_dups; i++)
2766 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2768 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2769 changed = true;
2771 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2772 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2773 || MEM_P (*recog_data.dup_loc[i]))
2774 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2776 if (changed)
2777 df_insn_rescan (insn);
2780 /* If X is a SUBREG, replace it with a REG or a MEM,
2781 based on the thing it is a subreg of. */
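/* For example (hypothetical operands): (subreg:SI (mem:DI A) 4)
   becomes (mem:SI (plus A 4)) via adjust_address below, while
   (subreg:HI (reg:SI 1) 0) is folded to the matching hard register
   in HImode.  */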
2783 rtx
2784 alter_subreg (rtx *xp)
2786 rtx x = *xp;
2787 rtx y = SUBREG_REG (x);
2789 /* simplify_subreg does not remove subreg from volatile references.
2790 We are required to. */
2791 if (MEM_P (y))
2793 int offset = SUBREG_BYTE (x);
2795 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2796 contains 0 instead of the proper offset. See simplify_subreg. */
2797 if (offset == 0
2798 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2800 int difference = GET_MODE_SIZE (GET_MODE (y))
2801 - GET_MODE_SIZE (GET_MODE (x));
2802 if (WORDS_BIG_ENDIAN)
2803 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2804 if (BYTES_BIG_ENDIAN)
2805 offset += difference % UNITS_PER_WORD;
2808 *xp = adjust_address (y, GET_MODE (x), offset);
2810 else
2812 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2813 SUBREG_BYTE (x));
2815 if (new_rtx != 0)
2816 *xp = new_rtx;
2817 else if (REG_P (y))
2819 /* Simplify_subreg can't handle some REG cases, but we have to. */
2820 unsigned int regno;
2821 HOST_WIDE_INT offset;
2823 regno = subreg_regno (x);
2824 if (subreg_lowpart_p (x))
2825 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
2826 else
2827 offset = SUBREG_BYTE (x);
2828 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
2832 return *xp;
2835 /* Do alter_subreg on all the SUBREGs contained in X. */
2837 static rtx
2838 walk_alter_subreg (rtx *xp, bool *changed)
2840 rtx x = *xp;
2841 switch (GET_CODE (x))
2843 case PLUS:
2844 case MULT:
2845 case AND:
2846 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2847 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2848 break;
2850 case MEM:
2851 case ZERO_EXTEND:
2852 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2853 break;
2855 case SUBREG:
2856 *changed = true;
2857 return alter_subreg (xp);
2859 default:
2860 break;
2863 return *xp;
2866 #ifdef HAVE_cc0
2868 /* Given BODY, the body of a jump instruction, alter the jump condition
2869 as required by the bits that are set in cc_status.flags.
2870 Not all of the bits there can be handled at this level in all cases.
2872 The value is normally 0.
2873 1 means that the condition has become always true.
2874 -1 means that the condition has become always false.
2875 2 means that COND has been altered. */
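/* For instance, with CC_REVERSED set a condition such as
   (gt (cc0) (const_int 0)) is rewritten to LT and 2 is returned;
   with CC_NO_OVERFLOW set, a GEU condition is reported as always
   true (return value 1).  */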
2877 static int
2878 alter_cond (rtx cond)
2880 int value = 0;
2882 if (cc_status.flags & CC_REVERSED)
2884 value = 2;
2885 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2888 if (cc_status.flags & CC_INVERTED)
2890 value = 2;
2891 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2894 if (cc_status.flags & CC_NOT_POSITIVE)
2895 switch (GET_CODE (cond))
2897 case LE:
2898 case LEU:
2899 case GEU:
2900 /* Jump becomes unconditional. */
2901 return 1;
2903 case GT:
2904 case GTU:
2905 case LTU:
2906 /* Jump becomes no-op. */
2907 return -1;
2909 case GE:
2910 PUT_CODE (cond, EQ);
2911 value = 2;
2912 break;
2914 case LT:
2915 PUT_CODE (cond, NE);
2916 value = 2;
2917 break;
2919 default:
2920 break;
2923 if (cc_status.flags & CC_NOT_NEGATIVE)
2924 switch (GET_CODE (cond))
2926 case GE:
2927 case GEU:
2928 /* Jump becomes unconditional. */
2929 return 1;
2931 case LT:
2932 case LTU:
2933 /* Jump becomes no-op. */
2934 return -1;
2936 case LE:
2937 case LEU:
2938 PUT_CODE (cond, EQ);
2939 value = 2;
2940 break;
2942 case GT:
2943 case GTU:
2944 PUT_CODE (cond, NE);
2945 value = 2;
2946 break;
2948 default:
2949 break;
2952 if (cc_status.flags & CC_NO_OVERFLOW)
2953 switch (GET_CODE (cond))
2955 case GEU:
2956 /* Jump becomes unconditional. */
2957 return 1;
2959 case LEU:
2960 PUT_CODE (cond, EQ);
2961 value = 2;
2962 break;
2964 case GTU:
2965 PUT_CODE (cond, NE);
2966 value = 2;
2967 break;
2969 case LTU:
2970 /* Jump becomes no-op. */
2971 return -1;
2973 default:
2974 break;
2977 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2978 switch (GET_CODE (cond))
2980 default:
2981 gcc_unreachable ();
2983 case NE:
2984 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2985 value = 2;
2986 break;
2988 case EQ:
2989 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2990 value = 2;
2991 break;
2994 if (cc_status.flags & CC_NOT_SIGNED)
2995 /* The flags are valid if signed condition operators are converted
2996 to unsigned. */
2997 switch (GET_CODE (cond))
2999 case LE:
3000 PUT_CODE (cond, LEU);
3001 value = 2;
3002 break;
3004 case LT:
3005 PUT_CODE (cond, LTU);
3006 value = 2;
3007 break;
3009 case GT:
3010 PUT_CODE (cond, GTU);
3011 value = 2;
3012 break;
3014 case GE:
3015 PUT_CODE (cond, GEU);
3016 value = 2;
3017 break;
3019 default:
3020 break;
3023 return value;
3025 #endif
3027 /* Report inconsistency between the assembler template and the operands.
3028 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3030 void
3031 output_operand_lossage (const char *cmsgid, ...)
3033 char *fmt_string;
3034 char *new_message;
3035 const char *pfx_str;
3036 va_list ap;
3038 va_start (ap, cmsgid);
3040 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3041 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3042 vasprintf (&new_message, fmt_string, ap);
3044 if (this_is_asm_operands)
3045 error_for_asm (this_is_asm_operands, "%s", new_message);
3046 else
3047 internal_error ("%s", new_message);
3049 free (fmt_string);
3050 free (new_message);
3051 va_end (ap);
3054 /* Output of assembler code from a template, and its subroutines. */
3056 /* Annotate the assembly with a comment describing the pattern and
3057 alternative used. */
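/* The resulting annotation looks roughly like
   "movl %edx, %eax # 23 *movsi_1/2 [length = 2]"
   (hypothetical insn); the alternative and length fields appear only
   when applicable.  */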
3059 static void
3060 output_asm_name (void)
3062 if (debug_insn)
3064 int num = INSN_CODE (debug_insn);
3065 fprintf (asm_out_file, "\t%s %d\t%s",
3066 ASM_COMMENT_START, INSN_UID (debug_insn),
3067 insn_data[num].name);
3068 if (insn_data[num].n_alternatives > 1)
3069 fprintf (asm_out_file, "/%d", which_alternative + 1);
3070 #ifdef HAVE_ATTR_length
3071 fprintf (asm_out_file, "\t[length = %d]",
3072 get_attr_length (debug_insn));
3073 #endif
3074 /* Clear this so only the first assembler insn
3075 of any rtl insn will get the special comment for -dp. */
3076 debug_insn = 0;
3080 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3081 or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3082 corresponds to the address of the object and 0 if to the object. */
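/* E.g. for (mem (reg R)) with no MEM_EXPR, where R's REG_EXPR is the
   pointer parameter `p', the decl for `p' is returned with *PADDRESSP
   set to 1, so the -fverbose-asm annotation shows the operand as "*p"
   (hypothetical operand).  */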
3084 static tree
3085 get_mem_expr_from_op (rtx op, int *paddressp)
3087 tree expr;
3088 int inner_addressp;
3090 *paddressp = 0;
3092 if (REG_P (op))
3093 return REG_EXPR (op);
3094 else if (!MEM_P (op))
3095 return 0;
3097 if (MEM_EXPR (op) != 0)
3098 return MEM_EXPR (op);
3100 /* Otherwise we have an address, so indicate it and look at the address. */
3101 *paddressp = 1;
3102 op = XEXP (op, 0);
3104 /* First check if we have a decl for the address, then look at the right side
3105 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3106 But don't allow the address to itself be indirect. */
3107 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3108 return expr;
3109 else if (GET_CODE (op) == PLUS
3110 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3111 return expr;
3113 while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
3114 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3115 op = XEXP (op, 0);
3117 expr = get_mem_expr_from_op (op, &inner_addressp);
3118 return inner_addressp ? 0 : expr;
3121 /* Output operand names for assembler instructions. OPERANDS is the
3122 operand vector, OPORDER is the order to write the operands, and NOPS
3123 is the number of operands to write. */
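/* This is what produces the trailing "# x, tmp66"-style annotations
   under -fverbose-asm (hypothetical names): a known MEM_EXPR/REG_EXPR
   prints as the variable, "*var" for an address, and "tmpNN" when only
   the original pseudo register number is known.  */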
3125 static void
3126 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3128 int wrote = 0;
3129 int i;
3131 for (i = 0; i < nops; i++)
3133 int addressp;
3134 rtx op = operands[oporder[i]];
3135 tree expr = get_mem_expr_from_op (op, &addressp);
3137 fprintf (asm_out_file, "%c%s",
3138 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3139 wrote = 1;
3140 if (expr)
3142 fprintf (asm_out_file, "%s",
3143 addressp ? "*" : "");
3144 print_mem_expr (asm_out_file, expr);
3145 wrote = 1;
3147 else if (REG_P (op) && ORIGINAL_REGNO (op)
3148 && ORIGINAL_REGNO (op) != REGNO (op))
3149 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3153 /* Output text from TEMPLATE to the assembler output file,
3154 obeying %-directions to substitute operands taken from
3155 the vector OPERANDS.
3157 %N (for N a digit) means print operand N in usual manner.
3158 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3159 and print the label name with no punctuation.
3160 %cN means require operand N to be a constant
3161 and print the constant expression with no punctuation.
3162 %aN means expect operand N to be a memory address
3163 (not a memory reference!) and print a reference
3164 to that address.
3165 %nN means expect operand N to be a constant
3166 and print a constant expression for minus the value
3167 of the operand, with no other punctuation. */
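/* For example (hypothetical templates): "jmp %l0" prints operand 0 as
   a bare label name, while "addi %0,%1,%n2" would print the negated
   value of a constant operand 2.  A "%%" in a template emits a
   literal '%'.  */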
3169 void
3170 output_asm_insn (const char *templ, rtx *operands)
3172 const char *p;
3173 int c;
3174 #ifdef ASSEMBLER_DIALECT
3175 int dialect = 0;
3176 #endif
3177 int oporder[MAX_RECOG_OPERANDS];
3178 char opoutput[MAX_RECOG_OPERANDS];
3179 int ops = 0;
3181 /* An insn may return a null string template
3182 in a case where no assembler code is needed. */
3183 if (*templ == 0)
3184 return;
3186 memset (opoutput, 0, sizeof opoutput);
3187 p = templ;
3188 putc ('\t', asm_out_file);
3190 #ifdef ASM_OUTPUT_OPCODE
3191 ASM_OUTPUT_OPCODE (asm_out_file, p);
3192 #endif
3194 while ((c = *p++))
3195 switch (c)
3197 case '\n':
3198 if (flag_verbose_asm)
3199 output_asm_operand_names (operands, oporder, ops);
3200 if (flag_print_asm_name)
3201 output_asm_name ();
3203 ops = 0;
3204 memset (opoutput, 0, sizeof opoutput);
3206 putc (c, asm_out_file);
3207 #ifdef ASM_OUTPUT_OPCODE
3208 while ((c = *p) == '\t')
3210 putc (c, asm_out_file);
3211 p++;
3213 ASM_OUTPUT_OPCODE (asm_out_file, p);
3214 #endif
3215 break;
3217 #ifdef ASSEMBLER_DIALECT
3218 case '{':
3220 int i;
3222 if (dialect)
3223 output_operand_lossage ("nested assembly dialect alternatives");
3224 else
3225 dialect = 1;
3227 /* If we want the first dialect, do nothing. Otherwise, skip
3228 DIALECT_NUMBER of strings ending with '|'. */
3229 for (i = 0; i < dialect_number; i++)
3231 while (*p && *p != '}' && *p++ != '|')
3233 if (*p == '}')
3234 break;
3235 if (*p == '|')
3236 p++;
3239 if (*p == '\0')
3240 output_operand_lossage ("unterminated assembly dialect alternative");
3242 break;
3244 case '|':
3245 if (dialect)
3247 /* Skip to close brace. */
3248 do
3250 if (*p == '\0')
3252 output_operand_lossage ("unterminated assembly dialect alternative");
3253 break;
3256 while (*p++ != '}');
3257 dialect = 0;
3259 else
3260 putc (c, asm_out_file);
3261 break;
3263 case '}':
3264 if (! dialect)
3265 putc (c, asm_out_file);
3266 dialect = 0;
3267 break;
3268 #endif
3270 case '%':
3271 /* %% outputs a single %. */
3272 if (*p == '%')
3274 p++;
3275 putc (c, asm_out_file);
3277 /* %= outputs a number which is unique to each insn in the entire
3278 compilation. This is useful for making local labels that are
3279 referred to more than once in a given insn. */
3280 else if (*p == '=')
3282 p++;
3283 fprintf (asm_out_file, "%d", insn_counter);
3285 /* % followed by a letter and some digits
3286 outputs an operand in a special way depending on the letter.
3287 Letters `acln' are implemented directly.
3288 Other letters are passed to `output_operand' so that
3289 the PRINT_OPERAND macro can define them. */
3290 else if (ISALPHA (*p))
3292 int letter = *p++;
3293 unsigned long opnum;
3294 char *endptr;
3296 opnum = strtoul (p, &endptr, 10);
3298 if (endptr == p)
3299 output_operand_lossage ("operand number missing "
3300 "after %%-letter");
3301 else if (this_is_asm_operands && opnum >= insn_noperands)
3302 output_operand_lossage ("operand number out of range");
3303 else if (letter == 'l')
3304 output_asm_label (operands[opnum]);
3305 else if (letter == 'a')
3306 output_address (operands[opnum]);
3307 else if (letter == 'c')
3309 if (CONSTANT_ADDRESS_P (operands[opnum]))
3310 output_addr_const (asm_out_file, operands[opnum]);
3311 else
3312 output_operand (operands[opnum], 'c');
3314 else if (letter == 'n')
3316 if (GET_CODE (operands[opnum]) == CONST_INT)
3317 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3318 - INTVAL (operands[opnum]));
3319 else
3321 putc ('-', asm_out_file);
3322 output_addr_const (asm_out_file, operands[opnum]);
3325 else
3326 output_operand (operands[opnum], letter);
3328 if (!opoutput[opnum])
3329 oporder[ops++] = opnum;
3330 opoutput[opnum] = 1;
3332 p = endptr;
3333 c = *p;
3335 /* % followed by a digit outputs an operand the default way. */
3336 else if (ISDIGIT (*p))
3338 unsigned long opnum;
3339 char *endptr;
3341 opnum = strtoul (p, &endptr, 10);
3342 if (this_is_asm_operands && opnum >= insn_noperands)
3343 output_operand_lossage ("operand number out of range");
3344 else
3345 output_operand (operands[opnum], 0);
3347 if (!opoutput[opnum])
3348 oporder[ops++] = opnum;
3349 opoutput[opnum] = 1;
3351 p = endptr;
3352 c = *p;
3354 /* % followed by punctuation: output something for that
3355 punctuation character alone, with no operand.
3356 The PRINT_OPERAND macro decides what is actually done. */
3357 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3358 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3359 output_operand (NULL_RTX, *p++);
3360 #endif
3361 else
3362 output_operand_lossage ("invalid %%-code");
3363 break;
3365 default:
3366 putc (c, asm_out_file);
3369 /* Write out the variable names for operands, if we know them. */
3370 if (flag_verbose_asm)
3371 output_asm_operand_names (operands, oporder, ops);
3372 if (flag_print_asm_name)
3373 output_asm_name ();
3375 putc ('\n', asm_out_file);
3378 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3380 void
3381 output_asm_label (rtx x)
3383 char buf[256];
3385 if (GET_CODE (x) == LABEL_REF)
3386 x = XEXP (x, 0);
3387 if (LABEL_P (x)
3388 || (NOTE_P (x)
3389 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3390 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3391 else
3392 output_operand_lossage ("'%%l' operand isn't a label");
3394 assemble_name (asm_out_file, buf);
3397 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3398 output_operand. Marks SYMBOL_REFs as referenced through use of
3399 assemble_external. */
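/* Per for_each_rtx's contract, returning -1 here skips the sub-rtxes
   of the SYMBOL_REF and returning 0 simply continues the walk; e.g.
   printing (mem (symbol_ref "errno")) marks errno's decl (if any) as
   referenced.  */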
3401 static int
3402 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3404 rtx x = *xp;
3406 /* If we have a used symbol, we may have to emit assembly
3407 annotations corresponding to whether the symbol is external, weak
3408 or has non-default visibility. */
3409 if (GET_CODE (x) == SYMBOL_REF)
3411 tree t;
3413 t = SYMBOL_REF_DECL (x);
3414 if (t)
3415 assemble_external (t);
3417 return -1;
3420 return 0;
3423 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3425 void
3426 mark_symbol_refs_as_used (rtx x)
3428 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3431 /* Print operand X using machine-dependent assembler syntax.
3432 The macro PRINT_OPERAND is defined just to control this function.
3433 CODE is a non-digit that preceded the operand-number in the % spec,
3434 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3435 between the % and the digits.
3436 When CODE is a non-letter, X is 0.
3438 The meanings of the letters are machine-dependent and controlled
3439 by PRINT_OPERAND. */
3441 static void
3442 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3444 if (x && GET_CODE (x) == SUBREG)
3445 x = alter_subreg (&x);
3447 /* X must not be a pseudo reg. */
3448 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3450 PRINT_OPERAND (asm_out_file, x, code);
3452 if (x == NULL_RTX)
3453 return;
3455 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3458 /* Print a memory reference operand for address X
3459 using machine-dependent assembler syntax.
3460 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3462 void
3463 output_address (rtx x)
3465 bool changed = false;
3466 walk_alter_subreg (&x, &changed);
3467 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3470 /* Print an integer constant expression in assembler syntax.
3471 Addition and subtraction are the only arithmetic
3472 that may appear in these expressions. */
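/* For example, (const (plus (symbol_ref "x") (const_int 12))) comes
   out roughly as "x+12", and a negative offset prints without the
   '+', giving "x-4" (modulo any target-specific symbol decoration).  */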
3474 void
3475 output_addr_const (FILE *file, rtx x)
3477 char buf[256];
3479 restart:
3480 switch (GET_CODE (x))
3482 case PC:
3483 putc ('.', file);
3484 break;
3486 case SYMBOL_REF:
3487 if (SYMBOL_REF_DECL (x))
3489 mark_decl_referenced (SYMBOL_REF_DECL (x));
3490 assemble_external (SYMBOL_REF_DECL (x));
3492 #ifdef ASM_OUTPUT_SYMBOL_REF
3493 ASM_OUTPUT_SYMBOL_REF (file, x);
3494 #else
3495 assemble_name (file, XSTR (x, 0));
3496 #endif
3497 break;
3499 case LABEL_REF:
3500 x = XEXP (x, 0);
3501 /* Fall through. */
3502 case CODE_LABEL:
3503 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3504 #ifdef ASM_OUTPUT_LABEL_REF
3505 ASM_OUTPUT_LABEL_REF (file, buf);
3506 #else
3507 assemble_name (file, buf);
3508 #endif
3509 break;
3511 case CONST_INT:
3512 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3513 break;
3515 case CONST:
3516 /* This used to output parentheses around the expression,
3517 but that does not work on the 386 (either ATT or BSD assembler). */
3518 output_addr_const (file, XEXP (x, 0));
3519 break;
3521 case CONST_DOUBLE:
3522 if (GET_MODE (x) == VOIDmode)
3524 /* We can use %d if the number is one word and positive. */
3525 if (CONST_DOUBLE_HIGH (x))
3526 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3527 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3528 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3529 else if (CONST_DOUBLE_LOW (x) < 0)
3530 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3531 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3532 else
3533 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3535 else
3536 /* We can't handle floating point constants;
3537 PRINT_OPERAND must handle them. */
3538 output_operand_lossage ("floating constant misused");
3539 break;
3541 case CONST_FIXED:
3542 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3543 (unsigned HOST_WIDE_INT) CONST_FIXED_VALUE_LOW (x));
3544 break;
3546 case PLUS:
3547 /* Some assemblers need integer constants to appear last (e.g. masm). */
3548 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3550 output_addr_const (file, XEXP (x, 1));
3551 if (INTVAL (XEXP (x, 0)) >= 0)
3552 fprintf (file, "+");
3553 output_addr_const (file, XEXP (x, 0));
3555 else
3557 output_addr_const (file, XEXP (x, 0));
3558 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3559 || INTVAL (XEXP (x, 1)) >= 0)
3560 fprintf (file, "+");
3561 output_addr_const (file, XEXP (x, 1));
3563 break;
3565 case MINUS:
3566 /* Avoid outputting things like x-x or x+5-x,
3567 since some assemblers can't handle that. */
3568 x = simplify_subtraction (x);
3569 if (GET_CODE (x) != MINUS)
3570 goto restart;
3572 output_addr_const (file, XEXP (x, 0));
3573 fprintf (file, "-");
3574 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3575 || GET_CODE (XEXP (x, 1)) == PC
3576 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3577 output_addr_const (file, XEXP (x, 1));
3578 else
3580 fputs (targetm.asm_out.open_paren, file);
3581 output_addr_const (file, XEXP (x, 1));
3582 fputs (targetm.asm_out.close_paren, file);
3584 break;
3586 case ZERO_EXTEND:
3587 case SIGN_EXTEND:
3588 case SUBREG:
3589 case TRUNCATE:
3590 output_addr_const (file, XEXP (x, 0));
3591 break;
3593 default:
3594 #ifdef OUTPUT_ADDR_CONST_EXTRA
3595 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3596 break;
3598 fail:
3599 #endif
3600 output_operand_lossage ("invalid expression as operand");
3604 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3605 %R prints the value of REGISTER_PREFIX.
3606 %L prints the value of LOCAL_LABEL_PREFIX.
3607 %U prints the value of USER_LABEL_PREFIX.
3608 %I prints the value of IMMEDIATE_PREFIX.
3609 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3610 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3612 We handle alternate assembler dialects here, just like output_asm_insn. */
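/* A minimal usage sketch (hypothetical caller and name):
     asm_fprintf (asm_out_file, "\tcall\t%U%s\n", fnname);
   %U expands to user_label_prefix, so a target whose prefix is "_"
   gets "call _foo" for fnname == "foo".  */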
3614 void
3615 asm_fprintf (FILE *file, const char *p, ...)
3617 char buf[10];
3618 char *q, c;
3619 va_list argptr;
3621 va_start (argptr, p);
3623 buf[0] = '%';
3625 while ((c = *p++))
3626 switch (c)
3628 #ifdef ASSEMBLER_DIALECT
3629 case '{':
3631 int i;
3633 /* If we want the first dialect, do nothing. Otherwise, skip
3634 DIALECT_NUMBER of strings ending with '|'. */
3635 for (i = 0; i < dialect_number; i++)
3637 while (*p && *p++ != '|')
3640 if (*p == '|')
3641 p++;
3644 break;
3646 case '|':
3647 /* Skip to close brace. */
3648 while (*p && *p++ != '}')
3650 break;
3652 case '}':
3653 break;
3654 #endif
3656 case '%':
3657 c = *p++;
3658 q = &buf[1];
3659 while (strchr ("-+ #0", c))
3661 *q++ = c;
3662 c = *p++;
3664 while (ISDIGIT (c) || c == '.')
3666 *q++ = c;
3667 c = *p++;
3669 switch (c)
3671 case '%':
3672 putc ('%', file);
3673 break;
3675 case 'd': case 'i': case 'u':
3676 case 'x': case 'X': case 'o':
3677 case 'c':
3678 *q++ = c;
3679 *q = 0;
3680 fprintf (file, buf, va_arg (argptr, int));
3681 break;
3683 case 'w':
3684 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3685 'o' cases, but we do not check for those cases. It
3686 means that the value is a HOST_WIDE_INT, which may be
3687 either `long' or `long long'. */
3688 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3689 q += strlen (HOST_WIDE_INT_PRINT);
3690 *q++ = *p++;
3691 *q = 0;
3692 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3693 break;
3695 case 'l':
3696 *q++ = c;
3697 #ifdef HAVE_LONG_LONG
3698 if (*p == 'l')
3700 *q++ = *p++;
3701 *q++ = *p++;
3702 *q = 0;
3703 fprintf (file, buf, va_arg (argptr, long long));
3705 else
3706 #endif
3708 *q++ = *p++;
3709 *q = 0;
3710 fprintf (file, buf, va_arg (argptr, long));
3713 break;
3715 case 's':
3716 *q++ = c;
3717 *q = 0;
3718 fprintf (file, buf, va_arg (argptr, char *));
3719 break;
3721 case 'O':
3722 #ifdef ASM_OUTPUT_OPCODE
3723 ASM_OUTPUT_OPCODE (asm_out_file, p);
3724 #endif
3725 break;
3727 case 'R':
3728 #ifdef REGISTER_PREFIX
3729 fprintf (file, "%s", REGISTER_PREFIX);
3730 #endif
3731 break;
3733 case 'I':
3734 #ifdef IMMEDIATE_PREFIX
3735 fprintf (file, "%s", IMMEDIATE_PREFIX);
3736 #endif
3737 break;
3739 case 'L':
3740 #ifdef LOCAL_LABEL_PREFIX
3741 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3742 #endif
3743 break;
3745 case 'U':
3746 fputs (user_label_prefix, file);
3747 break;
3749 #ifdef ASM_FPRINTF_EXTENSIONS
3750 /* Uppercase letters are reserved for general use by asm_fprintf
3751 and so are not available to target-specific code.  To
3752 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3753 they are defined here. As they get turned into real extensions
3754 to asm_fprintf they should be removed from this list. */
3755 case 'A': case 'B': case 'C': case 'D': case 'E':
3756 case 'F': case 'G': case 'H': case 'J': case 'K':
3757 case 'M': case 'N': case 'P': case 'Q': case 'S':
3758 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3759 break;
3761 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3762 #endif
3763 default:
3764 gcc_unreachable ();
3766 break;
3768 default:
3769 putc (c, file);
3771 va_end (argptr);
3774 /* Split up a CONST_DOUBLE or integer constant rtx
3775 into two rtx's for single words,
3776 storing in *FIRST the word that comes first in memory in the target
3777 and in *SECOND the other. */
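/* A worked example, assuming 32-bit words and a 64-bit HOST_WIDE_INT:
   the CONST_INT 0x100000002 splits into a low word of 2 and a high
   word of 1; on a little-endian-word target *FIRST is (const_int 2)
   and *SECOND is (const_int 1), and the two are swapped when
   WORDS_BIG_ENDIAN.  */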
3779 void
3780 split_double (rtx value, rtx *first, rtx *second)
3782 if (GET_CODE (value) == CONST_INT)
3784 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3786 /* In this case the CONST_INT holds both target words.
3787 Extract the bits from it into two word-sized pieces.
3788 Sign extend each half to HOST_WIDE_INT. */
3789 unsigned HOST_WIDE_INT low, high;
3790 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3792 /* Set sign_bit to the most significant bit of a word. */
3793 sign_bit = 1;
3794 sign_bit <<= BITS_PER_WORD - 1;
3796 /* Set mask so that all bits of the word are set. We could
3797 have used 1 << BITS_PER_WORD instead of basing the
3798 calculation on sign_bit. However, on machines where
3799 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3800 compiler warning, even though the code would never be
3801 executed. */
3802 mask = sign_bit << 1;
3803 mask--;
3805 /* Set sign_extend as any remaining bits. */
3806 sign_extend = ~mask;
3808 /* Pick the lower word and sign-extend it. */
3809 low = INTVAL (value);
3810 low &= mask;
3811 if (low & sign_bit)
3812 low |= sign_extend;
3814 /* Pick the higher word, shifted to the least significant
3815 bits, and sign-extend it. */
3816 high = INTVAL (value);
3817 high >>= BITS_PER_WORD - 1;
3818 high >>= 1;
3819 high &= mask;
3820 if (high & sign_bit)
3821 high |= sign_extend;
3823 /* Store the words in the target machine order. */
3824 if (WORDS_BIG_ENDIAN)
3826 *first = GEN_INT (high);
3827 *second = GEN_INT (low);
3829 else
3831 *first = GEN_INT (low);
3832 *second = GEN_INT (high);
3835 else
3837 /* The rule for using CONST_INT for a wider mode
3838 is that we regard the value as signed.
3839 So sign-extend it. */
3840 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3841 if (WORDS_BIG_ENDIAN)
3843 *first = high;
3844 *second = value;
3846 else
3848 *first = value;
3849 *second = high;
3853 else if (GET_CODE (value) != CONST_DOUBLE)
3855 if (WORDS_BIG_ENDIAN)
3857 *first = const0_rtx;
3858 *second = value;
3860 else
3862 *first = value;
3863 *second = const0_rtx;
3866 else if (GET_MODE (value) == VOIDmode
3867 /* This is the old way we did CONST_DOUBLE integers. */
3868 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3870 /* In an integer, the words are defined as most and least significant.
3871 So order them by the target's convention. */
3872 if (WORDS_BIG_ENDIAN)
3874 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3875 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3877 else
3879 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3880 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3883 else
3885 REAL_VALUE_TYPE r;
3886 long l[2];
3887 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3889 /* Note, this converts the REAL_VALUE_TYPE to the target's
3890 format, splits up the floating point double and outputs
3891 exactly 32 bits of it into each of l[0] and l[1] --
3892 not necessarily BITS_PER_WORD bits. */
3893 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3895 /* If 32 bits is an entire word for the target, but not for the host,
3896 then sign-extend on the host so that the number will look the same
3897 way on the host that it would on the target. See for instance
3898 simplify_unary_operation. The #if is needed to avoid compiler
3899 warnings. */
3901 #if HOST_BITS_PER_LONG > 32
3902 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3904 if (l[0] & ((long) 1 << 31))
3905 l[0] |= ((long) (-1) << 32);
3906 if (l[1] & ((long) 1 << 31))
3907 l[1] |= ((long) (-1) << 32);
3909 #endif
3911 *first = GEN_INT (l[0]);
3912 *second = GEN_INT (l[1]);
3916 /* Return nonzero if this function has no function calls. */
3918 int
3919 leaf_function_p (void)
3921 rtx insn;
3922 rtx link;
3924 if (crtl->profile || profile_arc_flag)
3925 return 0;
3927 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3929 if (CALL_P (insn)
3930 && ! SIBLING_CALL_P (insn))
3931 return 0;
3932 if (NONJUMP_INSN_P (insn)
3933 && GET_CODE (PATTERN (insn)) == SEQUENCE
3934 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3935 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3936 return 0;
3938 for (link = crtl->epilogue_delay_list;
3939 link;
3940 link = XEXP (link, 1))
3942 insn = XEXP (link, 0);
3944 if (CALL_P (insn)
3945 && ! SIBLING_CALL_P (insn))
3946 return 0;
3947 if (NONJUMP_INSN_P (insn)
3948 && GET_CODE (PATTERN (insn)) == SEQUENCE
3949 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3950 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3951 return 0;
3954 return 1;
3957 /* Return 1 if the branch is a forward branch.
3958 Uses insn_shuid array, so it works only in the final pass.  May be used by
3959 output templates to customarily add branch prediction hints.
3960 */
3961 int
3962 final_forward_branch_p (rtx insn)
3964 int insn_id, label_id;
3966 gcc_assert (uid_shuid);
3967 insn_id = INSN_SHUID (insn);
3968 label_id = INSN_SHUID (JUMP_LABEL (insn));
3969 /* We've hit some insns that do not have id information available. */
3970 gcc_assert (insn_id && label_id);
3971 return insn_id < label_id;
3974 /* On some machines, a function with no call insns
3975 can run faster if it doesn't create its own register window.
3976 When output, the leaf function should use only the "output"
3977 registers. Ordinarily, the function would be compiled to use
3978 the "input" registers to find its arguments; it is a candidate
3979 for leaf treatment if it uses only the "input" registers.
3980 Leaf function treatment means renumbering so the function
3981 uses the "output" registers instead. */
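/* The classic example is SPARC: a function that makes no calls and
   only uses registers permitted by LEAF_REGISTERS can skip the
   register-window save/restore, with LEAF_REG_REMAP renumbering its
   %i registers onto the corresponding %o registers.  */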
3983 #ifdef LEAF_REGISTERS
3985 /* Return 1 if this function uses only the registers that can be
3986 safely renumbered. */
3988 int
3989 only_leaf_regs_used (void)
3991 int i;
3992 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3994 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3995 if ((df_regs_ever_live_p (i) || global_regs[i])
3996 && ! permitted_reg_in_leaf_functions[i])
3997 return 0;
3999 if (crtl->uses_pic_offset_table
4000 && pic_offset_table_rtx != 0
4001 && REG_P (pic_offset_table_rtx)
4002 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4003 return 0;
4005 return 1;
4008 /* Scan all instructions and renumber all registers into those
4009 available in leaf functions. */
4011 static void
4012 leaf_renumber_regs (rtx first)
4014 rtx insn;
4016 /* Renumber only the actual patterns.
4017 The reg-notes can contain frame pointer refs,
4018 and renumbering them could crash, and should not be needed. */
4019 for (insn = first; insn; insn = NEXT_INSN (insn))
4020 if (INSN_P (insn))
4021 leaf_renumber_regs_insn (PATTERN (insn));
4022 for (insn = crtl->epilogue_delay_list;
4023 insn;
4024 insn = XEXP (insn, 1))
4025 if (INSN_P (XEXP (insn, 0)))
4026 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
4029 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4030 available in leaf functions. */
4032 void
4033 leaf_renumber_regs_insn (rtx in_rtx)
4035 int i, j;
4036 const char *format_ptr;
4038 if (in_rtx == 0)
4039 return;
4041 /* Renumber all input-registers into output-registers.
4042 A register's `used' flag marks it as already renumbered,
4043 so the same reg is never remapped twice. */
4045 if (REG_P (in_rtx))
4047 int newreg;
4049 /* Don't renumber the same reg twice. */
4050 if (in_rtx->used)
4051 return;
4053 newreg = REGNO (in_rtx);
4054 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4055 to reach here as part of a REG_NOTE. */
4056 if (newreg >= FIRST_PSEUDO_REGISTER)
4058 in_rtx->used = 1;
4059 return;
4061 newreg = LEAF_REG_REMAP (newreg);
4062 gcc_assert (newreg >= 0);
4063 df_set_regs_ever_live (REGNO (in_rtx), false);
4064 df_set_regs_ever_live (newreg, true);
4065 SET_REGNO (in_rtx, newreg);
4066 in_rtx->used = 1;
4069 if (INSN_P (in_rtx))
4071 /* Inside a SEQUENCE, we find insns.
4072 Renumber just the patterns of these insns,
4073 just as we do for the top-level insns. */
4074 leaf_renumber_regs_insn (PATTERN (in_rtx));
4075 return;
4078 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4080 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4081 switch (*format_ptr++)
4083 case 'e':
4084 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4085 break;
4087 case 'E':
4088 if (NULL != XVEC (in_rtx, i))
4090 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4091 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4093 break;
4095 case 'S':
4096 case 's':
4097 case '0':
4098 case 'i':
4099 case 'w':
4100 case 'n':
4101 case 'u':
4102 break;
4104 default:
4105 gcc_unreachable ();
4108 #endif
4111 /* When -gused is used, emit debug info for only used symbols. But in
4112 addition to the standard intercepted debug_hooks there are some direct
4113 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
4114 Those routines may also be called from a higher level intercepted routine. So
4115 to prevent recording data for an inner call to one of these routines during an intercept,
4116 we maintain an intercept nesting counter (debug_nesting). We only save the
4117 intercepted arguments if the nesting is 1. */
4118 int debug_nesting = 0;
4120 static tree *symbol_queue;
4121 int symbol_queue_index = 0;
4122 static int symbol_queue_size = 0;
4124 /* Generate the symbols for any queued up type symbols we encountered
4125 while generating the type info for some originally used symbol.
4126 This might generate additional entries in the queue. Only when
4127 the nesting depth goes to 0 is this routine called. */
4129 void
4130 debug_flush_symbol_queue (void)
4132 int i;
4134 /* Make sure that additionally queued items are not flushed
4135 prematurely. */
4137 ++debug_nesting;
4139 for (i = 0; i < symbol_queue_index; ++i)
4141 /* If we pushed queued symbols then such symbols must be
4142 output no matter what anyone else says. Specifically,
4143 we need to make sure dbxout_symbol() thinks the symbol was
4144 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
4145 which may be set for outside reasons. */
4146 int saved_tree_used = TREE_USED (symbol_queue[i]);
4147 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
4148 TREE_USED (symbol_queue[i]) = 1;
4149 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
4151 #ifdef DBX_DEBUGGING_INFO
4152 dbxout_symbol (symbol_queue[i], 0);
4153 #endif
4155 TREE_USED (symbol_queue[i]) = saved_tree_used;
4156 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
4159 symbol_queue_index = 0;
4160 --debug_nesting;
4163 /* Queue a type symbol needed as part of the definition of a decl
4164 symbol. These symbols are generated when debug_flush_symbol_queue()
4165 is called. */
4167 void
4168 debug_queue_symbol (tree decl)
4170 if (symbol_queue_index >= symbol_queue_size)
4172 symbol_queue_size += 10;
4173 symbol_queue = XRESIZEVEC (tree, symbol_queue, symbol_queue_size);
4176 symbol_queue[symbol_queue_index++] = decl;
4179 /* Free symbol queue. */
4180 void
4181 debug_free_queue (void)
4183 if (symbol_queue)
4185 free (symbol_queue);
4186 symbol_queue = NULL;
4187 symbol_queue_size = 0;
4191 /* Turn the RTL into assembly. */
4192 static unsigned int
4193 rest_of_handle_final (void)
4195 rtx x;
4196 const char *fnname;
4198 /* Get the function's name, as described by its RTL. This may be
4199 different from the DECL_NAME name used in the source file. */
4201 x = DECL_RTL (current_function_decl);
4202 gcc_assert (MEM_P (x));
4203 x = XEXP (x, 0);
4204 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4205 fnname = XSTR (x, 0);
4207 assemble_start_function (current_function_decl, fnname);
4208 final_start_function (get_insns (), asm_out_file, optimize);
4209 final (get_insns (), asm_out_file, optimize);
4210 final_end_function ();
4212 #ifdef TARGET_UNWIND_INFO
4213 /* ??? The IA-64 ".handlerdata" directive must be issued before
4214 the ".endp" directive that closes the procedure descriptor. */
4215 output_function_exception_table (fnname);
4216 #endif
4218 assemble_end_function (current_function_decl, fnname);
4220 #ifndef TARGET_UNWIND_INFO
4221 /* Otherwise, it feels unclean to switch sections in the middle. */
4222 output_function_exception_table (fnname);
4223 #endif
4225 user_defined_section_attribute = false;
4227 /* Free up reg info memory. */
4228 free_reg_info ();
4230 if (! quiet_flag)
4231 fflush (asm_out_file);
4233 /* Write DBX symbols if requested. */
4235 /* Note that for those inline functions where we don't initially
4236 know for certain that we will be generating an out-of-line copy,
4237 the first invocation of this routine (rest_of_compilation) will
4238 skip over this code by doing a `goto exit_rest_of_compilation;'.
4239 Later on, wrapup_global_declarations will (indirectly) call
4240 rest_of_compilation again for those inline functions that need
4241 to have out-of-line copies generated. During that call, we
4242 *will* be routed past here. */
4244 timevar_push (TV_SYMOUT);
4245 (*debug_hooks->function_decl) (current_function_decl);
4246 timevar_pop (TV_SYMOUT);
4248 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4249 DECL_INITIAL (current_function_decl) = error_mark_node;
4251 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4252 && targetm.have_ctors_dtors)
4253 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4254 decl_init_priority_lookup
4255 (current_function_decl));
4256 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4257 && targetm.have_ctors_dtors)
4258 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4259 decl_fini_priority_lookup
4260 (current_function_decl));
4261 return 0;
4262 }
4264 struct rtl_opt_pass pass_final =
4265 {
4266 {
4267 RTL_PASS,
4268 NULL, /* name */
4269 NULL, /* gate */
4270 rest_of_handle_final, /* execute */
4271 NULL, /* sub */
4272 NULL, /* next */
4273 0, /* static_pass_number */
4274 TV_FINAL, /* tv_id */
4275 0, /* properties_required */
4276 0, /* properties_provided */
4277 0, /* properties_destroyed */
4278 0, /* todo_flags_start */
4279 TODO_ggc_collect /* todo_flags_finish */
4280 }
4281 };
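/* For readability only, a sketch of the same descriptor with its fields
   named explicitly, assuming the opt_pass member names suggested by the
   per-field comments above (not compiled, and not part of the pass list).  */
#if 0
static struct rtl_opt_pass pass_final_named =
{
 {
  .type = RTL_PASS,
  .name = NULL,
  .gate = NULL,
  .execute = rest_of_handle_final,
  .sub = NULL,
  .next = NULL,
  .static_pass_number = 0,
  .tv_id = TV_FINAL,
  .properties_required = 0,
  .properties_provided = 0,
  .properties_destroyed = 0,
  .todo_flags_start = 0,
  .todo_flags_finish = TODO_ggc_collect
 }
};
#endif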
4284 static unsigned int
4285 rest_of_handle_shorten_branches (void)
4286 {
4287 /* Shorten branches. */
4288 shorten_branches (get_insns ());
4289 return 0;
4290 }
4292 struct rtl_opt_pass pass_shorten_branches =
4293 {
4294 {
4295 RTL_PASS,
4296 "shorten", /* name */
4297 NULL, /* gate */
4298 rest_of_handle_shorten_branches, /* execute */
4299 NULL, /* sub */
4300 NULL, /* next */
4301 0, /* static_pass_number */
4302 TV_FINAL, /* tv_id */
4303 0, /* properties_required */
4304 0, /* properties_provided */
4305 0, /* properties_destroyed */
4306 0, /* todo_flags_start */
4307 TODO_dump_func /* todo_flags_finish */
4308 }
4309 };
4312 static unsigned int
4313 rest_of_clean_state (void)
4314 {
4315 rtx insn, next;
4317 /* It is very important to decompose the RTL instruction chain here:
4318 debug information keeps pointing into CODE_LABEL insns inside the function
4319 body. If those labels remain linked to the other insns, we end up preserving
4320 the whole RTL chain and its attached detailed debug info in memory. */
4321 for (insn = get_insns (); insn; insn = next)
4322 {
4323 next = NEXT_INSN (insn);
4324 NEXT_INSN (insn) = NULL;
4325 PREV_INSN (insn) = NULL;
4326 }
4328 /* In case the function was not output,
4329 don't leave any temporary anonymous types
4330 queued up for sdb output. */
4331 #ifdef SDB_DEBUGGING_INFO
4332 if (write_symbols == SDB_DEBUG)
4333 sdbout_types (NULL_TREE);
4334 #endif
4336 flag_rerun_cse_after_global_opts = 0;
4337 reload_completed = 0;
4338 epilogue_completed = 0;
4339 #ifdef STACK_REGS
4340 regstack_completed = 0;
4341 #endif
4343 /* Clear out the insn_length contents now that they are no
4344 longer valid. */
4345 init_insn_lengths ();
4347 /* Show no temporary slots allocated. */
4348 init_temp_slots ();
4350 free_bb_for_insn ();
4352 if (targetm.binds_local_p (current_function_decl))
4353 {
4354 unsigned int pref = crtl->preferred_stack_boundary;
4355 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4356 pref = crtl->stack_alignment_needed;
4357 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4358 = pref;
4359 }
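/* A concrete instance of the computation above (both fields are expressed
   in bits): with preferred_stack_boundary == 128 and stack_alignment_needed
   == 256, the value recorded for this locally-bound function is 256.  */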
4361 /* Make sure volatile mem refs aren't considered valid operands for
4362 arithmetic insns. We must call this here if this is a nested inline
4363 function, since the above code leaves us in the init_recog state,
4364 and the function context push/pop code does not save/restore volatile_ok.
4366 ??? Maybe it isn't necessary for expand_start_function to call this
4367 anymore if we do it here? */
4369 init_recog_no_volatile ();
4371 /* We're done with this function. Free up memory if we can. */
4372 free_after_parsing (cfun);
4373 free_after_compilation (cfun);
4374 return 0;
4375 }
4377 struct rtl_opt_pass pass_clean_state =
4378 {
4379 {
4380 RTL_PASS,
4381 NULL, /* name */
4382 NULL, /* gate */
4383 rest_of_clean_state, /* execute */
4384 NULL, /* sub */
4385 NULL, /* next */
4386 0, /* static_pass_number */
4387 TV_FINAL, /* tv_id */
4388 0, /* properties_required */
4389 0, /* properties_provided */
4390 PROP_rtl, /* properties_destroyed */
4391 0, /* todo_flags_start */
4392 0 /* todo_flags_finish */