Fix cygwin performance loss on linpack.
[official-gcc.git] / gcc / final.c
blob2f57b1bc3f1bdc84e701971b153169a6ceca3778
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "target.h"
50 #include "rtl.h"
51 #include "tree.h"
52 #include "cfghooks.h"
53 #include "df.h"
54 #include "tm_p.h"
55 #include "insn-config.h"
56 #include "regs.h"
57 #include "emit-rtl.h"
58 #include "recog.h"
59 #include "cgraph.h"
60 #include "tree-pretty-print.h" /* for dump_function_header */
61 #include "varasm.h"
62 #include "insn-attr.h"
63 #include "conditions.h"
64 #include "flags.h"
65 #include "output.h"
66 #include "except.h"
67 #include "rtl-error.h"
68 #include "toplev.h" /* exact_log2, floor_log2 */
69 #include "reload.h"
70 #include "intl.h"
71 #include "cfgrtl.h"
72 #include "debug.h"
73 #include "tree-pass.h"
74 #include "tree-ssa.h"
75 #include "cfgloop.h"
76 #include "params.h"
77 #include "asan.h"
78 #include "rtl-iter.h"
79 #include "print-rtl.h"
81 #ifdef XCOFF_DEBUGGING_INFO
82 #include "xcoffout.h" /* Needed for external data declarations. */
83 #endif
85 #include "dwarf2out.h"
87 #ifdef DBX_DEBUGGING_INFO
88 #include "dbxout.h"
89 #endif
91 #include "sdbout.h"
93 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
94 So define a null default for it to save conditionalization later. */
95 #ifndef CC_STATUS_INIT
96 #define CC_STATUS_INIT
97 #endif
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
102 #endif
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
108 /* Bitflags used by final_scan_insn. */
109 #define SEEN_NOTE 1
110 #define SEEN_EMITTED 2
112 /* Last insn processed by final_scan_insn. */
113 static rtx_insn *debug_insn;
114 rtx_insn *current_output_insn;
116 /* Line number of last NOTE. */
117 static int last_linenum;
119 /* Last discriminator written to assembly. */
120 static int last_discriminator;
122 /* Discriminator of current block. */
123 static int discriminator;
125 /* Highest line number in current block. */
126 static int high_block_linenum;
128 /* Likewise for function. */
129 static int high_function_linenum;
131 /* Filename of last NOTE. */
132 static const char *last_filename;
134 /* Override filename and line number. */
135 static const char *override_filename;
136 static int override_linenum;
138 /* Whether to force emission of a line note before the next insn. */
139 static bool force_source_line = false;
141 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
143 /* Nonzero while outputting an `asm' with operands.
144 This means that inconsistencies are the user's fault, so don't die.
145 The precise value is the insn being output, to pass to error_for_asm. */
146 const rtx_insn *this_is_asm_operands;
148 /* Number of operands of this insn, for an `asm' with operands. */
149 static unsigned int insn_noperands;
151 /* Compare optimization flag. */
153 static rtx last_ignored_compare = 0;
155 /* Assign a unique number to each insn that is output.
156 This can be used to generate unique local labels. */
158 static int insn_counter = 0;
160 /* This variable contains machine-dependent flags (defined in tm.h)
161 set and examined by output routines
162 that describe how to interpret the condition codes properly. */
164 CC_STATUS cc_status;
166 /* During output of an insn, this contains a copy of cc_status
167 from before the insn. */
169 CC_STATUS cc_prev_status;
171 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
173 static int block_depth;
175 /* Nonzero if have enabled APP processing of our assembler output. */
177 static int app_on;
179 /* If we are outputting an insn sequence, this contains the sequence rtx.
180 Zero otherwise. */
182 rtx_sequence *final_sequence;
184 #ifdef ASSEMBLER_DIALECT
186 /* Number of the assembler dialect to use, starting at 0. */
187 static int dialect_number;
188 #endif
190 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
191 rtx current_insn_predicate;
193 /* True if printing into -fdump-final-insns= dump. */
194 bool final_insns_dump_p;
196 /* True if profile_function should be called, but hasn't been called yet. */
197 static bool need_profile_function;
199 static int asm_insn_count (rtx);
200 static void profile_function (FILE *);
201 static void profile_after_prologue (FILE *);
202 static bool notice_source_line (rtx_insn *, bool *);
203 static rtx walk_alter_subreg (rtx *, bool *);
204 static void output_asm_name (void);
205 static void output_alternate_entry_point (FILE *, rtx_insn *);
206 static tree get_mem_expr_from_op (rtx, int *);
207 static void output_asm_operand_names (rtx *, int *, int);
208 #ifdef LEAF_REGISTERS
209 static void leaf_renumber_regs (rtx_insn *);
210 #endif
211 #if HAVE_cc0
212 static int alter_cond (rtx);
213 #endif
214 #ifndef ADDR_VEC_ALIGN
215 static int final_addr_vec_align (rtx);
216 #endif
217 static int align_fuzz (rtx, rtx, int, unsigned);
218 static void collect_fn_hard_reg_usage (void);
219 static tree get_call_fndecl (rtx_insn *);
221 /* Initialize data in final at the beginning of a compilation. */
223 void
224 init_final (const char *filename ATTRIBUTE_UNUSED)
226 app_on = 0;
227 final_sequence = 0;
229 #ifdef ASSEMBLER_DIALECT
230 dialect_number = ASSEMBLER_DIALECT;
231 #endif
234 /* Default target function prologue and epilogue assembler output.
236 If not overridden for epilogue code, then the function body itself
237 contains return instructions wherever needed. */
238 void
239 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
240 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
244 void
245 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
246 tree decl ATTRIBUTE_UNUSED,
247 bool new_is_cold ATTRIBUTE_UNUSED)
251 /* Default target hook that outputs nothing to a stream. */
252 void
253 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
257 /* Enable APP processing of subsequent output.
258 Used before the output from an `asm' statement. */
260 void
261 app_enable (void)
263 if (! app_on)
265 fputs (ASM_APP_ON, asm_out_file);
266 app_on = 1;
270 /* Disable APP processing of subsequent output.
271 Called from varasm.c before most kinds of output. */
273 void
274 app_disable (void)
276 if (app_on)
278 fputs (ASM_APP_OFF, asm_out_file);
279 app_on = 0;
283 /* Return the number of slots filled in the current
284 delayed branch sequence (we don't count the insn needing the
285 delay slot). Zero if not in a delayed branch sequence. */
288 dbr_sequence_length (void)
290 if (final_sequence != 0)
291 return XVECLEN (final_sequence, 0) - 1;
292 else
293 return 0;
296 /* The next two pages contain routines used to compute the length of an insn
297 and to shorten branches. */
299 /* Arrays for insn lengths, and addresses. The latter is referenced by
300 `insn_current_length'. */
302 static int *insn_lengths;
304 vec<int> insn_addresses_;
306 /* Max uid for which the above arrays are valid. */
307 static int insn_lengths_max_uid;
309 /* Address of insn being processed. Used by `insn_current_length'. */
310 int insn_current_address;
312 /* Address of insn being processed in previous iteration. */
313 int insn_last_address;
315 /* known invariant alignment of insn being processed. */
316 int insn_current_align;
318 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
319 gives the next following alignment insn that increases the known
320 alignment, or NULL_RTX if there is no such insn.
321 For any alignment obtained this way, we can again index uid_align with
322 its uid to obtain the next following align that in turn increases the
323 alignment, till we reach NULL_RTX; the sequence obtained this way
324 for each insn we'll call the alignment chain of this insn in the following
325 comments. */
327 struct label_alignment
329 short alignment;
330 short max_skip;
333 static rtx *uid_align;
334 static int *uid_shuid;
335 static struct label_alignment *label_align;
337 /* Indicate that branch shortening hasn't yet been done. */
339 void
340 init_insn_lengths (void)
342 if (uid_shuid)
344 free (uid_shuid);
345 uid_shuid = 0;
347 if (insn_lengths)
349 free (insn_lengths);
350 insn_lengths = 0;
351 insn_lengths_max_uid = 0;
353 if (HAVE_ATTR_length)
354 INSN_ADDRESSES_FREE ();
355 if (uid_align)
357 free (uid_align);
358 uid_align = 0;
362 /* Obtain the current length of an insn. If branch shortening has been done,
363 get its actual length. Otherwise, use FALLBACK_FN to calculate the
364 length. */
365 static int
366 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
368 rtx body;
369 int i;
370 int length = 0;
372 if (!HAVE_ATTR_length)
373 return 0;
375 if (insn_lengths_max_uid > INSN_UID (insn))
376 return insn_lengths[INSN_UID (insn)];
377 else
378 switch (GET_CODE (insn))
380 case NOTE:
381 case BARRIER:
382 case CODE_LABEL:
383 case DEBUG_INSN:
384 return 0;
386 case CALL_INSN:
387 case JUMP_INSN:
388 length = fallback_fn (insn);
389 break;
391 case INSN:
392 body = PATTERN (insn);
393 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
394 return 0;
396 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
397 length = asm_insn_count (body) * fallback_fn (insn);
398 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
399 for (i = 0; i < seq->len (); i++)
400 length += get_attr_length_1 (seq->insn (i), fallback_fn);
401 else
402 length = fallback_fn (insn);
403 break;
405 default:
406 break;
409 #ifdef ADJUST_INSN_LENGTH
410 ADJUST_INSN_LENGTH (insn, length);
411 #endif
412 return length;
415 /* Obtain the current length of an insn. If branch shortening has been done,
416 get its actual length. Otherwise, get its maximum length. */
418 get_attr_length (rtx_insn *insn)
420 return get_attr_length_1 (insn, insn_default_length);
423 /* Obtain the current length of an insn. If branch shortening has been done,
424 get its actual length. Otherwise, get its minimum length. */
426 get_attr_min_length (rtx_insn *insn)
428 return get_attr_length_1 (insn, insn_min_length);
431 /* Code to handle alignment inside shorten_branches. */
433 /* Here is an explanation how the algorithm in align_fuzz can give
434 proper results:
436 Call a sequence of instructions beginning with alignment point X
437 and continuing until the next alignment point `block X'. When `X'
438 is used in an expression, it means the alignment value of the
439 alignment point.
441 Call the distance between the start of the first insn of block X, and
442 the end of the last insn of block X `IX', for the `inner size of X'.
443 This is clearly the sum of the instruction lengths.
445 Likewise with the next alignment-delimited block following X, which we
446 shall call block Y.
448 Call the distance between the start of the first insn of block X, and
449 the start of the first insn of block Y `OX', for the `outer size of X'.
451 The estimated padding is then OX - IX.
453 OX can be safely estimated as
455 if (X >= Y)
456 OX = round_up(IX, Y)
457 else
458 OX = round_up(IX, X) + Y - X
460 Clearly est(IX) >= real(IX), because that only depends on the
461 instruction lengths, and those being overestimated is a given.
463 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
464 we needn't worry about that when thinking about OX.
466 When X >= Y, the alignment provided by Y adds no uncertainty factor
467 for branch ranges starting before X, so we can just round what we have.
468 But when X < Y, we don't know anything about the, so to speak,
469 `middle bits', so we have to assume the worst when aligning up from an
470 address mod X to one mod Y, which is Y - X. */
472 #ifndef LABEL_ALIGN
473 #define LABEL_ALIGN(LABEL) align_labels_log
474 #endif
476 #ifndef LOOP_ALIGN
477 #define LOOP_ALIGN(LABEL) align_loops_log
478 #endif
480 #ifndef LABEL_ALIGN_AFTER_BARRIER
481 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
482 #endif
484 #ifndef JUMP_ALIGN
485 #define JUMP_ALIGN(LABEL) align_jumps_log
486 #endif
489 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
491 return 0;
495 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
497 return align_loops_max_skip;
501 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
503 return align_labels_max_skip;
507 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
509 return align_jumps_max_skip;
512 #ifndef ADDR_VEC_ALIGN
513 static int
514 final_addr_vec_align (rtx addr_vec)
516 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
518 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
519 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
520 return exact_log2 (align);
524 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
525 #endif
527 #ifndef INSN_LENGTH_ALIGNMENT
528 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
529 #endif
531 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
533 static int min_labelno, max_labelno;
535 #define LABEL_TO_ALIGNMENT(LABEL) \
536 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
538 #define LABEL_TO_MAX_SKIP(LABEL) \
539 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
541 /* For the benefit of port specific code do this also as a function. */
544 label_to_alignment (rtx label)
546 if (CODE_LABEL_NUMBER (label) <= max_labelno)
547 return LABEL_TO_ALIGNMENT (label);
548 return 0;
552 label_to_max_skip (rtx label)
554 if (CODE_LABEL_NUMBER (label) <= max_labelno)
555 return LABEL_TO_MAX_SKIP (label);
556 return 0;
559 /* The differences in addresses
560 between a branch and its target might grow or shrink depending on
561 the alignment the start insn of the range (the branch for a forward
562 branch or the label for a backward branch) starts out on; if these
563 differences are used naively, they can even oscillate infinitely.
564 We therefore want to compute a 'worst case' address difference that
565 is independent of the alignment the start insn of the range end
566 up on, and that is at least as large as the actual difference.
567 The function align_fuzz calculates the amount we have to add to the
568 naively computed difference, by traversing the part of the alignment
569 chain of the start insn of the range that is in front of the end insn
570 of the range, and considering for each alignment the maximum amount
571 that it might contribute to a size increase.
573 For casesi tables, we also want to know worst case minimum amounts of
574 address difference, in case a machine description wants to introduce
575 some common offset that is added to all offsets in a table.
576 For this purpose, align_fuzz with a growth argument of 0 computes the
577 appropriate adjustment. */
579 /* Compute the maximum delta by which the difference of the addresses of
580 START and END might grow / shrink due to a different address for start
581 which changes the size of alignment insns between START and END.
582 KNOWN_ALIGN_LOG is the alignment known for START.
583 GROWTH should be ~0 if the objective is to compute potential code size
584 increase, and 0 if the objective is to compute potential shrink.
585 The return value is undefined for any other value of GROWTH. */
587 static int
588 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
590 int uid = INSN_UID (start);
591 rtx align_label;
592 int known_align = 1 << known_align_log;
593 int end_shuid = INSN_SHUID (end);
594 int fuzz = 0;
596 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
598 int align_addr, new_align;
600 uid = INSN_UID (align_label);
601 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
602 if (uid_shuid[uid] > end_shuid)
603 break;
604 known_align_log = LABEL_TO_ALIGNMENT (align_label);
605 new_align = 1 << known_align_log;
606 if (new_align < known_align)
607 continue;
608 fuzz += (-align_addr ^ growth) & (new_align - known_align);
609 known_align = new_align;
611 return fuzz;
614 /* Compute a worst-case reference address of a branch so that it
615 can be safely used in the presence of aligned labels. Since the
616 size of the branch itself is unknown, the size of the branch is
617 not included in the range. I.e. for a forward branch, the reference
618 address is the end address of the branch as known from the previous
619 branch shortening pass, minus a value to account for possible size
620 increase due to alignment. For a backward branch, it is the start
621 address of the branch as known from the current pass, plus a value
622 to account for possible size increase due to alignment.
623 NB.: Therefore, the maximum offset allowed for backward branches needs
624 to exclude the branch size. */
627 insn_current_reference_address (rtx_insn *branch)
629 rtx dest;
630 int seq_uid;
632 if (! INSN_ADDRESSES_SET_P ())
633 return 0;
635 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
636 seq_uid = INSN_UID (seq);
637 if (!JUMP_P (branch))
638 /* This can happen for example on the PA; the objective is to know the
639 offset to address something in front of the start of the function.
640 Thus, we can treat it like a backward branch.
641 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
642 any alignment we'd encounter, so we skip the call to align_fuzz. */
643 return insn_current_address;
644 dest = JUMP_LABEL (branch);
646 /* BRANCH has no proper alignment chain set, so use SEQ.
647 BRANCH also has no INSN_SHUID. */
648 if (INSN_SHUID (seq) < INSN_SHUID (dest))
650 /* Forward branch. */
651 return (insn_last_address + insn_lengths[seq_uid]
652 - align_fuzz (seq, dest, length_unit_log, ~0));
654 else
656 /* Backward branch. */
657 return (insn_current_address
658 + align_fuzz (dest, seq, length_unit_log, ~0));
662 /* Compute branch alignments based on frequency information in the
663 CFG. */
665 unsigned int
666 compute_alignments (void)
668 int log, max_skip, max_log;
669 basic_block bb;
670 int freq_max = 0;
671 int freq_threshold = 0;
673 if (label_align)
675 free (label_align);
676 label_align = 0;
679 max_labelno = max_label_num ();
680 min_labelno = get_first_label_num ();
681 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
683 /* If not optimizing or optimizing for size, don't assign any alignments. */
684 if (! optimize || optimize_function_for_size_p (cfun))
685 return 0;
687 if (dump_file)
689 dump_reg_info (dump_file);
690 dump_flow_info (dump_file, TDF_DETAILS);
691 flow_loops_dump (dump_file, NULL, 1);
693 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
694 FOR_EACH_BB_FN (bb, cfun)
695 if (bb->frequency > freq_max)
696 freq_max = bb->frequency;
697 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
699 if (dump_file)
700 fprintf (dump_file, "freq_max: %i\n",freq_max);
701 FOR_EACH_BB_FN (bb, cfun)
703 rtx_insn *label = BB_HEAD (bb);
704 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
705 edge e;
706 edge_iterator ei;
708 if (!LABEL_P (label)
709 || optimize_bb_for_size_p (bb))
711 if (dump_file)
712 fprintf (dump_file,
713 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
714 bb->index, bb->frequency, bb->loop_father->num,
715 bb_loop_depth (bb));
716 continue;
718 max_log = LABEL_ALIGN (label);
719 max_skip = targetm.asm_out.label_align_max_skip (label);
721 FOR_EACH_EDGE (e, ei, bb->preds)
723 if (e->flags & EDGE_FALLTHRU)
724 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
725 else
726 branch_frequency += EDGE_FREQUENCY (e);
728 if (dump_file)
730 fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
731 " %2i fall %4i branch %4i",
732 bb->index, bb->frequency, bb->loop_father->num,
733 bb_loop_depth (bb),
734 fallthru_frequency, branch_frequency);
735 if (!bb->loop_father->inner && bb->loop_father->num)
736 fprintf (dump_file, " inner_loop");
737 if (bb->loop_father->header == bb)
738 fprintf (dump_file, " loop_header");
739 fprintf (dump_file, "\n");
742 /* There are two purposes to align block with no fallthru incoming edge:
743 1) to avoid fetch stalls when branch destination is near cache boundary
744 2) to improve cache efficiency in case the previous block is not executed
745 (so it does not need to be in the cache).
747 We to catch first case, we align frequently executed blocks.
748 To catch the second, we align blocks that are executed more frequently
749 than the predecessor and the predecessor is likely to not be executed
750 when function is called. */
752 if (!has_fallthru
753 && (branch_frequency > freq_threshold
754 || (bb->frequency > bb->prev_bb->frequency * 10
755 && (bb->prev_bb->frequency
756 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
758 log = JUMP_ALIGN (label);
759 if (dump_file)
760 fprintf (dump_file, " jump alignment added.\n");
761 if (max_log < log)
763 max_log = log;
764 max_skip = targetm.asm_out.jump_align_max_skip (label);
767 /* In case block is frequent and reached mostly by non-fallthru edge,
768 align it. It is most likely a first block of loop. */
769 if (has_fallthru
770 && !(single_succ_p (bb)
771 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
772 && optimize_bb_for_speed_p (bb)
773 && branch_frequency + fallthru_frequency > freq_threshold
774 && (branch_frequency
775 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
777 log = LOOP_ALIGN (label);
778 if (dump_file)
779 fprintf (dump_file, " internal loop alignment added.\n");
780 if (max_log < log)
782 max_log = log;
783 max_skip = targetm.asm_out.loop_align_max_skip (label);
786 LABEL_TO_ALIGNMENT (label) = max_log;
787 LABEL_TO_MAX_SKIP (label) = max_skip;
790 loop_optimizer_finalize ();
791 free_dominance_info (CDI_DOMINATORS);
792 return 0;
795 /* Grow the LABEL_ALIGN array after new labels are created. */
797 static void
798 grow_label_align (void)
800 int old = max_labelno;
801 int n_labels;
802 int n_old_labels;
804 max_labelno = max_label_num ();
806 n_labels = max_labelno - min_labelno + 1;
807 n_old_labels = old - min_labelno + 1;
809 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
811 /* Range of labels grows monotonically in the function. Failing here
812 means that the initialization of array got lost. */
813 gcc_assert (n_old_labels <= n_labels);
815 memset (label_align + n_old_labels, 0,
816 (n_labels - n_old_labels) * sizeof (struct label_alignment));
819 /* Update the already computed alignment information. LABEL_PAIRS is a vector
820 made up of pairs of labels for which the alignment information of the first
821 element will be copied from that of the second element. */
823 void
824 update_alignments (vec<rtx> &label_pairs)
826 unsigned int i = 0;
827 rtx iter, label = NULL_RTX;
829 if (max_labelno != max_label_num ())
830 grow_label_align ();
832 FOR_EACH_VEC_ELT (label_pairs, i, iter)
833 if (i & 1)
835 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
836 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
838 else
839 label = iter;
842 namespace {
844 const pass_data pass_data_compute_alignments =
846 RTL_PASS, /* type */
847 "alignments", /* name */
848 OPTGROUP_NONE, /* optinfo_flags */
849 TV_NONE, /* tv_id */
850 0, /* properties_required */
851 0, /* properties_provided */
852 0, /* properties_destroyed */
853 0, /* todo_flags_start */
854 0, /* todo_flags_finish */
857 class pass_compute_alignments : public rtl_opt_pass
859 public:
860 pass_compute_alignments (gcc::context *ctxt)
861 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
864 /* opt_pass methods: */
865 virtual unsigned int execute (function *) { return compute_alignments (); }
867 }; // class pass_compute_alignments
869 } // anon namespace
871 rtl_opt_pass *
872 make_pass_compute_alignments (gcc::context *ctxt)
874 return new pass_compute_alignments (ctxt);
878 /* Make a pass over all insns and compute their actual lengths by shortening
879 any branches of variable length if possible. */
881 /* shorten_branches might be called multiple times: for example, the SH
882 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
883 In order to do this, it needs proper length information, which it obtains
884 by calling shorten_branches. This cannot be collapsed with
885 shorten_branches itself into a single pass unless we also want to integrate
886 reorg.c, since the branch splitting exposes new instructions with delay
887 slots. */
889 void
890 shorten_branches (rtx_insn *first)
892 rtx_insn *insn;
893 int max_uid;
894 int i;
895 int max_log;
896 int max_skip;
897 #define MAX_CODE_ALIGN 16
898 rtx_insn *seq;
899 int something_changed = 1;
900 char *varying_length;
901 rtx body;
902 int uid;
903 rtx align_tab[MAX_CODE_ALIGN];
905 /* Compute maximum UID and allocate label_align / uid_shuid. */
906 max_uid = get_max_uid ();
908 /* Free uid_shuid before reallocating it. */
909 free (uid_shuid);
911 uid_shuid = XNEWVEC (int, max_uid);
913 if (max_labelno != max_label_num ())
914 grow_label_align ();
916 /* Initialize label_align and set up uid_shuid to be strictly
917 monotonically rising with insn order. */
918 /* We use max_log here to keep track of the maximum alignment we want to
919 impose on the next CODE_LABEL (or the current one if we are processing
920 the CODE_LABEL itself). */
922 max_log = 0;
923 max_skip = 0;
925 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
927 int log;
929 INSN_SHUID (insn) = i++;
930 if (INSN_P (insn))
931 continue;
933 if (LABEL_P (insn))
935 rtx_insn *next;
936 bool next_is_jumptable;
938 /* Merge in alignments computed by compute_alignments. */
939 log = LABEL_TO_ALIGNMENT (insn);
940 if (max_log < log)
942 max_log = log;
943 max_skip = LABEL_TO_MAX_SKIP (insn);
946 next = next_nonnote_insn (insn);
947 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
948 if (!next_is_jumptable)
950 log = LABEL_ALIGN (insn);
951 if (max_log < log)
953 max_log = log;
954 max_skip = targetm.asm_out.label_align_max_skip (insn);
957 /* ADDR_VECs only take room if read-only data goes into the text
958 section. */
959 if ((JUMP_TABLES_IN_TEXT_SECTION
960 || readonly_data_section == text_section)
961 && next_is_jumptable)
963 log = ADDR_VEC_ALIGN (next);
964 if (max_log < log)
966 max_log = log;
967 max_skip = targetm.asm_out.label_align_max_skip (insn);
970 LABEL_TO_ALIGNMENT (insn) = max_log;
971 LABEL_TO_MAX_SKIP (insn) = max_skip;
972 max_log = 0;
973 max_skip = 0;
975 else if (BARRIER_P (insn))
977 rtx_insn *label;
979 for (label = insn; label && ! INSN_P (label);
980 label = NEXT_INSN (label))
981 if (LABEL_P (label))
983 log = LABEL_ALIGN_AFTER_BARRIER (insn);
984 if (max_log < log)
986 max_log = log;
987 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
989 break;
993 if (!HAVE_ATTR_length)
994 return;
996 /* Allocate the rest of the arrays. */
997 insn_lengths = XNEWVEC (int, max_uid);
998 insn_lengths_max_uid = max_uid;
999 /* Syntax errors can lead to labels being outside of the main insn stream.
1000 Initialize insn_addresses, so that we get reproducible results. */
1001 INSN_ADDRESSES_ALLOC (max_uid);
1003 varying_length = XCNEWVEC (char, max_uid);
1005 /* Initialize uid_align. We scan instructions
1006 from end to start, and keep in align_tab[n] the last seen insn
1007 that does an alignment of at least n+1, i.e. the successor
1008 in the alignment chain for an insn that does / has a known
1009 alignment of n. */
1010 uid_align = XCNEWVEC (rtx, max_uid);
1012 for (i = MAX_CODE_ALIGN; --i >= 0;)
1013 align_tab[i] = NULL_RTX;
1014 seq = get_last_insn ();
1015 for (; seq; seq = PREV_INSN (seq))
1017 int uid = INSN_UID (seq);
1018 int log;
1019 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1020 uid_align[uid] = align_tab[0];
1021 if (log)
1023 /* Found an alignment label. */
1024 uid_align[uid] = align_tab[log];
1025 for (i = log - 1; i >= 0; i--)
1026 align_tab[i] = seq;
1030 /* When optimizing, we start assuming minimum length, and keep increasing
1031 lengths as we find the need for this, till nothing changes.
1032 When not optimizing, we start assuming maximum lengths, and
1033 do a single pass to update the lengths. */
1034 bool increasing = optimize != 0;
1036 #ifdef CASE_VECTOR_SHORTEN_MODE
1037 if (optimize)
1039 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1040 label fields. */
1042 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1043 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1044 int rel;
1046 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1048 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1049 int len, i, min, max, insn_shuid;
1050 int min_align;
1051 addr_diff_vec_flags flags;
1053 if (! JUMP_TABLE_DATA_P (insn)
1054 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1055 continue;
1056 pat = PATTERN (insn);
1057 len = XVECLEN (pat, 1);
1058 gcc_assert (len > 0);
1059 min_align = MAX_CODE_ALIGN;
1060 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1062 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1063 int shuid = INSN_SHUID (lab);
1064 if (shuid < min)
1066 min = shuid;
1067 min_lab = lab;
1069 if (shuid > max)
1071 max = shuid;
1072 max_lab = lab;
1074 if (min_align > LABEL_TO_ALIGNMENT (lab))
1075 min_align = LABEL_TO_ALIGNMENT (lab);
1077 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1078 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1079 insn_shuid = INSN_SHUID (insn);
1080 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1081 memset (&flags, 0, sizeof (flags));
1082 flags.min_align = min_align;
1083 flags.base_after_vec = rel > insn_shuid;
1084 flags.min_after_vec = min > insn_shuid;
1085 flags.max_after_vec = max > insn_shuid;
1086 flags.min_after_base = min > rel;
1087 flags.max_after_base = max > rel;
1088 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1090 if (increasing)
1091 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1094 #endif /* CASE_VECTOR_SHORTEN_MODE */
1096 /* Compute initial lengths, addresses, and varying flags for each insn. */
1097 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1099 for (insn_current_address = 0, insn = first;
1100 insn != 0;
1101 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1103 uid = INSN_UID (insn);
1105 insn_lengths[uid] = 0;
1107 if (LABEL_P (insn))
1109 int log = LABEL_TO_ALIGNMENT (insn);
1110 if (log)
1112 int align = 1 << log;
1113 int new_address = (insn_current_address + align - 1) & -align;
1114 insn_lengths[uid] = new_address - insn_current_address;
1118 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1120 if (NOTE_P (insn) || BARRIER_P (insn)
1121 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1122 continue;
1123 if (insn->deleted ())
1124 continue;
1126 body = PATTERN (insn);
1127 if (JUMP_TABLE_DATA_P (insn))
1129 /* This only takes room if read-only data goes into the text
1130 section. */
1131 if (JUMP_TABLES_IN_TEXT_SECTION
1132 || readonly_data_section == text_section)
1133 insn_lengths[uid] = (XVECLEN (body,
1134 GET_CODE (body) == ADDR_DIFF_VEC)
1135 * GET_MODE_SIZE (GET_MODE (body)));
1136 /* Alignment is handled by ADDR_VEC_ALIGN. */
1138 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1139 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1140 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1142 int i;
1143 int const_delay_slots;
1144 if (DELAY_SLOTS)
1145 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1146 else
1147 const_delay_slots = 0;
1149 int (*inner_length_fun) (rtx_insn *)
1150 = const_delay_slots ? length_fun : insn_default_length;
1151 /* Inside a delay slot sequence, we do not do any branch shortening
1152 if the shortening could change the number of delay slots
1153 of the branch. */
1154 for (i = 0; i < body_seq->len (); i++)
1156 rtx_insn *inner_insn = body_seq->insn (i);
1157 int inner_uid = INSN_UID (inner_insn);
1158 int inner_length;
1160 if (GET_CODE (body) == ASM_INPUT
1161 || asm_noperands (PATTERN (inner_insn)) >= 0)
1162 inner_length = (asm_insn_count (PATTERN (inner_insn))
1163 * insn_default_length (inner_insn));
1164 else
1165 inner_length = inner_length_fun (inner_insn);
1167 insn_lengths[inner_uid] = inner_length;
1168 if (const_delay_slots)
1170 if ((varying_length[inner_uid]
1171 = insn_variable_length_p (inner_insn)) != 0)
1172 varying_length[uid] = 1;
1173 INSN_ADDRESSES (inner_uid) = (insn_current_address
1174 + insn_lengths[uid]);
1176 else
1177 varying_length[inner_uid] = 0;
1178 insn_lengths[uid] += inner_length;
1181 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1183 insn_lengths[uid] = length_fun (insn);
1184 varying_length[uid] = insn_variable_length_p (insn);
1187 /* If needed, do any adjustment. */
1188 #ifdef ADJUST_INSN_LENGTH
1189 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1190 if (insn_lengths[uid] < 0)
1191 fatal_insn ("negative insn length", insn);
1192 #endif
1195 /* Now loop over all the insns finding varying length insns. For each,
1196 get the current insn length. If it has changed, reflect the change.
1197 When nothing changes for a full pass, we are done. */
1199 while (something_changed)
1201 something_changed = 0;
1202 insn_current_align = MAX_CODE_ALIGN - 1;
1203 for (insn_current_address = 0, insn = first;
1204 insn != 0;
1205 insn = NEXT_INSN (insn))
1207 int new_length;
1208 #ifdef ADJUST_INSN_LENGTH
1209 int tmp_length;
1210 #endif
1211 int length_align;
1213 uid = INSN_UID (insn);
1215 if (LABEL_P (insn))
1217 int log = LABEL_TO_ALIGNMENT (insn);
1219 #ifdef CASE_VECTOR_SHORTEN_MODE
1220 /* If the mode of a following jump table was changed, we
1221 may need to update the alignment of this label. */
1222 rtx_insn *next;
1223 bool next_is_jumptable;
1225 next = next_nonnote_insn (insn);
1226 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1227 if ((JUMP_TABLES_IN_TEXT_SECTION
1228 || readonly_data_section == text_section)
1229 && next_is_jumptable)
1231 int newlog = ADDR_VEC_ALIGN (next);
1232 if (newlog != log)
1234 log = newlog;
1235 LABEL_TO_ALIGNMENT (insn) = log;
1236 something_changed = 1;
1239 #endif
1241 if (log > insn_current_align)
1243 int align = 1 << log;
1244 int new_address= (insn_current_address + align - 1) & -align;
1245 insn_lengths[uid] = new_address - insn_current_address;
1246 insn_current_align = log;
1247 insn_current_address = new_address;
1249 else
1250 insn_lengths[uid] = 0;
1251 INSN_ADDRESSES (uid) = insn_current_address;
1252 continue;
1255 length_align = INSN_LENGTH_ALIGNMENT (insn);
1256 if (length_align < insn_current_align)
1257 insn_current_align = length_align;
1259 insn_last_address = INSN_ADDRESSES (uid);
1260 INSN_ADDRESSES (uid) = insn_current_address;
1262 #ifdef CASE_VECTOR_SHORTEN_MODE
1263 if (optimize
1264 && JUMP_TABLE_DATA_P (insn)
1265 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1267 rtx body = PATTERN (insn);
1268 int old_length = insn_lengths[uid];
1269 rtx_insn *rel_lab =
1270 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1271 rtx min_lab = XEXP (XEXP (body, 2), 0);
1272 rtx max_lab = XEXP (XEXP (body, 3), 0);
1273 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1274 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1275 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1276 rtx_insn *prev;
1277 int rel_align = 0;
1278 addr_diff_vec_flags flags;
1279 machine_mode vec_mode;
1281 /* Avoid automatic aggregate initialization. */
1282 flags = ADDR_DIFF_VEC_FLAGS (body);
1284 /* Try to find a known alignment for rel_lab. */
1285 for (prev = rel_lab;
1286 prev
1287 && ! insn_lengths[INSN_UID (prev)]
1288 && ! (varying_length[INSN_UID (prev)] & 1);
1289 prev = PREV_INSN (prev))
1290 if (varying_length[INSN_UID (prev)] & 2)
1292 rel_align = LABEL_TO_ALIGNMENT (prev);
1293 break;
1296 /* See the comment on addr_diff_vec_flags in rtl.h for the
1297 meaning of the flags values. base: REL_LAB vec: INSN */
1298 /* Anything after INSN has still addresses from the last
1299 pass; adjust these so that they reflect our current
1300 estimate for this pass. */
1301 if (flags.base_after_vec)
1302 rel_addr += insn_current_address - insn_last_address;
1303 if (flags.min_after_vec)
1304 min_addr += insn_current_address - insn_last_address;
1305 if (flags.max_after_vec)
1306 max_addr += insn_current_address - insn_last_address;
1307 /* We want to know the worst case, i.e. lowest possible value
1308 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1309 its offset is positive, and we have to be wary of code shrink;
1310 otherwise, it is negative, and we have to be vary of code
1311 size increase. */
1312 if (flags.min_after_base)
1314 /* If INSN is between REL_LAB and MIN_LAB, the size
1315 changes we are about to make can change the alignment
1316 within the observed offset, therefore we have to break
1317 it up into two parts that are independent. */
1318 if (! flags.base_after_vec && flags.min_after_vec)
1320 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1321 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1323 else
1324 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1326 else
1328 if (flags.base_after_vec && ! flags.min_after_vec)
1330 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1331 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1333 else
1334 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1336 /* Likewise, determine the highest lowest possible value
1337 for the offset of MAX_LAB. */
1338 if (flags.max_after_base)
1340 if (! flags.base_after_vec && flags.max_after_vec)
1342 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1343 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1345 else
1346 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1348 else
1350 if (flags.base_after_vec && ! flags.max_after_vec)
1352 max_addr += align_fuzz (max_lab, insn, 0, 0);
1353 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1355 else
1356 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1358 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1359 max_addr - rel_addr, body);
1360 if (!increasing
1361 || (GET_MODE_SIZE (vec_mode)
1362 >= GET_MODE_SIZE (GET_MODE (body))))
1363 PUT_MODE (body, vec_mode);
1364 if (JUMP_TABLES_IN_TEXT_SECTION
1365 || readonly_data_section == text_section)
1367 insn_lengths[uid]
1368 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1369 insn_current_address += insn_lengths[uid];
1370 if (insn_lengths[uid] != old_length)
1371 something_changed = 1;
1374 continue;
1376 #endif /* CASE_VECTOR_SHORTEN_MODE */
1378 if (! (varying_length[uid]))
1380 if (NONJUMP_INSN_P (insn)
1381 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1383 int i;
1385 body = PATTERN (insn);
1386 for (i = 0; i < XVECLEN (body, 0); i++)
1388 rtx inner_insn = XVECEXP (body, 0, i);
1389 int inner_uid = INSN_UID (inner_insn);
1391 INSN_ADDRESSES (inner_uid) = insn_current_address;
1393 insn_current_address += insn_lengths[inner_uid];
1396 else
1397 insn_current_address += insn_lengths[uid];
1399 continue;
1402 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1404 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1405 int i;
1407 body = PATTERN (insn);
1408 new_length = 0;
1409 for (i = 0; i < seqn->len (); i++)
1411 rtx_insn *inner_insn = seqn->insn (i);
1412 int inner_uid = INSN_UID (inner_insn);
1413 int inner_length;
1415 INSN_ADDRESSES (inner_uid) = insn_current_address;
1417 /* insn_current_length returns 0 for insns with a
1418 non-varying length. */
1419 if (! varying_length[inner_uid])
1420 inner_length = insn_lengths[inner_uid];
1421 else
1422 inner_length = insn_current_length (inner_insn);
1424 if (inner_length != insn_lengths[inner_uid])
1426 if (!increasing || inner_length > insn_lengths[inner_uid])
1428 insn_lengths[inner_uid] = inner_length;
1429 something_changed = 1;
1431 else
1432 inner_length = insn_lengths[inner_uid];
1434 insn_current_address += inner_length;
1435 new_length += inner_length;
1438 else
1440 new_length = insn_current_length (insn);
1441 insn_current_address += new_length;
1444 #ifdef ADJUST_INSN_LENGTH
1445 /* If needed, do any adjustment. */
1446 tmp_length = new_length;
1447 ADJUST_INSN_LENGTH (insn, new_length);
1448 insn_current_address += (new_length - tmp_length);
1449 #endif
1451 if (new_length != insn_lengths[uid]
1452 && (!increasing || new_length > insn_lengths[uid]))
1454 insn_lengths[uid] = new_length;
1455 something_changed = 1;
1457 else
1458 insn_current_address += insn_lengths[uid] - new_length;
1460 /* For a non-optimizing compile, do only a single pass. */
1461 if (!increasing)
1462 break;
1465 free (varying_length);
1468 /* Given the body of an INSN known to be generated by an ASM statement, return
1469 the number of machine instructions likely to be generated for this insn.
1470 This is used to compute its length. */
1472 static int
1473 asm_insn_count (rtx body)
1475 const char *templ;
1477 if (GET_CODE (body) == ASM_INPUT)
1478 templ = XSTR (body, 0);
1479 else
1480 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1482 return asm_str_count (templ);
1485 /* Return the number of machine instructions likely to be generated for the
1486 inline-asm template. */
1488 asm_str_count (const char *templ)
1490 int count = 1;
1492 if (!*templ)
1493 return 0;
1495 for (; *templ; templ++)
1496 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1497 || *templ == '\n')
1498 count++;
1500 return count;
1503 /* ??? This is probably the wrong place for these. */
1504 /* Structure recording the mapping from source file and directory
1505 names at compile time to those to be embedded in debug
1506 information. */
1507 struct debug_prefix_map
1509 const char *old_prefix;
1510 const char *new_prefix;
1511 size_t old_len;
1512 size_t new_len;
1513 struct debug_prefix_map *next;
1516 /* Linked list of such structures. */
1517 static debug_prefix_map *debug_prefix_maps;
1520 /* Record a debug file prefix mapping. ARG is the argument to
1521 -fdebug-prefix-map and must be of the form OLD=NEW. */
1523 void
1524 add_debug_prefix_map (const char *arg)
1526 debug_prefix_map *map;
1527 const char *p;
1529 p = strchr (arg, '=');
1530 if (!p)
1532 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1533 return;
1535 map = XNEW (debug_prefix_map);
1536 map->old_prefix = xstrndup (arg, p - arg);
1537 map->old_len = p - arg;
1538 p++;
1539 map->new_prefix = xstrdup (p);
1540 map->new_len = strlen (p);
1541 map->next = debug_prefix_maps;
1542 debug_prefix_maps = map;
1545 /* Perform user-specified mapping of debug filename prefixes. Return
1546 the new name corresponding to FILENAME. */
1548 const char *
1549 remap_debug_filename (const char *filename)
1551 debug_prefix_map *map;
1552 char *s;
1553 const char *name;
1554 size_t name_len;
1556 for (map = debug_prefix_maps; map; map = map->next)
1557 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1558 break;
1559 if (!map)
1560 return filename;
1561 name = filename + map->old_len;
1562 name_len = strlen (name) + 1;
1563 s = (char *) alloca (name_len + map->new_len);
1564 memcpy (s, map->new_prefix, map->new_len);
1565 memcpy (s + map->new_len, name, name_len);
1566 return ggc_strdup (s);
1569 /* Return true if DWARF2 debug info can be emitted for DECL. */
1571 static bool
1572 dwarf2_debug_info_emitted_p (tree decl)
1574 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1575 return false;
1577 if (DECL_IGNORED_P (decl))
1578 return false;
1580 return true;
1583 /* Return scope resulting from combination of S1 and S2. */
1584 static tree
1585 choose_inner_scope (tree s1, tree s2)
1587 if (!s1)
1588 return s2;
1589 if (!s2)
1590 return s1;
1591 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1592 return s1;
1593 return s2;
1596 /* Emit lexical block notes needed to change scope from S1 to S2. */
1598 static void
1599 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1601 rtx_insn *insn = orig_insn;
1602 tree com = NULL_TREE;
1603 tree ts1 = s1, ts2 = s2;
1604 tree s;
1606 while (ts1 != ts2)
1608 gcc_assert (ts1 && ts2);
1609 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1610 ts1 = BLOCK_SUPERCONTEXT (ts1);
1611 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1612 ts2 = BLOCK_SUPERCONTEXT (ts2);
1613 else
1615 ts1 = BLOCK_SUPERCONTEXT (ts1);
1616 ts2 = BLOCK_SUPERCONTEXT (ts2);
1619 com = ts1;
1621 /* Close scopes. */
1622 s = s1;
1623 while (s != com)
1625 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1626 NOTE_BLOCK (note) = s;
1627 s = BLOCK_SUPERCONTEXT (s);
1630 /* Open scopes. */
1631 s = s2;
1632 while (s != com)
1634 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1635 NOTE_BLOCK (insn) = s;
1636 s = BLOCK_SUPERCONTEXT (s);
1640 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1641 on the scope tree and the newly reordered instructions. */
1643 static void
1644 reemit_insn_block_notes (void)
1646 tree cur_block = DECL_INITIAL (cfun->decl);
1647 rtx_insn *insn;
1648 rtx_note *note;
1650 insn = get_insns ();
1651 for (; insn; insn = NEXT_INSN (insn))
1653 tree this_block;
1655 /* Prevent lexical blocks from straddling section boundaries. */
1656 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1658 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1659 s = BLOCK_SUPERCONTEXT (s))
1661 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1662 NOTE_BLOCK (note) = s;
1663 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1664 NOTE_BLOCK (note) = s;
1668 if (!active_insn_p (insn))
1669 continue;
1671 /* Avoid putting scope notes between jump table and its label. */
1672 if (JUMP_TABLE_DATA_P (insn))
1673 continue;
1675 this_block = insn_scope (insn);
1676 /* For sequences compute scope resulting from merging all scopes
1677 of instructions nested inside. */
1678 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1680 int i;
1682 this_block = NULL;
1683 for (i = 0; i < body->len (); i++)
1684 this_block = choose_inner_scope (this_block,
1685 insn_scope (body->insn (i)));
1687 if (! this_block)
1689 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1690 continue;
1691 else
1692 this_block = DECL_INITIAL (cfun->decl);
1695 if (this_block != cur_block)
1697 change_scope (insn, cur_block, this_block);
1698 cur_block = this_block;
1702 /* change_scope emits before the insn, not after. */
1703 note = emit_note (NOTE_INSN_DELETED);
1704 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1705 delete_insn (note);
1707 reorder_blocks ();
1710 static const char *some_local_dynamic_name;
1712 /* Locate some local-dynamic symbol still in use by this function
1713 so that we can print its name in local-dynamic base patterns.
1714 Return null if there are no local-dynamic references. */
1716 const char *
1717 get_some_local_dynamic_name ()
1719 subrtx_iterator::array_type array;
1720 rtx_insn *insn;
1722 if (some_local_dynamic_name)
1723 return some_local_dynamic_name;
1725 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1726 if (NONDEBUG_INSN_P (insn))
1727 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1729 const_rtx x = *iter;
1730 if (GET_CODE (x) == SYMBOL_REF)
1732 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1733 return some_local_dynamic_name = XSTR (x, 0);
1734 if (CONSTANT_POOL_ADDRESS_P (x))
1735 iter.substitute (get_pool_constant (x));
1739 return 0;
1742 /* Output assembler code for the start of a function,
1743 and initialize some of the variables in this file
1744 for the new function. The label for the function and associated
1745 assembler pseudo-ops have already been output in `assemble_start_function'.
1747 FIRST is the first insn of the rtl for the function being compiled.
1748 FILE is the file to write assembler code to.
1749 OPTIMIZE_P is nonzero if we should eliminate redundant
1750 test and compare insns. */
1752 void
1753 final_start_function (rtx_insn *first, FILE *file,
1754 int optimize_p ATTRIBUTE_UNUSED)
1756 block_depth = 0;
1758 this_is_asm_operands = 0;
1760 need_profile_function = false;
1762 last_filename = LOCATION_FILE (prologue_location);
1763 last_linenum = LOCATION_LINE (prologue_location);
1764 last_discriminator = discriminator = 0;
1766 high_block_linenum = high_function_linenum = last_linenum;
1768 if (flag_sanitize & SANITIZE_ADDRESS)
1769 asan_function_start ();
1771 if (!DECL_IGNORED_P (current_function_decl))
1772 debug_hooks->begin_prologue (last_linenum, last_filename);
1774 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1775 dwarf2out_begin_prologue (0, NULL);
1777 #ifdef LEAF_REG_REMAP
1778 if (crtl->uses_only_leaf_regs)
1779 leaf_renumber_regs (first);
1780 #endif
1782 /* The Sun386i and perhaps other machines don't work right
1783 if the profiling code comes after the prologue. */
1784 if (targetm.profile_before_prologue () && crtl->profile)
1786 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1787 && targetm.have_prologue ())
1789 rtx_insn *insn;
1790 for (insn = first; insn; insn = NEXT_INSN (insn))
1791 if (!NOTE_P (insn))
1793 insn = NULL;
1794 break;
1796 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1797 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1798 break;
1799 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1800 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1801 continue;
1802 else
1804 insn = NULL;
1805 break;
1808 if (insn)
1809 need_profile_function = true;
1810 else
1811 profile_function (file);
1813 else
1814 profile_function (file);
1817 /* If debugging, assign block numbers to all of the blocks in this
1818 function. */
1819 if (write_symbols)
1821 reemit_insn_block_notes ();
1822 number_blocks (current_function_decl);
1823 /* We never actually put out begin/end notes for the top-level
1824 block in the function. But, conceptually, that block is
1825 always needed. */
1826 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1829 if (warn_frame_larger_than
1830 && get_frame_size () > frame_larger_than_size)
1832 /* Issue a warning */
1833 warning (OPT_Wframe_larger_than_,
1834 "the frame size of %wd bytes is larger than %wd bytes",
1835 get_frame_size (), frame_larger_than_size);
1838 /* First output the function prologue: code to set up the stack frame. */
1839 targetm.asm_out.function_prologue (file, get_frame_size ());
1841 /* If the machine represents the prologue as RTL, the profiling code must
1842 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1843 if (! targetm.have_prologue ())
1844 profile_after_prologue (file);
1847 static void
1848 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1850 if (!targetm.profile_before_prologue () && crtl->profile)
1851 profile_function (file);
1854 static void
1855 profile_function (FILE *file ATTRIBUTE_UNUSED)
1857 #ifndef NO_PROFILE_COUNTERS
1858 # define NO_PROFILE_COUNTERS 0
1859 #endif
1860 #ifdef ASM_OUTPUT_REG_PUSH
1861 rtx sval = NULL, chain = NULL;
1863 if (cfun->returns_struct)
1864 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1865 true);
1866 if (cfun->static_chain_decl)
1867 chain = targetm.calls.static_chain (current_function_decl, true);
1868 #endif /* ASM_OUTPUT_REG_PUSH */
1870 if (! NO_PROFILE_COUNTERS)
1872 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1873 switch_to_section (data_section);
1874 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1875 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1876 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1879 switch_to_section (current_function_section ());
1881 #ifdef ASM_OUTPUT_REG_PUSH
1882 if (sval && REG_P (sval))
1883 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1884 if (chain && REG_P (chain))
1885 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1886 #endif
1888 FUNCTION_PROFILER (file, current_function_funcdef_no);
1890 #ifdef ASM_OUTPUT_REG_PUSH
1891 if (chain && REG_P (chain))
1892 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1893 if (sval && REG_P (sval))
1894 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1895 #endif
1898 /* Output assembler code for the end of a function.
1899 For clarity, args are same as those of `final_start_function'
1900 even though not all of them are needed. */
1902 void
1903 final_end_function (void)
1905 app_disable ();
1907 if (!DECL_IGNORED_P (current_function_decl))
1908 debug_hooks->end_function (high_function_linenum);
1910 /* Finally, output the function epilogue:
1911 code to restore the stack frame and return to the caller. */
1912 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1914 /* And debug output. */
1915 if (!DECL_IGNORED_P (current_function_decl))
1916 debug_hooks->end_epilogue (last_linenum, last_filename);
1918 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1919 && dwarf2out_do_frame ())
1920 dwarf2out_end_epilogue (last_linenum, last_filename);
1922 some_local_dynamic_name = 0;
1926 /* Dumper helper for basic block information. FILE is the assembly
1927 output file, and INSN is the instruction being emitted. */
1929 static void
1930 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1931 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1933 basic_block bb;
1935 if (!flag_debug_asm)
1936 return;
1938 if (INSN_UID (insn) < bb_map_size
1939 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1941 edge e;
1942 edge_iterator ei;
1944 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1945 if (bb->frequency)
1946 fprintf (file, " freq:%d", bb->frequency);
1947 if (bb->count)
1948 fprintf (file, " count:%" PRId64,
1949 bb->count);
1950 fprintf (file, " seq:%d", (*bb_seqn)++);
1951 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1952 FOR_EACH_EDGE (e, ei, bb->preds)
1954 dump_edge_info (file, e, TDF_DETAILS, 0);
1956 fprintf (file, "\n");
1958 if (INSN_UID (insn) < bb_map_size
1959 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1961 edge e;
1962 edge_iterator ei;
1964 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1965 FOR_EACH_EDGE (e, ei, bb->succs)
1967 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1969 fprintf (file, "\n");
1973 /* Output assembler code for some insns: all or part of a function.
1974 For description of args, see `final_start_function', above. */
1976 void
1977 final (rtx_insn *first, FILE *file, int optimize_p)
1979 rtx_insn *insn, *next;
1980 int seen = 0;
1982 /* Used for -dA dump. */
1983 basic_block *start_to_bb = NULL;
1984 basic_block *end_to_bb = NULL;
1985 int bb_map_size = 0;
1986 int bb_seqn = 0;
1988 last_ignored_compare = 0;
1990 if (HAVE_cc0)
1991 for (insn = first; insn; insn = NEXT_INSN (insn))
1993 /* If CC tracking across branches is enabled, record the insn which
1994 jumps to each branch only reached from one place. */
1995 if (optimize_p && JUMP_P (insn))
1997 rtx lab = JUMP_LABEL (insn);
1998 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2000 LABEL_REFS (lab) = insn;
2005 init_recog ();
2007 CC_STATUS_INIT;
2009 if (flag_debug_asm)
2011 basic_block bb;
2013 bb_map_size = get_max_uid () + 1;
2014 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2015 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2017 /* There is no cfg for a thunk. */
2018 if (!cfun->is_thunk)
2019 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2021 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2022 end_to_bb[INSN_UID (BB_END (bb))] = bb;
2026 /* Output the insns. */
2027 for (insn = first; insn;)
2029 if (HAVE_ATTR_length)
2031 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2033 /* This can be triggered by bugs elsewhere in the compiler if
2034 new insns are created after init_insn_lengths is called. */
2035 gcc_assert (NOTE_P (insn));
2036 insn_current_address = -1;
2038 else
2039 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2042 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2043 bb_map_size, &bb_seqn);
2044 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2047 if (flag_debug_asm)
2049 free (start_to_bb);
2050 free (end_to_bb);
2053 /* Remove CFI notes, to avoid compare-debug failures. */
2054 for (insn = first; insn; insn = next)
2056 next = NEXT_INSN (insn);
2057 if (NOTE_P (insn)
2058 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2059 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2060 delete_insn (insn);
2064 const char *
2065 get_insn_template (int code, rtx insn)
2067 switch (insn_data[code].output_format)
2069 case INSN_OUTPUT_FORMAT_SINGLE:
2070 return insn_data[code].output.single;
2071 case INSN_OUTPUT_FORMAT_MULTI:
2072 return insn_data[code].output.multi[which_alternative];
2073 case INSN_OUTPUT_FORMAT_FUNCTION:
2074 gcc_assert (insn);
2075 return (*insn_data[code].output.function) (recog_data.operand,
2076 as_a <rtx_insn *> (insn));
2078 default:
2079 gcc_unreachable ();
2083 /* Emit the appropriate declaration for an alternate-entry-point
2084 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2085 LABEL_KIND != LABEL_NORMAL.
2087 The case fall-through in this function is intentional. */
2088 static void
2089 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2091 const char *name = LABEL_NAME (insn);
2093 switch (LABEL_KIND (insn))
2095 case LABEL_WEAK_ENTRY:
2096 #ifdef ASM_WEAKEN_LABEL
2097 ASM_WEAKEN_LABEL (file, name);
2098 #endif
2099 case LABEL_GLOBAL_ENTRY:
2100 targetm.asm_out.globalize_label (file, name);
2101 case LABEL_STATIC_ENTRY:
2102 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2103 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2104 #endif
2105 ASM_OUTPUT_LABEL (file, name);
2106 break;
2108 case LABEL_NORMAL:
2109 default:
2110 gcc_unreachable ();
2114 /* Given a CALL_INSN, find and return the nested CALL. */
2115 static rtx
2116 call_from_call_insn (rtx_call_insn *insn)
2118 rtx x;
2119 gcc_assert (CALL_P (insn));
2120 x = PATTERN (insn);
2122 while (GET_CODE (x) != CALL)
2124 switch (GET_CODE (x))
2126 default:
2127 gcc_unreachable ();
2128 case COND_EXEC:
2129 x = COND_EXEC_CODE (x);
2130 break;
2131 case PARALLEL:
2132 x = XVECEXP (x, 0, 0);
2133 break;
2134 case SET:
2135 x = XEXP (x, 1);
2136 break;
2139 return x;
2142 /* The final scan for one insn, INSN.
2143 Args are same as in `final', except that INSN
2144 is the insn being scanned.
2145 Value returned is the next insn to be scanned.
2147 NOPEEPHOLES is the flag to disallow peephole processing (currently
2148 used for within delayed branch sequence output).
2150 SEEN is used to track the end of the prologue, for emitting
2151 debug information. We force the emission of a line note after
2152 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2154 rtx_insn *
2155 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2156 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2158 #if HAVE_cc0
2159 rtx set;
2160 #endif
2161 rtx_insn *next;
2163 insn_counter++;
2165 /* Ignore deleted insns. These can occur when we split insns (due to a
2166 template of "#") while not optimizing. */
2167 if (insn->deleted ())
2168 return NEXT_INSN (insn);
2170 switch (GET_CODE (insn))
2172 case NOTE:
2173 switch (NOTE_KIND (insn))
2175 case NOTE_INSN_DELETED:
2176 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2177 break;
2179 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2180 in_cold_section_p = !in_cold_section_p;
2182 if (dwarf2out_do_frame ())
2183 dwarf2out_switch_text_section ();
2184 else if (!DECL_IGNORED_P (current_function_decl))
2185 debug_hooks->switch_text_section ();
2187 switch_to_section (current_function_section ());
2188 targetm.asm_out.function_switched_text_sections (asm_out_file,
2189 current_function_decl,
2190 in_cold_section_p);
2191 /* Emit a label for the split cold section. Form label name by
2192 suffixing "cold" to the original function's name. */
2193 if (in_cold_section_p)
2195 cold_function_name
2196 = clone_function_name (current_function_decl, "cold");
2197 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2198 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2199 IDENTIFIER_POINTER
2200 (cold_function_name),
2201 current_function_decl);
2202 #else
2203 ASM_OUTPUT_LABEL (asm_out_file,
2204 IDENTIFIER_POINTER (cold_function_name));
2205 #endif
2207 break;
2209 case NOTE_INSN_BASIC_BLOCK:
2210 if (need_profile_function)
2212 profile_function (asm_out_file);
2213 need_profile_function = false;
2216 if (targetm.asm_out.unwind_emit)
2217 targetm.asm_out.unwind_emit (asm_out_file, insn);
2219 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2221 break;
2223 case NOTE_INSN_EH_REGION_BEG:
2224 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2225 NOTE_EH_HANDLER (insn));
2226 break;
2228 case NOTE_INSN_EH_REGION_END:
2229 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2230 NOTE_EH_HANDLER (insn));
2231 break;
2233 case NOTE_INSN_PROLOGUE_END:
2234 targetm.asm_out.function_end_prologue (file);
2235 profile_after_prologue (file);
2237 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2239 *seen |= SEEN_EMITTED;
2240 force_source_line = true;
2242 else
2243 *seen |= SEEN_NOTE;
2245 break;
2247 case NOTE_INSN_EPILOGUE_BEG:
2248 if (!DECL_IGNORED_P (current_function_decl))
2249 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2250 targetm.asm_out.function_begin_epilogue (file);
2251 break;
2253 case NOTE_INSN_CFI:
2254 dwarf2out_emit_cfi (NOTE_CFI (insn));
2255 break;
2257 case NOTE_INSN_CFI_LABEL:
2258 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2259 NOTE_LABEL_NUMBER (insn));
2260 break;
2262 case NOTE_INSN_FUNCTION_BEG:
2263 if (need_profile_function)
2265 profile_function (asm_out_file);
2266 need_profile_function = false;
2269 app_disable ();
2270 if (!DECL_IGNORED_P (current_function_decl))
2271 debug_hooks->end_prologue (last_linenum, last_filename);
2273 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2275 *seen |= SEEN_EMITTED;
2276 force_source_line = true;
2278 else
2279 *seen |= SEEN_NOTE;
2281 break;
2283 case NOTE_INSN_BLOCK_BEG:
2284 if (debug_info_level == DINFO_LEVEL_NORMAL
2285 || debug_info_level == DINFO_LEVEL_VERBOSE
2286 || write_symbols == DWARF2_DEBUG
2287 || write_symbols == VMS_AND_DWARF2_DEBUG
2288 || write_symbols == VMS_DEBUG)
2290 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2292 app_disable ();
2293 ++block_depth;
2294 high_block_linenum = last_linenum;
2296 /* Output debugging info about the symbol-block beginning. */
2297 if (!DECL_IGNORED_P (current_function_decl))
2298 debug_hooks->begin_block (last_linenum, n);
2300 /* Mark this block as output. */
2301 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2303 if (write_symbols == DBX_DEBUG
2304 || write_symbols == SDB_DEBUG)
2306 location_t *locus_ptr
2307 = block_nonartificial_location (NOTE_BLOCK (insn));
2309 if (locus_ptr != NULL)
2311 override_filename = LOCATION_FILE (*locus_ptr);
2312 override_linenum = LOCATION_LINE (*locus_ptr);
2315 break;
2317 case NOTE_INSN_BLOCK_END:
2318 if (debug_info_level == DINFO_LEVEL_NORMAL
2319 || debug_info_level == DINFO_LEVEL_VERBOSE
2320 || write_symbols == DWARF2_DEBUG
2321 || write_symbols == VMS_AND_DWARF2_DEBUG
2322 || write_symbols == VMS_DEBUG)
2324 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2326 app_disable ();
2328 /* End of a symbol-block. */
2329 --block_depth;
2330 gcc_assert (block_depth >= 0);
2332 if (!DECL_IGNORED_P (current_function_decl))
2333 debug_hooks->end_block (high_block_linenum, n);
2335 if (write_symbols == DBX_DEBUG
2336 || write_symbols == SDB_DEBUG)
2338 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2339 location_t *locus_ptr
2340 = block_nonartificial_location (outer_block);
2342 if (locus_ptr != NULL)
2344 override_filename = LOCATION_FILE (*locus_ptr);
2345 override_linenum = LOCATION_LINE (*locus_ptr);
2347 else
2349 override_filename = NULL;
2350 override_linenum = 0;
2353 break;
2355 case NOTE_INSN_DELETED_LABEL:
2356 /* Emit the label. We may have deleted the CODE_LABEL because
2357 the label could be proved to be unreachable, though still
2358 referenced (in the form of having its address taken. */
2359 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2360 break;
2362 case NOTE_INSN_DELETED_DEBUG_LABEL:
2363 /* Similarly, but need to use different namespace for it. */
2364 if (CODE_LABEL_NUMBER (insn) != -1)
2365 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2366 break;
2368 case NOTE_INSN_VAR_LOCATION:
2369 case NOTE_INSN_CALL_ARG_LOCATION:
2370 if (!DECL_IGNORED_P (current_function_decl))
2371 debug_hooks->var_location (insn);
2372 break;
2374 default:
2375 gcc_unreachable ();
2376 break;
2378 break;
2380 case BARRIER:
2381 break;
2383 case CODE_LABEL:
2384 /* The target port might emit labels in the output function for
2385 some insn, e.g. sh.c output_branchy_insn. */
2386 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2388 int align = LABEL_TO_ALIGNMENT (insn);
2389 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2390 int max_skip = LABEL_TO_MAX_SKIP (insn);
2391 #endif
2393 if (align && NEXT_INSN (insn))
2395 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2397 #else
2398 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2399 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2400 #else
2401 ASM_OUTPUT_ALIGN (file, align);
2402 #endif
2403 #endif
2406 CC_STATUS_INIT;
2408 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2409 debug_hooks->label (as_a <rtx_code_label *> (insn));
2411 app_disable ();
2413 next = next_nonnote_insn (insn);
2414 /* If this label is followed by a jump-table, make sure we put
2415 the label in the read-only section. Also possibly write the
2416 label and jump table together. */
2417 if (next != 0 && JUMP_TABLE_DATA_P (next))
2419 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2420 /* In this case, the case vector is being moved by the
2421 target, so don't output the label at all. Leave that
2422 to the back end macros. */
2423 #else
2424 if (! JUMP_TABLES_IN_TEXT_SECTION)
2426 int log_align;
2428 switch_to_section (targetm.asm_out.function_rodata_section
2429 (current_function_decl));
2431 #ifdef ADDR_VEC_ALIGN
2432 log_align = ADDR_VEC_ALIGN (next);
2433 #else
2434 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2435 #endif
2436 ASM_OUTPUT_ALIGN (file, log_align);
2438 else
2439 switch_to_section (current_function_section ());
2441 #ifdef ASM_OUTPUT_CASE_LABEL
2442 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2443 next);
2444 #else
2445 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2446 #endif
2447 #endif
2448 break;
2450 if (LABEL_ALT_ENTRY_P (insn))
2451 output_alternate_entry_point (file, insn);
2452 else
2453 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2454 break;
2456 default:
2458 rtx body = PATTERN (insn);
2459 int insn_code_number;
2460 const char *templ;
2461 bool is_stmt;
2463 /* Reset this early so it is correct for ASM statements. */
2464 current_insn_predicate = NULL_RTX;
2466 /* An INSN, JUMP_INSN or CALL_INSN.
2467 First check for special kinds that recog doesn't recognize. */
2469 if (GET_CODE (body) == USE /* These are just declarations. */
2470 || GET_CODE (body) == CLOBBER)
2471 break;
2473 #if HAVE_cc0
2475 /* If there is a REG_CC_SETTER note on this insn, it means that
2476 the setting of the condition code was done in the delay slot
2477 of the insn that branched here. So recover the cc status
2478 from the insn that set it. */
2480 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2481 if (note)
2483 rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2484 NOTICE_UPDATE_CC (PATTERN (other), other);
2485 cc_prev_status = cc_status;
2488 #endif
2490 /* Detect insns that are really jump-tables
2491 and output them as such. */
2493 if (JUMP_TABLE_DATA_P (insn))
2495 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2496 int vlen, idx;
2497 #endif
2499 if (! JUMP_TABLES_IN_TEXT_SECTION)
2500 switch_to_section (targetm.asm_out.function_rodata_section
2501 (current_function_decl));
2502 else
2503 switch_to_section (current_function_section ());
2505 app_disable ();
2507 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2508 if (GET_CODE (body) == ADDR_VEC)
2510 #ifdef ASM_OUTPUT_ADDR_VEC
2511 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2512 #else
2513 gcc_unreachable ();
2514 #endif
2516 else
2518 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2519 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2520 #else
2521 gcc_unreachable ();
2522 #endif
2524 #else
2525 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2526 for (idx = 0; idx < vlen; idx++)
2528 if (GET_CODE (body) == ADDR_VEC)
2530 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2531 ASM_OUTPUT_ADDR_VEC_ELT
2532 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2533 #else
2534 gcc_unreachable ();
2535 #endif
2537 else
2539 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2540 ASM_OUTPUT_ADDR_DIFF_ELT
2541 (file,
2542 body,
2543 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2544 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2545 #else
2546 gcc_unreachable ();
2547 #endif
2550 #ifdef ASM_OUTPUT_CASE_END
2551 ASM_OUTPUT_CASE_END (file,
2552 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2553 insn);
2554 #endif
2555 #endif
2557 switch_to_section (current_function_section ());
2559 break;
2561 /* Output this line note if it is the first or the last line
2562 note in a row. */
2563 if (!DECL_IGNORED_P (current_function_decl)
2564 && notice_source_line (insn, &is_stmt))
2565 (*debug_hooks->source_line) (last_linenum, last_filename,
2566 last_discriminator, is_stmt);
2568 if (GET_CODE (body) == ASM_INPUT)
2570 const char *string = XSTR (body, 0);
2572 /* There's no telling what that did to the condition codes. */
2573 CC_STATUS_INIT;
2575 if (string[0])
2577 expanded_location loc;
2579 app_enable ();
2580 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2581 if (*loc.file && loc.line)
2582 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2583 ASM_COMMENT_START, loc.line, loc.file);
2584 fprintf (asm_out_file, "\t%s\n", string);
2585 #if HAVE_AS_LINE_ZERO
2586 if (*loc.file && loc.line)
2587 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2588 #endif
2590 break;
2593 /* Detect `asm' construct with operands. */
2594 if (asm_noperands (body) >= 0)
2596 unsigned int noperands = asm_noperands (body);
2597 rtx *ops = XALLOCAVEC (rtx, noperands);
2598 const char *string;
2599 location_t loc;
2600 expanded_location expanded;
2602 /* There's no telling what that did to the condition codes. */
2603 CC_STATUS_INIT;
2605 /* Get out the operand values. */
2606 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2607 /* Inhibit dying on what would otherwise be compiler bugs. */
2608 insn_noperands = noperands;
2609 this_is_asm_operands = insn;
2610 expanded = expand_location (loc);
2612 #ifdef FINAL_PRESCAN_INSN
2613 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2614 #endif
2616 /* Output the insn using them. */
2617 if (string[0])
2619 app_enable ();
2620 if (expanded.file && expanded.line)
2621 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2622 ASM_COMMENT_START, expanded.line, expanded.file);
2623 output_asm_insn (string, ops);
2624 #if HAVE_AS_LINE_ZERO
2625 if (expanded.file && expanded.line)
2626 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2627 #endif
2630 if (targetm.asm_out.final_postscan_insn)
2631 targetm.asm_out.final_postscan_insn (file, insn, ops,
2632 insn_noperands);
2634 this_is_asm_operands = 0;
2635 break;
2638 app_disable ();
2640 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2642 /* A delayed-branch sequence */
2643 int i;
2645 final_sequence = seq;
2647 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2648 force the restoration of a comparison that was previously
2649 thought unnecessary. If that happens, cancel this sequence
2650 and cause that insn to be restored. */
2652 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2653 if (next != seq->insn (1))
2655 final_sequence = 0;
2656 return next;
2659 for (i = 1; i < seq->len (); i++)
2661 rtx_insn *insn = seq->insn (i);
2662 rtx_insn *next = NEXT_INSN (insn);
2663 /* We loop in case any instruction in a delay slot gets
2664 split. */
2666 insn = final_scan_insn (insn, file, 0, 1, seen);
2667 while (insn != next);
2669 #ifdef DBR_OUTPUT_SEQEND
2670 DBR_OUTPUT_SEQEND (file);
2671 #endif
2672 final_sequence = 0;
2674 /* If the insn requiring the delay slot was a CALL_INSN, the
2675 insns in the delay slot are actually executed before the
2676 called function. Hence we don't preserve any CC-setting
2677 actions in these insns and the CC must be marked as being
2678 clobbered by the function. */
2679 if (CALL_P (seq->insn (0)))
2681 CC_STATUS_INIT;
2683 break;
2686 /* We have a real machine instruction as rtl. */
2688 body = PATTERN (insn);
2690 #if HAVE_cc0
2691 set = single_set (insn);
2693 /* Check for redundant test and compare instructions
2694 (when the condition codes are already set up as desired).
2695 This is done only when optimizing; if not optimizing,
2696 it should be possible for the user to alter a variable
2697 with the debugger in between statements
2698 and the next statement should reexamine the variable
2699 to compute the condition codes. */
2701 if (optimize_p)
2703 if (set
2704 && GET_CODE (SET_DEST (set)) == CC0
2705 && insn != last_ignored_compare)
2707 rtx src1, src2;
2708 if (GET_CODE (SET_SRC (set)) == SUBREG)
2709 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2711 src1 = SET_SRC (set);
2712 src2 = NULL_RTX;
2713 if (GET_CODE (SET_SRC (set)) == COMPARE)
2715 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2716 XEXP (SET_SRC (set), 0)
2717 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2718 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2719 XEXP (SET_SRC (set), 1)
2720 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2721 if (XEXP (SET_SRC (set), 1)
2722 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2723 src2 = XEXP (SET_SRC (set), 0);
2725 if ((cc_status.value1 != 0
2726 && rtx_equal_p (src1, cc_status.value1))
2727 || (cc_status.value2 != 0
2728 && rtx_equal_p (src1, cc_status.value2))
2729 || (src2 != 0 && cc_status.value1 != 0
2730 && rtx_equal_p (src2, cc_status.value1))
2731 || (src2 != 0 && cc_status.value2 != 0
2732 && rtx_equal_p (src2, cc_status.value2)))
2734 /* Don't delete insn if it has an addressing side-effect. */
2735 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2736 /* or if anything in it is volatile. */
2737 && ! volatile_refs_p (PATTERN (insn)))
2739 /* We don't really delete the insn; just ignore it. */
2740 last_ignored_compare = insn;
2741 break;
2747 /* If this is a conditional branch, maybe modify it
2748 if the cc's are in a nonstandard state
2749 so that it accomplishes the same thing that it would
2750 do straightforwardly if the cc's were set up normally. */
2752 if (cc_status.flags != 0
2753 && JUMP_P (insn)
2754 && GET_CODE (body) == SET
2755 && SET_DEST (body) == pc_rtx
2756 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2757 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2758 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2760 /* This function may alter the contents of its argument
2761 and clear some of the cc_status.flags bits.
2762 It may also return 1 meaning condition now always true
2763 or -1 meaning condition now always false
2764 or 2 meaning condition nontrivial but altered. */
2765 int result = alter_cond (XEXP (SET_SRC (body), 0));
2766 /* If condition now has fixed value, replace the IF_THEN_ELSE
2767 with its then-operand or its else-operand. */
2768 if (result == 1)
2769 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2770 if (result == -1)
2771 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2773 /* The jump is now either unconditional or a no-op.
2774 If it has become a no-op, don't try to output it.
2775 (It would not be recognized.) */
2776 if (SET_SRC (body) == pc_rtx)
2778 delete_insn (insn);
2779 break;
2781 else if (ANY_RETURN_P (SET_SRC (body)))
2782 /* Replace (set (pc) (return)) with (return). */
2783 PATTERN (insn) = body = SET_SRC (body);
2785 /* Rerecognize the instruction if it has changed. */
2786 if (result != 0)
2787 INSN_CODE (insn) = -1;
2790 /* If this is a conditional trap, maybe modify it if the cc's
2791 are in a nonstandard state so that it accomplishes the same
2792 thing that it would do straightforwardly if the cc's were
2793 set up normally. */
2794 if (cc_status.flags != 0
2795 && NONJUMP_INSN_P (insn)
2796 && GET_CODE (body) == TRAP_IF
2797 && COMPARISON_P (TRAP_CONDITION (body))
2798 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2800 /* This function may alter the contents of its argument
2801 and clear some of the cc_status.flags bits.
2802 It may also return 1 meaning condition now always true
2803 or -1 meaning condition now always false
2804 or 2 meaning condition nontrivial but altered. */
2805 int result = alter_cond (TRAP_CONDITION (body));
2807 /* If TRAP_CONDITION has become always false, delete the
2808 instruction. */
2809 if (result == -1)
2811 delete_insn (insn);
2812 break;
2815 /* If TRAP_CONDITION has become always true, replace
2816 TRAP_CONDITION with const_true_rtx. */
2817 if (result == 1)
2818 TRAP_CONDITION (body) = const_true_rtx;
2820 /* Rerecognize the instruction if it has changed. */
2821 if (result != 0)
2822 INSN_CODE (insn) = -1;
2825 /* Make same adjustments to instructions that examine the
2826 condition codes without jumping and instructions that
2827 handle conditional moves (if this machine has either one). */
2829 if (cc_status.flags != 0
2830 && set != 0)
2832 rtx cond_rtx, then_rtx, else_rtx;
2834 if (!JUMP_P (insn)
2835 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2837 cond_rtx = XEXP (SET_SRC (set), 0);
2838 then_rtx = XEXP (SET_SRC (set), 1);
2839 else_rtx = XEXP (SET_SRC (set), 2);
2841 else
2843 cond_rtx = SET_SRC (set);
2844 then_rtx = const_true_rtx;
2845 else_rtx = const0_rtx;
2848 if (COMPARISON_P (cond_rtx)
2849 && XEXP (cond_rtx, 0) == cc0_rtx)
2851 int result;
2852 result = alter_cond (cond_rtx);
2853 if (result == 1)
2854 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2855 else if (result == -1)
2856 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2857 else if (result == 2)
2858 INSN_CODE (insn) = -1;
2859 if (SET_DEST (set) == SET_SRC (set))
2860 delete_insn (insn);
2864 #endif
2866 /* Do machine-specific peephole optimizations if desired. */
2868 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2870 rtx_insn *next = peephole (insn);
2871 /* When peepholing, if there were notes within the peephole,
2872 emit them before the peephole. */
2873 if (next != 0 && next != NEXT_INSN (insn))
2875 rtx_insn *note, *prev = PREV_INSN (insn);
2877 for (note = NEXT_INSN (insn); note != next;
2878 note = NEXT_INSN (note))
2879 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2881 /* Put the notes in the proper position for a later
2882 rescan. For example, the SH target can do this
2883 when generating a far jump in a delayed branch
2884 sequence. */
2885 note = NEXT_INSN (insn);
2886 SET_PREV_INSN (note) = prev;
2887 SET_NEXT_INSN (prev) = note;
2888 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2889 SET_PREV_INSN (insn) = PREV_INSN (next);
2890 SET_NEXT_INSN (insn) = next;
2891 SET_PREV_INSN (next) = insn;
2894 /* PEEPHOLE might have changed this. */
2895 body = PATTERN (insn);
2898 /* Try to recognize the instruction.
2899 If successful, verify that the operands satisfy the
2900 constraints for the instruction. Crash if they don't,
2901 since `reload' should have changed them so that they do. */
2903 insn_code_number = recog_memoized (insn);
2904 cleanup_subreg_operands (insn);
2906 /* Dump the insn in the assembly for debugging (-dAP).
2907 If the final dump is requested as slim RTL, dump slim
2908 RTL to the assembly file also. */
2909 if (flag_dump_rtl_in_asm)
2911 print_rtx_head = ASM_COMMENT_START;
2912 if (! (dump_flags & TDF_SLIM))
2913 print_rtl_single (asm_out_file, insn);
2914 else
2915 dump_insn_slim (asm_out_file, insn);
2916 print_rtx_head = "";
2919 if (! constrain_operands_cached (insn, 1))
2920 fatal_insn_not_found (insn);
2922 /* Some target machines need to prescan each insn before
2923 it is output. */
2925 #ifdef FINAL_PRESCAN_INSN
2926 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2927 #endif
2929 if (targetm.have_conditional_execution ()
2930 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2931 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2933 #if HAVE_cc0
2934 cc_prev_status = cc_status;
2936 /* Update `cc_status' for this instruction.
2937 The instruction's output routine may change it further.
2938 If the output routine for a jump insn needs to depend
2939 on the cc status, it should look at cc_prev_status. */
2941 NOTICE_UPDATE_CC (body, insn);
2942 #endif
2944 current_output_insn = debug_insn = insn;
2946 /* Find the proper template for this insn. */
2947 templ = get_insn_template (insn_code_number, insn);
2949 /* If the C code returns 0, it means that it is a jump insn
2950 which follows a deleted test insn, and that test insn
2951 needs to be reinserted. */
2952 if (templ == 0)
2954 rtx_insn *prev;
2956 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2958 /* We have already processed the notes between the setter and
2959 the user. Make sure we don't process them again, this is
2960 particularly important if one of the notes is a block
2961 scope note or an EH note. */
2962 for (prev = insn;
2963 prev != last_ignored_compare;
2964 prev = PREV_INSN (prev))
2966 if (NOTE_P (prev))
2967 delete_insn (prev); /* Use delete_note. */
2970 return prev;
2973 /* If the template is the string "#", it means that this insn must
2974 be split. */
2975 if (templ[0] == '#' && templ[1] == '\0')
2977 rtx_insn *new_rtx = try_split (body, insn, 0);
2979 /* If we didn't split the insn, go away. */
2980 if (new_rtx == insn && PATTERN (new_rtx) == body)
2981 fatal_insn ("could not split insn", insn);
2983 /* If we have a length attribute, this instruction should have
2984 been split in shorten_branches, to ensure that we would have
2985 valid length info for the splitees. */
2986 gcc_assert (!HAVE_ATTR_length);
2988 return new_rtx;
2991 /* ??? This will put the directives in the wrong place if
2992 get_insn_template outputs assembly directly. However calling it
2993 before get_insn_template breaks if the insns is split. */
2994 if (targetm.asm_out.unwind_emit_before_insn
2995 && targetm.asm_out.unwind_emit)
2996 targetm.asm_out.unwind_emit (asm_out_file, insn);
2998 if (rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn))
3000 rtx x = call_from_call_insn (call_insn);
3001 x = XEXP (x, 0);
3002 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3004 tree t;
3005 x = XEXP (x, 0);
3006 t = SYMBOL_REF_DECL (x);
3007 if (t)
3008 assemble_external (t);
3010 if (!DECL_IGNORED_P (current_function_decl))
3011 debug_hooks->var_location (insn);
3014 /* Output assembler code from the template. */
3015 output_asm_insn (templ, recog_data.operand);
3017 /* Some target machines need to postscan each insn after
3018 it is output. */
3019 if (targetm.asm_out.final_postscan_insn)
3020 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3021 recog_data.n_operands);
3023 if (!targetm.asm_out.unwind_emit_before_insn
3024 && targetm.asm_out.unwind_emit)
3025 targetm.asm_out.unwind_emit (asm_out_file, insn);
3027 current_output_insn = debug_insn = 0;
3030 return NEXT_INSN (insn);
3033 /* Return whether a source line note needs to be emitted before INSN.
3034 Sets IS_STMT to TRUE if the line should be marked as a possible
3035 breakpoint location. */
3037 static bool
3038 notice_source_line (rtx_insn *insn, bool *is_stmt)
3040 const char *filename;
3041 int linenum;
3043 if (override_filename)
3045 filename = override_filename;
3046 linenum = override_linenum;
3048 else if (INSN_HAS_LOCATION (insn))
3050 expanded_location xloc = insn_location (insn);
3051 filename = xloc.file;
3052 linenum = xloc.line;
3054 else
3056 filename = NULL;
3057 linenum = 0;
3060 if (filename == NULL)
3061 return false;
3063 if (force_source_line
3064 || filename != last_filename
3065 || last_linenum != linenum)
3067 force_source_line = false;
3068 last_filename = filename;
3069 last_linenum = linenum;
3070 last_discriminator = discriminator;
3071 *is_stmt = true;
3072 high_block_linenum = MAX (last_linenum, high_block_linenum);
3073 high_function_linenum = MAX (last_linenum, high_function_linenum);
3074 return true;
3077 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3079 /* If the discriminator changed, but the line number did not,
3080 output the line table entry with is_stmt false so the
3081 debugger does not treat this as a breakpoint location. */
3082 last_discriminator = discriminator;
3083 *is_stmt = false;
3084 return true;
3087 return false;
3090 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3091 directly to the desired hard register. */
3093 void
3094 cleanup_subreg_operands (rtx_insn *insn)
3096 int i;
3097 bool changed = false;
3098 extract_insn_cached (insn);
3099 for (i = 0; i < recog_data.n_operands; i++)
3101 /* The following test cannot use recog_data.operand when testing
3102 for a SUBREG: the underlying object might have been changed
3103 already if we are inside a match_operator expression that
3104 matches the else clause. Instead we test the underlying
3105 expression directly. */
3106 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3108 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3109 changed = true;
3111 else if (GET_CODE (recog_data.operand[i]) == PLUS
3112 || GET_CODE (recog_data.operand[i]) == MULT
3113 || MEM_P (recog_data.operand[i]))
3114 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3117 for (i = 0; i < recog_data.n_dups; i++)
3119 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3121 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3122 changed = true;
3124 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3125 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3126 || MEM_P (*recog_data.dup_loc[i]))
3127 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3129 if (changed)
3130 df_insn_rescan (insn);
3133 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3134 the thing it is a subreg of. Do it anyway if FINAL_P. */
3137 alter_subreg (rtx *xp, bool final_p)
3139 rtx x = *xp;
3140 rtx y = SUBREG_REG (x);
3142 /* simplify_subreg does not remove subreg from volatile references.
3143 We are required to. */
3144 if (MEM_P (y))
3146 int offset = SUBREG_BYTE (x);
3148 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3149 contains 0 instead of the proper offset. See simplify_subreg. */
3150 if (offset == 0
3151 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3153 int difference = GET_MODE_SIZE (GET_MODE (y))
3154 - GET_MODE_SIZE (GET_MODE (x));
3155 if (WORDS_BIG_ENDIAN)
3156 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3157 if (BYTES_BIG_ENDIAN)
3158 offset += difference % UNITS_PER_WORD;
3161 if (final_p)
3162 *xp = adjust_address (y, GET_MODE (x), offset);
3163 else
3164 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3166 else if (REG_P (y) && HARD_REGISTER_P (y))
3168 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3169 SUBREG_BYTE (x));
3171 if (new_rtx != 0)
3172 *xp = new_rtx;
3173 else if (final_p && REG_P (y))
3175 /* Simplify_subreg can't handle some REG cases, but we have to. */
3176 unsigned int regno;
3177 HOST_WIDE_INT offset;
3179 regno = subreg_regno (x);
3180 if (subreg_lowpart_p (x))
3181 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3182 else
3183 offset = SUBREG_BYTE (x);
3184 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3188 return *xp;
3191 /* Do alter_subreg on all the SUBREGs contained in X. */
3193 static rtx
3194 walk_alter_subreg (rtx *xp, bool *changed)
3196 rtx x = *xp;
3197 switch (GET_CODE (x))
3199 case PLUS:
3200 case MULT:
3201 case AND:
3202 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3203 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3204 break;
3206 case MEM:
3207 case ZERO_EXTEND:
3208 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3209 break;
3211 case SUBREG:
3212 *changed = true;
3213 return alter_subreg (xp, true);
3215 default:
3216 break;
3219 return *xp;
3222 #if HAVE_cc0
3224 /* Given BODY, the body of a jump instruction, alter the jump condition
3225 as required by the bits that are set in cc_status.flags.
3226 Not all of the bits there can be handled at this level in all cases.
3228 The value is normally 0.
3229 1 means that the condition has become always true.
3230 -1 means that the condition has become always false.
3231 2 means that COND has been altered. */
3233 static int
3234 alter_cond (rtx cond)
3236 int value = 0;
3238 if (cc_status.flags & CC_REVERSED)
3240 value = 2;
3241 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3244 if (cc_status.flags & CC_INVERTED)
3246 value = 2;
3247 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3250 if (cc_status.flags & CC_NOT_POSITIVE)
3251 switch (GET_CODE (cond))
3253 case LE:
3254 case LEU:
3255 case GEU:
3256 /* Jump becomes unconditional. */
3257 return 1;
3259 case GT:
3260 case GTU:
3261 case LTU:
3262 /* Jump becomes no-op. */
3263 return -1;
3265 case GE:
3266 PUT_CODE (cond, EQ);
3267 value = 2;
3268 break;
3270 case LT:
3271 PUT_CODE (cond, NE);
3272 value = 2;
3273 break;
3275 default:
3276 break;
3279 if (cc_status.flags & CC_NOT_NEGATIVE)
3280 switch (GET_CODE (cond))
3282 case GE:
3283 case GEU:
3284 /* Jump becomes unconditional. */
3285 return 1;
3287 case LT:
3288 case LTU:
3289 /* Jump becomes no-op. */
3290 return -1;
3292 case LE:
3293 case LEU:
3294 PUT_CODE (cond, EQ);
3295 value = 2;
3296 break;
3298 case GT:
3299 case GTU:
3300 PUT_CODE (cond, NE);
3301 value = 2;
3302 break;
3304 default:
3305 break;
3308 if (cc_status.flags & CC_NO_OVERFLOW)
3309 switch (GET_CODE (cond))
3311 case GEU:
3312 /* Jump becomes unconditional. */
3313 return 1;
3315 case LEU:
3316 PUT_CODE (cond, EQ);
3317 value = 2;
3318 break;
3320 case GTU:
3321 PUT_CODE (cond, NE);
3322 value = 2;
3323 break;
3325 case LTU:
3326 /* Jump becomes no-op. */
3327 return -1;
3329 default:
3330 break;
3333 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3334 switch (GET_CODE (cond))
3336 default:
3337 gcc_unreachable ();
3339 case NE:
3340 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3341 value = 2;
3342 break;
3344 case EQ:
3345 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3346 value = 2;
3347 break;
3350 if (cc_status.flags & CC_NOT_SIGNED)
3351 /* The flags are valid if signed condition operators are converted
3352 to unsigned. */
3353 switch (GET_CODE (cond))
3355 case LE:
3356 PUT_CODE (cond, LEU);
3357 value = 2;
3358 break;
3360 case LT:
3361 PUT_CODE (cond, LTU);
3362 value = 2;
3363 break;
3365 case GT:
3366 PUT_CODE (cond, GTU);
3367 value = 2;
3368 break;
3370 case GE:
3371 PUT_CODE (cond, GEU);
3372 value = 2;
3373 break;
3375 default:
3376 break;
3379 return value;
3381 #endif
3383 /* Report inconsistency between the assembler template and the operands.
3384 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3386 void
3387 output_operand_lossage (const char *cmsgid, ...)
3389 char *fmt_string;
3390 char *new_message;
3391 const char *pfx_str;
3392 va_list ap;
3394 va_start (ap, cmsgid);
3396 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3397 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3398 new_message = xvasprintf (fmt_string, ap);
3400 if (this_is_asm_operands)
3401 error_for_asm (this_is_asm_operands, "%s", new_message);
3402 else
3403 internal_error ("%s", new_message);
3405 free (fmt_string);
3406 free (new_message);
3407 va_end (ap);
3410 /* Output of assembler code from a template, and its subroutines. */
3412 /* Annotate the assembly with a comment describing the pattern and
3413 alternative used. */
3415 static void
3416 output_asm_name (void)
3418 if (debug_insn)
3420 int num = INSN_CODE (debug_insn);
3421 fprintf (asm_out_file, "\t%s %d\t%s",
3422 ASM_COMMENT_START, INSN_UID (debug_insn),
3423 insn_data[num].name);
3424 if (insn_data[num].n_alternatives > 1)
3425 fprintf (asm_out_file, "/%d", which_alternative + 1);
3427 if (HAVE_ATTR_length)
3428 fprintf (asm_out_file, "\t[length = %d]",
3429 get_attr_length (debug_insn));
3431 /* Clear this so only the first assembler insn
3432 of any rtl insn will get the special comment for -dp. */
3433 debug_insn = 0;
3437 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3438 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3439 corresponds to the address of the object and 0 if to the object. */
3441 static tree
3442 get_mem_expr_from_op (rtx op, int *paddressp)
3444 tree expr;
3445 int inner_addressp;
3447 *paddressp = 0;
3449 if (REG_P (op))
3450 return REG_EXPR (op);
3451 else if (!MEM_P (op))
3452 return 0;
3454 if (MEM_EXPR (op) != 0)
3455 return MEM_EXPR (op);
3457 /* Otherwise we have an address, so indicate it and look at the address. */
3458 *paddressp = 1;
3459 op = XEXP (op, 0);
3461 /* First check if we have a decl for the address, then look at the right side
3462 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3463 But don't allow the address to itself be indirect. */
3464 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3465 return expr;
3466 else if (GET_CODE (op) == PLUS
3467 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3468 return expr;
3470 while (UNARY_P (op)
3471 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3472 op = XEXP (op, 0);
3474 expr = get_mem_expr_from_op (op, &inner_addressp);
3475 return inner_addressp ? 0 : expr;
3478 /* Output operand names for assembler instructions. OPERANDS is the
3479 operand vector, OPORDER is the order to write the operands, and NOPS
3480 is the number of operands to write. */
3482 static void
3483 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3485 int wrote = 0;
3486 int i;
3488 for (i = 0; i < nops; i++)
3490 int addressp;
3491 rtx op = operands[oporder[i]];
3492 tree expr = get_mem_expr_from_op (op, &addressp);
3494 fprintf (asm_out_file, "%c%s",
3495 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3496 wrote = 1;
3497 if (expr)
3499 fprintf (asm_out_file, "%s",
3500 addressp ? "*" : "");
3501 print_mem_expr (asm_out_file, expr);
3502 wrote = 1;
3504 else if (REG_P (op) && ORIGINAL_REGNO (op)
3505 && ORIGINAL_REGNO (op) != REGNO (op))
3506 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3510 #ifdef ASSEMBLER_DIALECT
3511 /* Helper function to parse assembler dialects in the asm string.
3512 This is called from output_asm_insn and asm_fprintf. */
3513 static const char *
3514 do_assembler_dialects (const char *p, int *dialect)
3516 char c = *(p - 1);
3518 switch (c)
3520 case '{':
3522 int i;
3524 if (*dialect)
3525 output_operand_lossage ("nested assembly dialect alternatives");
3526 else
3527 *dialect = 1;
3529 /* If we want the first dialect, do nothing. Otherwise, skip
3530 DIALECT_NUMBER of strings ending with '|'. */
3531 for (i = 0; i < dialect_number; i++)
3533 while (*p && *p != '}')
3535 if (*p == '|')
3537 p++;
3538 break;
3541 /* Skip over any character after a percent sign. */
3542 if (*p == '%')
3543 p++;
3544 if (*p)
3545 p++;
3548 if (*p == '}')
3549 break;
3552 if (*p == '\0')
3553 output_operand_lossage ("unterminated assembly dialect alternative");
3555 break;
3557 case '|':
3558 if (*dialect)
3560 /* Skip to close brace. */
3563 if (*p == '\0')
3565 output_operand_lossage ("unterminated assembly dialect alternative");
3566 break;
3569 /* Skip over any character after a percent sign. */
3570 if (*p == '%' && p[1])
3572 p += 2;
3573 continue;
3576 if (*p++ == '}')
3577 break;
3579 while (1);
3581 *dialect = 0;
3583 else
3584 putc (c, asm_out_file);
3585 break;
3587 case '}':
3588 if (! *dialect)
3589 putc (c, asm_out_file);
3590 *dialect = 0;
3591 break;
3592 default:
3593 gcc_unreachable ();
3596 return p;
3598 #endif
3600 /* Output text from TEMPLATE to the assembler output file,
3601 obeying %-directions to substitute operands taken from
3602 the vector OPERANDS.
3604 %N (for N a digit) means print operand N in usual manner.
3605 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3606 and print the label name with no punctuation.
3607 %cN means require operand N to be a constant
3608 and print the constant expression with no punctuation.
3609 %aN means expect operand N to be a memory address
3610 (not a memory reference!) and print a reference
3611 to that address.
3612 %nN means expect operand N to be a constant
3613 and print a constant expression for minus the value
3614 of the operand, with no other punctuation. */
3616 void
3617 output_asm_insn (const char *templ, rtx *operands)
3619 const char *p;
3620 int c;
3621 #ifdef ASSEMBLER_DIALECT
3622 int dialect = 0;
3623 #endif
3624 int oporder[MAX_RECOG_OPERANDS];
3625 char opoutput[MAX_RECOG_OPERANDS];
3626 int ops = 0;
3628 /* An insn may return a null string template
3629 in a case where no assembler code is needed. */
3630 if (*templ == 0)
3631 return;
3633 memset (opoutput, 0, sizeof opoutput);
3634 p = templ;
3635 putc ('\t', asm_out_file);
3637 #ifdef ASM_OUTPUT_OPCODE
3638 ASM_OUTPUT_OPCODE (asm_out_file, p);
3639 #endif
3641 while ((c = *p++))
3642 switch (c)
3644 case '\n':
3645 if (flag_verbose_asm)
3646 output_asm_operand_names (operands, oporder, ops);
3647 if (flag_print_asm_name)
3648 output_asm_name ();
3650 ops = 0;
3651 memset (opoutput, 0, sizeof opoutput);
3653 putc (c, asm_out_file);
3654 #ifdef ASM_OUTPUT_OPCODE
3655 while ((c = *p) == '\t')
3657 putc (c, asm_out_file);
3658 p++;
3660 ASM_OUTPUT_OPCODE (asm_out_file, p);
3661 #endif
3662 break;
3664 #ifdef ASSEMBLER_DIALECT
3665 case '{':
3666 case '}':
3667 case '|':
3668 p = do_assembler_dialects (p, &dialect);
3669 break;
3670 #endif
3672 case '%':
3673 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3674 if ASSEMBLER_DIALECT defined and these characters have a special
3675 meaning as dialect delimiters.*/
3676 if (*p == '%'
3677 #ifdef ASSEMBLER_DIALECT
3678 || *p == '{' || *p == '}' || *p == '|'
3679 #endif
3682 putc (*p, asm_out_file);
3683 p++;
3685 /* %= outputs a number which is unique to each insn in the entire
3686 compilation. This is useful for making local labels that are
3687 referred to more than once in a given insn. */
3688 else if (*p == '=')
3690 p++;
3691 fprintf (asm_out_file, "%d", insn_counter);
3693 /* % followed by a letter and some digits
3694 outputs an operand in a special way depending on the letter.
3695 Letters `acln' are implemented directly.
3696 Other letters are passed to `output_operand' so that
3697 the TARGET_PRINT_OPERAND hook can define them. */
3698 else if (ISALPHA (*p))
3700 int letter = *p++;
3701 unsigned long opnum;
3702 char *endptr;
3704 opnum = strtoul (p, &endptr, 10);
3706 if (endptr == p)
3707 output_operand_lossage ("operand number missing "
3708 "after %%-letter");
3709 else if (this_is_asm_operands && opnum >= insn_noperands)
3710 output_operand_lossage ("operand number out of range");
3711 else if (letter == 'l')
3712 output_asm_label (operands[opnum]);
3713 else if (letter == 'a')
3714 output_address (VOIDmode, operands[opnum]);
3715 else if (letter == 'c')
3717 if (CONSTANT_ADDRESS_P (operands[opnum]))
3718 output_addr_const (asm_out_file, operands[opnum]);
3719 else
3720 output_operand (operands[opnum], 'c');
3722 else if (letter == 'n')
3724 if (CONST_INT_P (operands[opnum]))
3725 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3726 - INTVAL (operands[opnum]));
3727 else
3729 putc ('-', asm_out_file);
3730 output_addr_const (asm_out_file, operands[opnum]);
3733 else
3734 output_operand (operands[opnum], letter);
3736 if (!opoutput[opnum])
3737 oporder[ops++] = opnum;
3738 opoutput[opnum] = 1;
3740 p = endptr;
3741 c = *p;
3743 /* % followed by a digit outputs an operand the default way. */
3744 else if (ISDIGIT (*p))
3746 unsigned long opnum;
3747 char *endptr;
3749 opnum = strtoul (p, &endptr, 10);
3750 if (this_is_asm_operands && opnum >= insn_noperands)
3751 output_operand_lossage ("operand number out of range");
3752 else
3753 output_operand (operands[opnum], 0);
3755 if (!opoutput[opnum])
3756 oporder[ops++] = opnum;
3757 opoutput[opnum] = 1;
3759 p = endptr;
3760 c = *p;
3762 /* % followed by punctuation: output something for that
3763 punctuation character alone, with no operand. The
3764 TARGET_PRINT_OPERAND hook decides what is actually done. */
3765 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3766 output_operand (NULL_RTX, *p++);
3767 else
3768 output_operand_lossage ("invalid %%-code");
3769 break;
3771 default:
3772 putc (c, asm_out_file);
3775 /* Write out the variable names for operands, if we know them. */
3776 if (flag_verbose_asm)
3777 output_asm_operand_names (operands, oporder, ops);
3778 if (flag_print_asm_name)
3779 output_asm_name ();
3781 putc ('\n', asm_out_file);
3784 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3786 void
3787 output_asm_label (rtx x)
3789 char buf[256];
3791 if (GET_CODE (x) == LABEL_REF)
3792 x = LABEL_REF_LABEL (x);
3793 if (LABEL_P (x)
3794 || (NOTE_P (x)
3795 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3796 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3797 else
3798 output_operand_lossage ("'%%l' operand isn't a label");
3800 assemble_name (asm_out_file, buf);
3803 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3805 void
3806 mark_symbol_refs_as_used (rtx x)
3808 subrtx_iterator::array_type array;
3809 FOR_EACH_SUBRTX (iter, array, x, ALL)
3811 const_rtx x = *iter;
3812 if (GET_CODE (x) == SYMBOL_REF)
3813 if (tree t = SYMBOL_REF_DECL (x))
3814 assemble_external (t);
3818 /* Print operand X using machine-dependent assembler syntax.
3819 CODE is a non-digit that preceded the operand-number in the % spec,
3820 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3821 between the % and the digits.
3822 When CODE is a non-letter, X is 0.
3824 The meanings of the letters are machine-dependent and controlled
3825 by TARGET_PRINT_OPERAND. */
3827 void
3828 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3830 if (x && GET_CODE (x) == SUBREG)
3831 x = alter_subreg (&x, true);
3833 /* X must not be a pseudo reg. */
3834 if (!targetm.no_register_allocation)
3835 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3837 targetm.asm_out.print_operand (asm_out_file, x, code);
3839 if (x == NULL_RTX)
3840 return;
3842 mark_symbol_refs_as_used (x);
3845 /* Print a memory reference operand for address X using
3846 machine-dependent assembler syntax. */
3848 void
3849 output_address (machine_mode mode, rtx x)
3851 bool changed = false;
3852 walk_alter_subreg (&x, &changed);
3853 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3856 /* Print an integer constant expression in assembler syntax.
3857 Addition and subtraction are the only arithmetic
3858 that may appear in these expressions. */
3860 void
3861 output_addr_const (FILE *file, rtx x)
3863 char buf[256];
3865 restart:
3866 switch (GET_CODE (x))
3868 case PC:
3869 putc ('.', file);
3870 break;
3872 case SYMBOL_REF:
3873 if (SYMBOL_REF_DECL (x))
3874 assemble_external (SYMBOL_REF_DECL (x));
3875 #ifdef ASM_OUTPUT_SYMBOL_REF
3876 ASM_OUTPUT_SYMBOL_REF (file, x);
3877 #else
3878 assemble_name (file, XSTR (x, 0));
3879 #endif
3880 break;
3882 case LABEL_REF:
3883 x = LABEL_REF_LABEL (x);
3884 /* Fall through. */
3885 case CODE_LABEL:
3886 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3887 #ifdef ASM_OUTPUT_LABEL_REF
3888 ASM_OUTPUT_LABEL_REF (file, buf);
3889 #else
3890 assemble_name (file, buf);
3891 #endif
3892 break;
3894 case CONST_INT:
3895 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3896 break;
3898 case CONST:
3899 /* This used to output parentheses around the expression,
3900 but that does not work on the 386 (either ATT or BSD assembler). */
3901 output_addr_const (file, XEXP (x, 0));
3902 break;
3904 case CONST_WIDE_INT:
3905 /* We do not know the mode here so we have to use a round about
3906 way to build a wide-int to get it printed properly. */
3908 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3909 CONST_WIDE_INT_NUNITS (x),
3910 CONST_WIDE_INT_NUNITS (x)
3911 * HOST_BITS_PER_WIDE_INT,
3912 false);
3913 print_decs (w, file);
3915 break;
3917 case CONST_DOUBLE:
3918 if (CONST_DOUBLE_AS_INT_P (x))
3920 /* We can use %d if the number is one word and positive. */
3921 if (CONST_DOUBLE_HIGH (x))
3922 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3923 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3924 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3925 else if (CONST_DOUBLE_LOW (x) < 0)
3926 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3927 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3928 else
3929 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3931 else
3932 /* We can't handle floating point constants;
3933 PRINT_OPERAND must handle them. */
3934 output_operand_lossage ("floating constant misused");
3935 break;
3937 case CONST_FIXED:
3938 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3939 break;
3941 case PLUS:
3942 /* Some assemblers need integer constants to appear last (eg masm). */
3943 if (CONST_INT_P (XEXP (x, 0)))
3945 output_addr_const (file, XEXP (x, 1));
3946 if (INTVAL (XEXP (x, 0)) >= 0)
3947 fprintf (file, "+");
3948 output_addr_const (file, XEXP (x, 0));
3950 else
3952 output_addr_const (file, XEXP (x, 0));
3953 if (!CONST_INT_P (XEXP (x, 1))
3954 || INTVAL (XEXP (x, 1)) >= 0)
3955 fprintf (file, "+");
3956 output_addr_const (file, XEXP (x, 1));
3958 break;
3960 case MINUS:
3961 /* Avoid outputting things like x-x or x+5-x,
3962 since some assemblers can't handle that. */
3963 x = simplify_subtraction (x);
3964 if (GET_CODE (x) != MINUS)
3965 goto restart;
3967 output_addr_const (file, XEXP (x, 0));
3968 fprintf (file, "-");
3969 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3970 || GET_CODE (XEXP (x, 1)) == PC
3971 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3972 output_addr_const (file, XEXP (x, 1));
3973 else
3975 fputs (targetm.asm_out.open_paren, file);
3976 output_addr_const (file, XEXP (x, 1));
3977 fputs (targetm.asm_out.close_paren, file);
3979 break;
3981 case ZERO_EXTEND:
3982 case SIGN_EXTEND:
3983 case SUBREG:
3984 case TRUNCATE:
3985 output_addr_const (file, XEXP (x, 0));
3986 break;
3988 default:
3989 if (targetm.asm_out.output_addr_const_extra (file, x))
3990 break;
3992 output_operand_lossage ("invalid expression as operand");
3996 /* Output a quoted string. */
3998 void
3999 output_quoted_string (FILE *asm_file, const char *string)
4001 #ifdef OUTPUT_QUOTED_STRING
4002 OUTPUT_QUOTED_STRING (asm_file, string);
4003 #else
4004 char c;
4006 putc ('\"', asm_file);
4007 while ((c = *string++) != 0)
4009 if (ISPRINT (c))
4011 if (c == '\"' || c == '\\')
4012 putc ('\\', asm_file);
4013 putc (c, asm_file);
4015 else
4016 fprintf (asm_file, "\\%03o", (unsigned char) c);
4018 putc ('\"', asm_file);
4019 #endif
4022 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4024 void
4025 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4027 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4028 if (value == 0)
4029 putc ('0', f);
4030 else
4032 char *p = buf + sizeof (buf);
4034 *--p = "0123456789abcdef"[value % 16];
4035 while ((value /= 16) != 0);
4036 *--p = 'x';
4037 *--p = '0';
4038 fwrite (p, 1, buf + sizeof (buf) - p, f);
4042 /* Internal function that prints an unsigned long in decimal in reverse.
4043 The output string IS NOT null-terminated. */
4045 static int
4046 sprint_ul_rev (char *s, unsigned long value)
4048 int i = 0;
4051 s[i] = "0123456789"[value % 10];
4052 value /= 10;
4053 i++;
4054 /* alternate version, without modulo */
4055 /* oldval = value; */
4056 /* value /= 10; */
4057 /* s[i] = "0123456789" [oldval - 10*value]; */
4058 /* i++ */
4060 while (value != 0);
4061 return i;
4064 /* Write an unsigned long as decimal to a file, fast. */
4066 void
4067 fprint_ul (FILE *f, unsigned long value)
4069 /* python says: len(str(2**64)) == 20 */
4070 char s[20];
4071 int i;
4073 i = sprint_ul_rev (s, value);
4075 /* It's probably too small to bother with string reversal and fputs. */
4078 i--;
4079 putc (s[i], f);
4081 while (i != 0);
4084 /* Write an unsigned long as decimal to a string, fast.
4085 s must be wide enough to not overflow, at least 21 chars.
4086 Returns the length of the string (without terminating '\0'). */
4089 sprint_ul (char *s, unsigned long value)
4091 int len = sprint_ul_rev (s, value);
4092 s[len] = '\0';
4094 std::reverse (s, s + len);
4095 return len;
4098 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4099 %R prints the value of REGISTER_PREFIX.
4100 %L prints the value of LOCAL_LABEL_PREFIX.
4101 %U prints the value of USER_LABEL_PREFIX.
4102 %I prints the value of IMMEDIATE_PREFIX.
4103 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4104 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4106 We handle alternate assembler dialects here, just like output_asm_insn. */
4108 void
4109 asm_fprintf (FILE *file, const char *p, ...)
4111 char buf[10];
4112 char *q, c;
4113 #ifdef ASSEMBLER_DIALECT
4114 int dialect = 0;
4115 #endif
4116 va_list argptr;
4118 va_start (argptr, p);
4120 buf[0] = '%';
4122 while ((c = *p++))
4123 switch (c)
4125 #ifdef ASSEMBLER_DIALECT
4126 case '{':
4127 case '}':
4128 case '|':
4129 p = do_assembler_dialects (p, &dialect);
4130 break;
4131 #endif
4133 case '%':
4134 c = *p++;
4135 q = &buf[1];
4136 while (strchr ("-+ #0", c))
4138 *q++ = c;
4139 c = *p++;
4141 while (ISDIGIT (c) || c == '.')
4143 *q++ = c;
4144 c = *p++;
4146 switch (c)
4148 case '%':
4149 putc ('%', file);
4150 break;
4152 case 'd': case 'i': case 'u':
4153 case 'x': case 'X': case 'o':
4154 case 'c':
4155 *q++ = c;
4156 *q = 0;
4157 fprintf (file, buf, va_arg (argptr, int));
4158 break;
4160 case 'w':
4161 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4162 'o' cases, but we do not check for those cases. It
4163 means that the value is a HOST_WIDE_INT, which may be
4164 either `long' or `long long'. */
4165 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4166 q += strlen (HOST_WIDE_INT_PRINT);
4167 *q++ = *p++;
4168 *q = 0;
4169 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4170 break;
4172 case 'l':
4173 *q++ = c;
4174 #ifdef HAVE_LONG_LONG
4175 if (*p == 'l')
4177 *q++ = *p++;
4178 *q++ = *p++;
4179 *q = 0;
4180 fprintf (file, buf, va_arg (argptr, long long));
4182 else
4183 #endif
4185 *q++ = *p++;
4186 *q = 0;
4187 fprintf (file, buf, va_arg (argptr, long));
4190 break;
4192 case 's':
4193 *q++ = c;
4194 *q = 0;
4195 fprintf (file, buf, va_arg (argptr, char *));
4196 break;
4198 case 'O':
4199 #ifdef ASM_OUTPUT_OPCODE
4200 ASM_OUTPUT_OPCODE (asm_out_file, p);
4201 #endif
4202 break;
4204 case 'R':
4205 #ifdef REGISTER_PREFIX
4206 fprintf (file, "%s", REGISTER_PREFIX);
4207 #endif
4208 break;
4210 case 'I':
4211 #ifdef IMMEDIATE_PREFIX
4212 fprintf (file, "%s", IMMEDIATE_PREFIX);
4213 #endif
4214 break;
4216 case 'L':
4217 #ifdef LOCAL_LABEL_PREFIX
4218 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4219 #endif
4220 break;
4222 case 'U':
4223 fputs (user_label_prefix, file);
4224 break;
4226 #ifdef ASM_FPRINTF_EXTENSIONS
4227 /* Uppercase letters are reserved for general use by asm_fprintf
4228 and so are not available to target specific code. In order to
4229 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4230 they are defined here. As they get turned into real extensions
4231 to asm_fprintf they should be removed from this list. */
4232 case 'A': case 'B': case 'C': case 'D': case 'E':
4233 case 'F': case 'G': case 'H': case 'J': case 'K':
4234 case 'M': case 'N': case 'P': case 'Q': case 'S':
4235 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4236 break;
4238 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4239 #endif
4240 default:
4241 gcc_unreachable ();
4243 break;
4245 default:
4246 putc (c, file);
4248 va_end (argptr);
4251 /* Return nonzero if this function has no function calls. */
4254 leaf_function_p (void)
4256 rtx_insn *insn;
4258 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4259 functions even if they call mcount. */
4260 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4261 return 0;
4263 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4265 if (CALL_P (insn)
4266 && ! SIBLING_CALL_P (insn))
4267 return 0;
4268 if (NONJUMP_INSN_P (insn)
4269 && GET_CODE (PATTERN (insn)) == SEQUENCE
4270 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4271 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4272 return 0;
4275 return 1;
4278 /* Return 1 if branch is a forward branch.
4279 Uses insn_shuid array, so it works only in the final pass. May be used by
4280 output templates to customary add branch prediction hints.
4283 final_forward_branch_p (rtx_insn *insn)
4285 int insn_id, label_id;
4287 gcc_assert (uid_shuid);
4288 insn_id = INSN_SHUID (insn);
4289 label_id = INSN_SHUID (JUMP_LABEL (insn));
4290 /* We've hit some insns that does not have id information available. */
4291 gcc_assert (insn_id && label_id);
4292 return insn_id < label_id;
4295 /* On some machines, a function with no call insns
4296 can run faster if it doesn't create its own register window.
4297 When output, the leaf function should use only the "output"
4298 registers. Ordinarily, the function would be compiled to use
4299 the "input" registers to find its arguments; it is a candidate
4300 for leaf treatment if it uses only the "input" registers.
4301 Leaf function treatment means renumbering so the function
4302 uses the "output" registers instead. */
4304 #ifdef LEAF_REGISTERS
4306 /* Return 1 if this function uses only the registers that can be
4307 safely renumbered. */
4310 only_leaf_regs_used (void)
4312 int i;
4313 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4315 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4316 if ((df_regs_ever_live_p (i) || global_regs[i])
4317 && ! permitted_reg_in_leaf_functions[i])
4318 return 0;
4320 if (crtl->uses_pic_offset_table
4321 && pic_offset_table_rtx != 0
4322 && REG_P (pic_offset_table_rtx)
4323 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4324 return 0;
4326 return 1;
4329 /* Scan all instructions and renumber all registers into those
4330 available in leaf functions. */
4332 static void
4333 leaf_renumber_regs (rtx_insn *first)
4335 rtx_insn *insn;
4337 /* Renumber only the actual patterns.
4338 The reg-notes can contain frame pointer refs,
4339 and renumbering them could crash, and should not be needed. */
4340 for (insn = first; insn; insn = NEXT_INSN (insn))
4341 if (INSN_P (insn))
4342 leaf_renumber_regs_insn (PATTERN (insn));
4345 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4346 available in leaf functions. */
4348 void
4349 leaf_renumber_regs_insn (rtx in_rtx)
4351 int i, j;
4352 const char *format_ptr;
4354 if (in_rtx == 0)
4355 return;
4357 /* Renumber all input-registers into output-registers.
4358 renumbered_regs would be 1 for an output-register;
4359 they */
4361 if (REG_P (in_rtx))
4363 int newreg;
4365 /* Don't renumber the same reg twice. */
4366 if (in_rtx->used)
4367 return;
4369 newreg = REGNO (in_rtx);
4370 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4371 to reach here as part of a REG_NOTE. */
4372 if (newreg >= FIRST_PSEUDO_REGISTER)
4374 in_rtx->used = 1;
4375 return;
4377 newreg = LEAF_REG_REMAP (newreg);
4378 gcc_assert (newreg >= 0);
4379 df_set_regs_ever_live (REGNO (in_rtx), false);
4380 df_set_regs_ever_live (newreg, true);
4381 SET_REGNO (in_rtx, newreg);
4382 in_rtx->used = 1;
4383 return;
4386 if (INSN_P (in_rtx))
4388 /* Inside a SEQUENCE, we find insns.
4389 Renumber just the patterns of these insns,
4390 just as we do for the top-level insns. */
4391 leaf_renumber_regs_insn (PATTERN (in_rtx));
4392 return;
4395 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4397 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4398 switch (*format_ptr++)
4400 case 'e':
4401 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4402 break;
4404 case 'E':
4405 if (NULL != XVEC (in_rtx, i))
4407 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4408 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4410 break;
4412 case 'S':
4413 case 's':
4414 case '0':
4415 case 'i':
4416 case 'w':
4417 case 'n':
4418 case 'u':
4419 break;
4421 default:
4422 gcc_unreachable ();
4425 #endif
4427 /* Turn the RTL into assembly. */
4428 static unsigned int
4429 rest_of_handle_final (void)
4431 const char *fnname = get_fnname_from_decl (current_function_decl);
4433 assemble_start_function (current_function_decl, fnname);
4434 final_start_function (get_insns (), asm_out_file, optimize);
4435 final (get_insns (), asm_out_file, optimize);
4436 if (flag_ipa_ra)
4437 collect_fn_hard_reg_usage ();
4438 final_end_function ();
4440 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4441 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4442 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4443 output_function_exception_table (fnname);
4445 assemble_end_function (current_function_decl, fnname);
4447 user_defined_section_attribute = false;
4449 /* Free up reg info memory. */
4450 free_reg_info ();
4452 if (! quiet_flag)
4453 fflush (asm_out_file);
4455 /* Write DBX symbols if requested. */
4457 /* Note that for those inline functions where we don't initially
4458 know for certain that we will be generating an out-of-line copy,
4459 the first invocation of this routine (rest_of_compilation) will
4460 skip over this code by doing a `goto exit_rest_of_compilation;'.
4461 Later on, wrapup_global_declarations will (indirectly) call
4462 rest_of_compilation again for those inline functions that need
4463 to have out-of-line copies generated. During that call, we
4464 *will* be routed past here. */
4466 timevar_push (TV_SYMOUT);
4467 if (!DECL_IGNORED_P (current_function_decl))
4468 debug_hooks->function_decl (current_function_decl);
4469 timevar_pop (TV_SYMOUT);
4471 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4472 DECL_INITIAL (current_function_decl) = error_mark_node;
4474 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4475 && targetm.have_ctors_dtors)
4476 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4477 decl_init_priority_lookup
4478 (current_function_decl));
4479 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4480 && targetm.have_ctors_dtors)
4481 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4482 decl_fini_priority_lookup
4483 (current_function_decl));
4484 return 0;
4487 namespace {
4489 const pass_data pass_data_final =
4491 RTL_PASS, /* type */
4492 "final", /* name */
4493 OPTGROUP_NONE, /* optinfo_flags */
4494 TV_FINAL, /* tv_id */
4495 0, /* properties_required */
4496 0, /* properties_provided */
4497 0, /* properties_destroyed */
4498 0, /* todo_flags_start */
4499 0, /* todo_flags_finish */
4502 class pass_final : public rtl_opt_pass
4504 public:
4505 pass_final (gcc::context *ctxt)
4506 : rtl_opt_pass (pass_data_final, ctxt)
4509 /* opt_pass methods: */
4510 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4512 }; // class pass_final
4514 } // anon namespace
4516 rtl_opt_pass *
4517 make_pass_final (gcc::context *ctxt)
4519 return new pass_final (ctxt);
4523 static unsigned int
4524 rest_of_handle_shorten_branches (void)
4526 /* Shorten branches. */
4527 shorten_branches (get_insns ());
4528 return 0;
4531 namespace {
4533 const pass_data pass_data_shorten_branches =
4535 RTL_PASS, /* type */
4536 "shorten", /* name */
4537 OPTGROUP_NONE, /* optinfo_flags */
4538 TV_SHORTEN_BRANCH, /* tv_id */
4539 0, /* properties_required */
4540 0, /* properties_provided */
4541 0, /* properties_destroyed */
4542 0, /* todo_flags_start */
4543 0, /* todo_flags_finish */
4546 class pass_shorten_branches : public rtl_opt_pass
4548 public:
4549 pass_shorten_branches (gcc::context *ctxt)
4550 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4553 /* opt_pass methods: */
4554 virtual unsigned int execute (function *)
4556 return rest_of_handle_shorten_branches ();
4559 }; // class pass_shorten_branches
4561 } // anon namespace
4563 rtl_opt_pass *
4564 make_pass_shorten_branches (gcc::context *ctxt)
4566 return new pass_shorten_branches (ctxt);
4570 static unsigned int
4571 rest_of_clean_state (void)
4573 rtx_insn *insn, *next;
4574 FILE *final_output = NULL;
4575 int save_unnumbered = flag_dump_unnumbered;
4576 int save_noaddr = flag_dump_noaddr;
4578 if (flag_dump_final_insns)
4580 final_output = fopen (flag_dump_final_insns, "a");
4581 if (!final_output)
4583 error ("could not open final insn dump file %qs: %m",
4584 flag_dump_final_insns);
4585 flag_dump_final_insns = NULL;
4587 else
4589 flag_dump_noaddr = flag_dump_unnumbered = 1;
4590 if (flag_compare_debug_opt || flag_compare_debug)
4591 dump_flags |= TDF_NOUID;
4592 dump_function_header (final_output, current_function_decl,
4593 dump_flags);
4594 final_insns_dump_p = true;
4596 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4597 if (LABEL_P (insn))
4598 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4599 else
4601 if (NOTE_P (insn))
4602 set_block_for_insn (insn, NULL);
4603 INSN_UID (insn) = 0;
4608 /* It is very important to decompose the RTL instruction chain here:
4609 debug information keeps pointing into CODE_LABEL insns inside the function
4610 body. If these remain pointing to the other insns, we end up preserving
4611 whole RTL chain and attached detailed debug info in memory. */
4612 for (insn = get_insns (); insn; insn = next)
4614 next = NEXT_INSN (insn);
4615 SET_NEXT_INSN (insn) = NULL;
4616 SET_PREV_INSN (insn) = NULL;
4618 if (final_output
4619 && (!NOTE_P (insn) ||
4620 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4621 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4622 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4623 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4624 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4625 print_rtl_single (final_output, insn);
4628 if (final_output)
4630 flag_dump_noaddr = save_noaddr;
4631 flag_dump_unnumbered = save_unnumbered;
4632 final_insns_dump_p = false;
4634 if (fclose (final_output))
4636 error ("could not close final insn dump file %qs: %m",
4637 flag_dump_final_insns);
4638 flag_dump_final_insns = NULL;
4642 /* In case the function was not output,
4643 don't leave any temporary anonymous types
4644 queued up for sdb output. */
4645 if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
4646 sdbout_types (NULL_TREE);
4648 flag_rerun_cse_after_global_opts = 0;
4649 reload_completed = 0;
4650 epilogue_completed = 0;
4651 #ifdef STACK_REGS
4652 regstack_completed = 0;
4653 #endif
4655 /* Clear out the insn_length contents now that they are no
4656 longer valid. */
4657 init_insn_lengths ();
4659 /* Show no temporary slots allocated. */
4660 init_temp_slots ();
4662 free_bb_for_insn ();
4664 delete_tree_ssa (cfun);
4666 /* We can reduce stack alignment on call site only when we are sure that
4667 the function body just produced will be actually used in the final
4668 executable. */
4669 if (decl_binds_to_current_def_p (current_function_decl))
4671 unsigned int pref = crtl->preferred_stack_boundary;
4672 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4673 pref = crtl->stack_alignment_needed;
4674 cgraph_node::rtl_info (current_function_decl)
4675 ->preferred_incoming_stack_boundary = pref;
4678 /* Make sure volatile mem refs aren't considered valid operands for
4679 arithmetic insns. We must call this here if this is a nested inline
4680 function, since the above code leaves us in the init_recog state,
4681 and the function context push/pop code does not save/restore volatile_ok.
4683 ??? Maybe it isn't necessary for expand_start_function to call this
4684 anymore if we do it here? */
4686 init_recog_no_volatile ();
4688 /* We're done with this function. Free up memory if we can. */
4689 free_after_parsing (cfun);
4690 free_after_compilation (cfun);
4691 return 0;
4694 namespace {
4696 const pass_data pass_data_clean_state =
4698 RTL_PASS, /* type */
4699 "*clean_state", /* name */
4700 OPTGROUP_NONE, /* optinfo_flags */
4701 TV_FINAL, /* tv_id */
4702 0, /* properties_required */
4703 0, /* properties_provided */
4704 PROP_rtl, /* properties_destroyed */
4705 0, /* todo_flags_start */
4706 0, /* todo_flags_finish */
4709 class pass_clean_state : public rtl_opt_pass
4711 public:
4712 pass_clean_state (gcc::context *ctxt)
4713 : rtl_opt_pass (pass_data_clean_state, ctxt)
4716 /* opt_pass methods: */
4717 virtual unsigned int execute (function *)
4719 return rest_of_clean_state ();
4722 }; // class pass_clean_state
4724 } // anon namespace
4726 rtl_opt_pass *
4727 make_pass_clean_state (gcc::context *ctxt)
4729 return new pass_clean_state (ctxt);
4732 /* Return true if INSN is a call to the current function. */
4734 static bool
4735 self_recursive_call_p (rtx_insn *insn)
4737 tree fndecl = get_call_fndecl (insn);
4738 return (fndecl == current_function_decl
4739 && decl_binds_to_current_def_p (fndecl));
4742 /* Collect hard register usage for the current function. */
4744 static void
4745 collect_fn_hard_reg_usage (void)
4747 rtx_insn *insn;
4748 #ifdef STACK_REGS
4749 int i;
4750 #endif
4751 struct cgraph_rtl_info *node;
4752 HARD_REG_SET function_used_regs;
4754 /* ??? To be removed when all the ports have been fixed. */
4755 if (!targetm.call_fusage_contains_non_callee_clobbers)
4756 return;
4758 CLEAR_HARD_REG_SET (function_used_regs);
4760 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4762 HARD_REG_SET insn_used_regs;
4764 if (!NONDEBUG_INSN_P (insn))
4765 continue;
4767 if (CALL_P (insn)
4768 && !self_recursive_call_p (insn))
4770 if (!get_call_reg_set_usage (insn, &insn_used_regs,
4771 call_used_reg_set))
4772 return;
4774 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4777 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4778 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4781 /* Be conservative - mark fixed and global registers as used. */
4782 IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4784 #ifdef STACK_REGS
4785 /* Handle STACK_REGS conservatively, since the df-framework does not
4786 provide accurate information for them. */
4788 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4789 SET_HARD_REG_BIT (function_used_regs, i);
4790 #endif
4792 /* The information we have gathered is only interesting if it exposes a
4793 register from the call_used_regs that is not used in this function. */
4794 if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4795 return;
4797 node = cgraph_node::rtl_info (current_function_decl);
4798 gcc_assert (node != NULL);
4800 COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4801 node->function_used_regs_valid = 1;
4804 /* Get the declaration of the function called by INSN. */
4806 static tree
4807 get_call_fndecl (rtx_insn *insn)
4809 rtx note, datum;
4811 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4812 if (note == NULL_RTX)
4813 return NULL_TREE;
4815 datum = XEXP (note, 0);
4816 if (datum != NULL_RTX)
4817 return SYMBOL_REF_DECL (datum);
4819 return NULL_TREE;
4822 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4823 call targets that can be overwritten. */
4825 static struct cgraph_rtl_info *
4826 get_call_cgraph_rtl_info (rtx_insn *insn)
4828 tree fndecl;
4830 if (insn == NULL_RTX)
4831 return NULL;
4833 fndecl = get_call_fndecl (insn);
4834 if (fndecl == NULL_TREE
4835 || !decl_binds_to_current_def_p (fndecl))
4836 return NULL;
4838 return cgraph_node::rtl_info (fndecl);
4841 /* Find hard registers used by function call instruction INSN, and return them
4842 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4844 bool
4845 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4846 HARD_REG_SET default_set)
4848 if (flag_ipa_ra)
4850 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4851 if (node != NULL
4852 && node->function_used_regs_valid)
4854 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4855 AND_HARD_REG_SET (*reg_set, default_set);
4856 return true;
4860 COPY_HARD_REG_SET (*reg_set, default_set);
4861 return false;