Clean up some minor white space issues in trans-decl.c and trans-expr.c
[official-gcc.git] / gcc / final.c
blob7af6b612ec7d8e9aa29fe275315cc22e2b16b6ac
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "target.h"
50 #include "rtl.h"
51 #include "tree.h"
52 #include "cfghooks.h"
53 #include "df.h"
54 #include "tm_p.h"
55 #include "insn-config.h"
56 #include "regs.h"
57 #include "emit-rtl.h"
58 #include "recog.h"
59 #include "cgraph.h"
60 #include "tree-pretty-print.h" /* for dump_function_header */
61 #include "varasm.h"
62 #include "insn-attr.h"
63 #include "conditions.h"
64 #include "flags.h"
65 #include "output.h"
66 #include "except.h"
67 #include "rtl-error.h"
68 #include "toplev.h" /* exact_log2, floor_log2 */
69 #include "reload.h"
70 #include "intl.h"
71 #include "cfgrtl.h"
72 #include "debug.h"
73 #include "tree-pass.h"
74 #include "tree-ssa.h"
75 #include "cfgloop.h"
76 #include "params.h"
77 #include "asan.h"
78 #include "rtl-iter.h"
79 #include "print-rtl.h"
81 #ifdef XCOFF_DEBUGGING_INFO
82 #include "xcoffout.h" /* Needed for external data declarations. */
83 #endif
85 #include "dwarf2out.h"
87 #ifdef DBX_DEBUGGING_INFO
88 #include "dbxout.h"
89 #endif
91 #include "sdbout.h"
93 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
94 So define a null default for it to save conditionalization later. */
95 #ifndef CC_STATUS_INIT
96 #define CC_STATUS_INIT
97 #endif
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
102 #endif
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
108 /* Bitflags used by final_scan_insn. */
109 #define SEEN_NOTE 1
110 #define SEEN_EMITTED 2
112 /* Last insn processed by final_scan_insn. */
113 static rtx_insn *debug_insn;
114 rtx_insn *current_output_insn;
116 /* Line number of last NOTE. */
117 static int last_linenum;
119 /* Last discriminator written to assembly. */
120 static int last_discriminator;
122 /* Discriminator of current block. */
123 static int discriminator;
125 /* Highest line number in current block. */
126 static int high_block_linenum;
128 /* Likewise for function. */
129 static int high_function_linenum;
131 /* Filename of last NOTE. */
132 static const char *last_filename;
134 /* Override filename and line number. */
135 static const char *override_filename;
136 static int override_linenum;
138 /* Whether to force emission of a line note before the next insn. */
139 static bool force_source_line = false;
141 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
143 /* Nonzero while outputting an `asm' with operands.
144 This means that inconsistencies are the user's fault, so don't die.
145 The precise value is the insn being output, to pass to error_for_asm. */
146 const rtx_insn *this_is_asm_operands;
148 /* Number of operands of this insn, for an `asm' with operands. */
149 static unsigned int insn_noperands;
151 /* Compare optimization flag. */
153 static rtx last_ignored_compare = 0;
155 /* Assign a unique number to each insn that is output.
156 This can be used to generate unique local labels. */
158 static int insn_counter = 0;
160 /* This variable contains machine-dependent flags (defined in tm.h)
161 set and examined by output routines
162 that describe how to interpret the condition codes properly. */
164 CC_STATUS cc_status;
166 /* During output of an insn, this contains a copy of cc_status
167 from before the insn. */
169 CC_STATUS cc_prev_status;
171 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
173 static int block_depth;
175 /* Nonzero if have enabled APP processing of our assembler output. */
177 static int app_on;
179 /* If we are outputting an insn sequence, this contains the sequence rtx.
180 Zero otherwise. */
182 rtx_sequence *final_sequence;
184 #ifdef ASSEMBLER_DIALECT
186 /* Number of the assembler dialect to use, starting at 0. */
187 static int dialect_number;
188 #endif
190 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
191 rtx current_insn_predicate;
193 /* True if printing into -fdump-final-insns= dump. */
194 bool final_insns_dump_p;
196 /* True if profile_function should be called, but hasn't been called yet. */
197 static bool need_profile_function;
199 static int asm_insn_count (rtx);
200 static void profile_function (FILE *);
201 static void profile_after_prologue (FILE *);
202 static bool notice_source_line (rtx_insn *, bool *);
203 static rtx walk_alter_subreg (rtx *, bool *);
204 static void output_asm_name (void);
205 static void output_alternate_entry_point (FILE *, rtx_insn *);
206 static tree get_mem_expr_from_op (rtx, int *);
207 static void output_asm_operand_names (rtx *, int *, int);
208 #ifdef LEAF_REGISTERS
209 static void leaf_renumber_regs (rtx_insn *);
210 #endif
211 #if HAVE_cc0
212 static int alter_cond (rtx);
213 #endif
214 #ifndef ADDR_VEC_ALIGN
215 static int final_addr_vec_align (rtx);
216 #endif
217 static int align_fuzz (rtx, rtx, int, unsigned);
218 static void collect_fn_hard_reg_usage (void);
219 static tree get_call_fndecl (rtx_insn *);
221 /* Initialize data in final at the beginning of a compilation. */
223 void
224 init_final (const char *filename ATTRIBUTE_UNUSED)
226 app_on = 0;
227 final_sequence = 0;
229 #ifdef ASSEMBLER_DIALECT
230 dialect_number = ASSEMBLER_DIALECT;
231 #endif
234 /* Default target function prologue and epilogue assembler output.
236 If not overridden for epilogue code, then the function body itself
237 contains return instructions wherever needed. */
238 void
239 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
240 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
244 void
245 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
246 tree decl ATTRIBUTE_UNUSED,
247 bool new_is_cold ATTRIBUTE_UNUSED)
251 /* Default target hook that outputs nothing to a stream. */
252 void
253 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
257 /* Enable APP processing of subsequent output.
258 Used before the output from an `asm' statement. */
260 void
261 app_enable (void)
263 if (! app_on)
265 fputs (ASM_APP_ON, asm_out_file);
266 app_on = 1;
270 /* Disable APP processing of subsequent output.
271 Called from varasm.c before most kinds of output. */
273 void
274 app_disable (void)
276 if (app_on)
278 fputs (ASM_APP_OFF, asm_out_file);
279 app_on = 0;
283 /* Return the number of slots filled in the current
284 delayed branch sequence (we don't count the insn needing the
285 delay slot). Zero if not in a delayed branch sequence. */
288 dbr_sequence_length (void)
290 if (final_sequence != 0)
291 return XVECLEN (final_sequence, 0) - 1;
292 else
293 return 0;
296 /* The next two pages contain routines used to compute the length of an insn
297 and to shorten branches. */
299 /* Arrays for insn lengths, and addresses. The latter is referenced by
300 `insn_current_length'. */
302 static int *insn_lengths;
304 vec<int> insn_addresses_;
306 /* Max uid for which the above arrays are valid. */
307 static int insn_lengths_max_uid;
309 /* Address of insn being processed. Used by `insn_current_length'. */
310 int insn_current_address;
312 /* Address of insn being processed in previous iteration. */
313 int insn_last_address;
315 /* known invariant alignment of insn being processed. */
316 int insn_current_align;
318 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
319 gives the next following alignment insn that increases the known
320 alignment, or NULL_RTX if there is no such insn.
321 For any alignment obtained this way, we can again index uid_align with
322 its uid to obtain the next following align that in turn increases the
323 alignment, till we reach NULL_RTX; the sequence obtained this way
324 for each insn we'll call the alignment chain of this insn in the following
325 comments. */
327 struct label_alignment
329 short alignment;
330 short max_skip;
333 static rtx *uid_align;
334 static int *uid_shuid;
335 static struct label_alignment *label_align;
337 /* Indicate that branch shortening hasn't yet been done. */
339 void
340 init_insn_lengths (void)
342 if (uid_shuid)
344 free (uid_shuid);
345 uid_shuid = 0;
347 if (insn_lengths)
349 free (insn_lengths);
350 insn_lengths = 0;
351 insn_lengths_max_uid = 0;
353 if (HAVE_ATTR_length)
354 INSN_ADDRESSES_FREE ();
355 if (uid_align)
357 free (uid_align);
358 uid_align = 0;
362 /* Obtain the current length of an insn. If branch shortening has been done,
363 get its actual length. Otherwise, use FALLBACK_FN to calculate the
364 length. */
365 static int
366 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
368 rtx body;
369 int i;
370 int length = 0;
372 if (!HAVE_ATTR_length)
373 return 0;
375 if (insn_lengths_max_uid > INSN_UID (insn))
376 return insn_lengths[INSN_UID (insn)];
377 else
378 switch (GET_CODE (insn))
380 case NOTE:
381 case BARRIER:
382 case CODE_LABEL:
383 case DEBUG_INSN:
384 return 0;
386 case CALL_INSN:
387 case JUMP_INSN:
388 length = fallback_fn (insn);
389 break;
391 case INSN:
392 body = PATTERN (insn);
393 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
394 return 0;
396 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
397 length = asm_insn_count (body) * fallback_fn (insn);
398 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
399 for (i = 0; i < seq->len (); i++)
400 length += get_attr_length_1 (seq->insn (i), fallback_fn);
401 else
402 length = fallback_fn (insn);
403 break;
405 default:
406 break;
409 #ifdef ADJUST_INSN_LENGTH
410 ADJUST_INSN_LENGTH (insn, length);
411 #endif
412 return length;
415 /* Obtain the current length of an insn. If branch shortening has been done,
416 get its actual length. Otherwise, get its maximum length. */
418 get_attr_length (rtx_insn *insn)
420 return get_attr_length_1 (insn, insn_default_length);
423 /* Obtain the current length of an insn. If branch shortening has been done,
424 get its actual length. Otherwise, get its minimum length. */
426 get_attr_min_length (rtx_insn *insn)
428 return get_attr_length_1 (insn, insn_min_length);
431 /* Code to handle alignment inside shorten_branches. */
433 /* Here is an explanation how the algorithm in align_fuzz can give
434 proper results:
436 Call a sequence of instructions beginning with alignment point X
437 and continuing until the next alignment point `block X'. When `X'
438 is used in an expression, it means the alignment value of the
439 alignment point.
441 Call the distance between the start of the first insn of block X, and
442 the end of the last insn of block X `IX', for the `inner size of X'.
443 This is clearly the sum of the instruction lengths.
445 Likewise with the next alignment-delimited block following X, which we
446 shall call block Y.
448 Call the distance between the start of the first insn of block X, and
449 the start of the first insn of block Y `OX', for the `outer size of X'.
451 The estimated padding is then OX - IX.
453 OX can be safely estimated as
455 if (X >= Y)
456 OX = round_up(IX, Y)
457 else
458 OX = round_up(IX, X) + Y - X
460 Clearly est(IX) >= real(IX), because that only depends on the
461 instruction lengths, and those being overestimated is a given.
463 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
464 we needn't worry about that when thinking about OX.
466 When X >= Y, the alignment provided by Y adds no uncertainty factor
467 for branch ranges starting before X, so we can just round what we have.
468 But when X < Y, we don't know anything about the, so to speak,
469 `middle bits', so we have to assume the worst when aligning up from an
470 address mod X to one mod Y, which is Y - X. */
472 #ifndef LABEL_ALIGN
473 #define LABEL_ALIGN(LABEL) align_labels_log
474 #endif
476 #ifndef LOOP_ALIGN
477 #define LOOP_ALIGN(LABEL) align_loops_log
478 #endif
480 #ifndef LABEL_ALIGN_AFTER_BARRIER
481 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
482 #endif
484 #ifndef JUMP_ALIGN
485 #define JUMP_ALIGN(LABEL) align_jumps_log
486 #endif
489 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
491 return 0;
495 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
497 return align_loops_max_skip;
501 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
503 return align_labels_max_skip;
507 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
509 return align_jumps_max_skip;
512 #ifndef ADDR_VEC_ALIGN
513 static int
514 final_addr_vec_align (rtx addr_vec)
516 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
518 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
519 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
520 return exact_log2 (align);
524 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
525 #endif
527 #ifndef INSN_LENGTH_ALIGNMENT
528 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
529 #endif
531 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
533 static int min_labelno, max_labelno;
535 #define LABEL_TO_ALIGNMENT(LABEL) \
536 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
538 #define LABEL_TO_MAX_SKIP(LABEL) \
539 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
541 /* For the benefit of port specific code do this also as a function. */
544 label_to_alignment (rtx label)
546 if (CODE_LABEL_NUMBER (label) <= max_labelno)
547 return LABEL_TO_ALIGNMENT (label);
548 return 0;
552 label_to_max_skip (rtx label)
554 if (CODE_LABEL_NUMBER (label) <= max_labelno)
555 return LABEL_TO_MAX_SKIP (label);
556 return 0;
559 /* The differences in addresses
560 between a branch and its target might grow or shrink depending on
561 the alignment the start insn of the range (the branch for a forward
562 branch or the label for a backward branch) starts out on; if these
563 differences are used naively, they can even oscillate infinitely.
564 We therefore want to compute a 'worst case' address difference that
565 is independent of the alignment the start insn of the range end
566 up on, and that is at least as large as the actual difference.
567 The function align_fuzz calculates the amount we have to add to the
568 naively computed difference, by traversing the part of the alignment
569 chain of the start insn of the range that is in front of the end insn
570 of the range, and considering for each alignment the maximum amount
571 that it might contribute to a size increase.
573 For casesi tables, we also want to know worst case minimum amounts of
574 address difference, in case a machine description wants to introduce
575 some common offset that is added to all offsets in a table.
576 For this purpose, align_fuzz with a growth argument of 0 computes the
577 appropriate adjustment. */
579 /* Compute the maximum delta by which the difference of the addresses of
580 START and END might grow / shrink due to a different address for start
581 which changes the size of alignment insns between START and END.
582 KNOWN_ALIGN_LOG is the alignment known for START.
583 GROWTH should be ~0 if the objective is to compute potential code size
584 increase, and 0 if the objective is to compute potential shrink.
585 The return value is undefined for any other value of GROWTH. */
587 static int
588 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
590 int uid = INSN_UID (start);
591 rtx align_label;
592 int known_align = 1 << known_align_log;
593 int end_shuid = INSN_SHUID (end);
594 int fuzz = 0;
596 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
598 int align_addr, new_align;
600 uid = INSN_UID (align_label);
601 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
602 if (uid_shuid[uid] > end_shuid)
603 break;
604 known_align_log = LABEL_TO_ALIGNMENT (align_label);
605 new_align = 1 << known_align_log;
606 if (new_align < known_align)
607 continue;
608 fuzz += (-align_addr ^ growth) & (new_align - known_align);
609 known_align = new_align;
611 return fuzz;
614 /* Compute a worst-case reference address of a branch so that it
615 can be safely used in the presence of aligned labels. Since the
616 size of the branch itself is unknown, the size of the branch is
617 not included in the range. I.e. for a forward branch, the reference
618 address is the end address of the branch as known from the previous
619 branch shortening pass, minus a value to account for possible size
620 increase due to alignment. For a backward branch, it is the start
621 address of the branch as known from the current pass, plus a value
622 to account for possible size increase due to alignment.
623 NB.: Therefore, the maximum offset allowed for backward branches needs
624 to exclude the branch size. */
627 insn_current_reference_address (rtx_insn *branch)
629 rtx dest;
630 int seq_uid;
632 if (! INSN_ADDRESSES_SET_P ())
633 return 0;
635 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
636 seq_uid = INSN_UID (seq);
637 if (!JUMP_P (branch))
638 /* This can happen for example on the PA; the objective is to know the
639 offset to address something in front of the start of the function.
640 Thus, we can treat it like a backward branch.
641 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
642 any alignment we'd encounter, so we skip the call to align_fuzz. */
643 return insn_current_address;
644 dest = JUMP_LABEL (branch);
646 /* BRANCH has no proper alignment chain set, so use SEQ.
647 BRANCH also has no INSN_SHUID. */
648 if (INSN_SHUID (seq) < INSN_SHUID (dest))
650 /* Forward branch. */
651 return (insn_last_address + insn_lengths[seq_uid]
652 - align_fuzz (seq, dest, length_unit_log, ~0));
654 else
656 /* Backward branch. */
657 return (insn_current_address
658 + align_fuzz (dest, seq, length_unit_log, ~0));
662 /* Compute branch alignments based on frequency information in the
663 CFG. */
665 unsigned int
666 compute_alignments (void)
668 int log, max_skip, max_log;
669 basic_block bb;
670 int freq_max = 0;
671 int freq_threshold = 0;
673 if (label_align)
675 free (label_align);
676 label_align = 0;
679 max_labelno = max_label_num ();
680 min_labelno = get_first_label_num ();
681 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
683 /* If not optimizing or optimizing for size, don't assign any alignments. */
684 if (! optimize || optimize_function_for_size_p (cfun))
685 return 0;
687 if (dump_file)
689 dump_reg_info (dump_file);
690 dump_flow_info (dump_file, TDF_DETAILS);
691 flow_loops_dump (dump_file, NULL, 1);
693 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
694 FOR_EACH_BB_FN (bb, cfun)
695 if (bb->frequency > freq_max)
696 freq_max = bb->frequency;
697 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
699 if (dump_file)
700 fprintf (dump_file, "freq_max: %i\n",freq_max);
701 FOR_EACH_BB_FN (bb, cfun)
703 rtx_insn *label = BB_HEAD (bb);
704 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
705 edge e;
706 edge_iterator ei;
708 if (!LABEL_P (label)
709 || optimize_bb_for_size_p (bb))
711 if (dump_file)
712 fprintf (dump_file,
713 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
714 bb->index, bb->frequency, bb->loop_father->num,
715 bb_loop_depth (bb));
716 continue;
718 max_log = LABEL_ALIGN (label);
719 max_skip = targetm.asm_out.label_align_max_skip (label);
721 FOR_EACH_EDGE (e, ei, bb->preds)
723 if (e->flags & EDGE_FALLTHRU)
724 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
725 else
726 branch_frequency += EDGE_FREQUENCY (e);
728 if (dump_file)
730 fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
731 " %2i fall %4i branch %4i",
732 bb->index, bb->frequency, bb->loop_father->num,
733 bb_loop_depth (bb),
734 fallthru_frequency, branch_frequency);
735 if (!bb->loop_father->inner && bb->loop_father->num)
736 fprintf (dump_file, " inner_loop");
737 if (bb->loop_father->header == bb)
738 fprintf (dump_file, " loop_header");
739 fprintf (dump_file, "\n");
742 /* There are two purposes to align block with no fallthru incoming edge:
743 1) to avoid fetch stalls when branch destination is near cache boundary
744 2) to improve cache efficiency in case the previous block is not executed
745 (so it does not need to be in the cache).
747 We to catch first case, we align frequently executed blocks.
748 To catch the second, we align blocks that are executed more frequently
749 than the predecessor and the predecessor is likely to not be executed
750 when function is called. */
752 if (!has_fallthru
753 && (branch_frequency > freq_threshold
754 || (bb->frequency > bb->prev_bb->frequency * 10
755 && (bb->prev_bb->frequency
756 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
758 log = JUMP_ALIGN (label);
759 if (dump_file)
760 fprintf (dump_file, " jump alignment added.\n");
761 if (max_log < log)
763 max_log = log;
764 max_skip = targetm.asm_out.jump_align_max_skip (label);
767 /* In case block is frequent and reached mostly by non-fallthru edge,
768 align it. It is most likely a first block of loop. */
769 if (has_fallthru
770 && !(single_succ_p (bb)
771 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
772 && optimize_bb_for_speed_p (bb)
773 && branch_frequency + fallthru_frequency > freq_threshold
774 && (branch_frequency
775 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
777 log = LOOP_ALIGN (label);
778 if (dump_file)
779 fprintf (dump_file, " internal loop alignment added.\n");
780 if (max_log < log)
782 max_log = log;
783 max_skip = targetm.asm_out.loop_align_max_skip (label);
786 LABEL_TO_ALIGNMENT (label) = max_log;
787 LABEL_TO_MAX_SKIP (label) = max_skip;
790 loop_optimizer_finalize ();
791 free_dominance_info (CDI_DOMINATORS);
792 return 0;
795 /* Grow the LABEL_ALIGN array after new labels are created. */
797 static void
798 grow_label_align (void)
800 int old = max_labelno;
801 int n_labels;
802 int n_old_labels;
804 max_labelno = max_label_num ();
806 n_labels = max_labelno - min_labelno + 1;
807 n_old_labels = old - min_labelno + 1;
809 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
811 /* Range of labels grows monotonically in the function. Failing here
812 means that the initialization of array got lost. */
813 gcc_assert (n_old_labels <= n_labels);
815 memset (label_align + n_old_labels, 0,
816 (n_labels - n_old_labels) * sizeof (struct label_alignment));
819 /* Update the already computed alignment information. LABEL_PAIRS is a vector
820 made up of pairs of labels for which the alignment information of the first
821 element will be copied from that of the second element. */
823 void
824 update_alignments (vec<rtx> &label_pairs)
826 unsigned int i = 0;
827 rtx iter, label = NULL_RTX;
829 if (max_labelno != max_label_num ())
830 grow_label_align ();
832 FOR_EACH_VEC_ELT (label_pairs, i, iter)
833 if (i & 1)
835 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
836 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
838 else
839 label = iter;
842 namespace {
844 const pass_data pass_data_compute_alignments =
846 RTL_PASS, /* type */
847 "alignments", /* name */
848 OPTGROUP_NONE, /* optinfo_flags */
849 TV_NONE, /* tv_id */
850 0, /* properties_required */
851 0, /* properties_provided */
852 0, /* properties_destroyed */
853 0, /* todo_flags_start */
854 0, /* todo_flags_finish */
857 class pass_compute_alignments : public rtl_opt_pass
859 public:
860 pass_compute_alignments (gcc::context *ctxt)
861 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
864 /* opt_pass methods: */
865 virtual unsigned int execute (function *) { return compute_alignments (); }
867 }; // class pass_compute_alignments
869 } // anon namespace
871 rtl_opt_pass *
872 make_pass_compute_alignments (gcc::context *ctxt)
874 return new pass_compute_alignments (ctxt);
878 /* Make a pass over all insns and compute their actual lengths by shortening
879 any branches of variable length if possible. */
881 /* shorten_branches might be called multiple times: for example, the SH
882 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
883 In order to do this, it needs proper length information, which it obtains
884 by calling shorten_branches. This cannot be collapsed with
885 shorten_branches itself into a single pass unless we also want to integrate
886 reorg.c, since the branch splitting exposes new instructions with delay
887 slots. */
889 void
890 shorten_branches (rtx_insn *first)
892 rtx_insn *insn;
893 int max_uid;
894 int i;
895 int max_log;
896 int max_skip;
897 #define MAX_CODE_ALIGN 16
898 rtx_insn *seq;
899 int something_changed = 1;
900 char *varying_length;
901 rtx body;
902 int uid;
903 rtx align_tab[MAX_CODE_ALIGN];
905 /* Compute maximum UID and allocate label_align / uid_shuid. */
906 max_uid = get_max_uid ();
908 /* Free uid_shuid before reallocating it. */
909 free (uid_shuid);
911 uid_shuid = XNEWVEC (int, max_uid);
913 if (max_labelno != max_label_num ())
914 grow_label_align ();
916 /* Initialize label_align and set up uid_shuid to be strictly
917 monotonically rising with insn order. */
918 /* We use max_log here to keep track of the maximum alignment we want to
919 impose on the next CODE_LABEL (or the current one if we are processing
920 the CODE_LABEL itself). */
922 max_log = 0;
923 max_skip = 0;
925 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
927 int log;
929 INSN_SHUID (insn) = i++;
930 if (INSN_P (insn))
931 continue;
933 if (LABEL_P (insn))
935 rtx_insn *next;
936 bool next_is_jumptable;
938 /* Merge in alignments computed by compute_alignments. */
939 log = LABEL_TO_ALIGNMENT (insn);
940 if (max_log < log)
942 max_log = log;
943 max_skip = LABEL_TO_MAX_SKIP (insn);
946 next = next_nonnote_insn (insn);
947 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
948 if (!next_is_jumptable)
950 log = LABEL_ALIGN (insn);
951 if (max_log < log)
953 max_log = log;
954 max_skip = targetm.asm_out.label_align_max_skip (insn);
957 /* ADDR_VECs only take room if read-only data goes into the text
958 section. */
959 if ((JUMP_TABLES_IN_TEXT_SECTION
960 || readonly_data_section == text_section)
961 && next_is_jumptable)
963 log = ADDR_VEC_ALIGN (next);
964 if (max_log < log)
966 max_log = log;
967 max_skip = targetm.asm_out.label_align_max_skip (insn);
970 LABEL_TO_ALIGNMENT (insn) = max_log;
971 LABEL_TO_MAX_SKIP (insn) = max_skip;
972 max_log = 0;
973 max_skip = 0;
975 else if (BARRIER_P (insn))
977 rtx_insn *label;
979 for (label = insn; label && ! INSN_P (label);
980 label = NEXT_INSN (label))
981 if (LABEL_P (label))
983 log = LABEL_ALIGN_AFTER_BARRIER (insn);
984 if (max_log < log)
986 max_log = log;
987 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
989 break;
993 if (!HAVE_ATTR_length)
994 return;
996 /* Allocate the rest of the arrays. */
997 insn_lengths = XNEWVEC (int, max_uid);
998 insn_lengths_max_uid = max_uid;
999 /* Syntax errors can lead to labels being outside of the main insn stream.
1000 Initialize insn_addresses, so that we get reproducible results. */
1001 INSN_ADDRESSES_ALLOC (max_uid);
1003 varying_length = XCNEWVEC (char, max_uid);
1005 /* Initialize uid_align. We scan instructions
1006 from end to start, and keep in align_tab[n] the last seen insn
1007 that does an alignment of at least n+1, i.e. the successor
1008 in the alignment chain for an insn that does / has a known
1009 alignment of n. */
1010 uid_align = XCNEWVEC (rtx, max_uid);
1012 for (i = MAX_CODE_ALIGN; --i >= 0;)
1013 align_tab[i] = NULL_RTX;
1014 seq = get_last_insn ();
1015 for (; seq; seq = PREV_INSN (seq))
1017 int uid = INSN_UID (seq);
1018 int log;
1019 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1020 uid_align[uid] = align_tab[0];
1021 if (log)
1023 /* Found an alignment label. */
1024 uid_align[uid] = align_tab[log];
1025 for (i = log - 1; i >= 0; i--)
1026 align_tab[i] = seq;
1030 /* When optimizing, we start assuming minimum length, and keep increasing
1031 lengths as we find the need for this, till nothing changes.
1032 When not optimizing, we start assuming maximum lengths, and
1033 do a single pass to update the lengths. */
1034 bool increasing = optimize != 0;
1036 #ifdef CASE_VECTOR_SHORTEN_MODE
1037 if (optimize)
1039 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1040 label fields. */
1042 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1043 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1044 int rel;
1046 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1048 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1049 int len, i, min, max, insn_shuid;
1050 int min_align;
1051 addr_diff_vec_flags flags;
1053 if (! JUMP_TABLE_DATA_P (insn)
1054 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1055 continue;
1056 pat = PATTERN (insn);
1057 len = XVECLEN (pat, 1);
1058 gcc_assert (len > 0);
1059 min_align = MAX_CODE_ALIGN;
1060 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1062 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1063 int shuid = INSN_SHUID (lab);
1064 if (shuid < min)
1066 min = shuid;
1067 min_lab = lab;
1069 if (shuid > max)
1071 max = shuid;
1072 max_lab = lab;
1074 if (min_align > LABEL_TO_ALIGNMENT (lab))
1075 min_align = LABEL_TO_ALIGNMENT (lab);
1077 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1078 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1079 insn_shuid = INSN_SHUID (insn);
1080 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1081 memset (&flags, 0, sizeof (flags));
1082 flags.min_align = min_align;
1083 flags.base_after_vec = rel > insn_shuid;
1084 flags.min_after_vec = min > insn_shuid;
1085 flags.max_after_vec = max > insn_shuid;
1086 flags.min_after_base = min > rel;
1087 flags.max_after_base = max > rel;
1088 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1090 if (increasing)
1091 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1094 #endif /* CASE_VECTOR_SHORTEN_MODE */
1096 /* Compute initial lengths, addresses, and varying flags for each insn. */
1097 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1099 for (insn_current_address = 0, insn = first;
1100 insn != 0;
1101 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1103 uid = INSN_UID (insn);
1105 insn_lengths[uid] = 0;
1107 if (LABEL_P (insn))
1109 int log = LABEL_TO_ALIGNMENT (insn);
1110 if (log)
1112 int align = 1 << log;
1113 int new_address = (insn_current_address + align - 1) & -align;
1114 insn_lengths[uid] = new_address - insn_current_address;
1118 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1120 if (NOTE_P (insn) || BARRIER_P (insn)
1121 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1122 continue;
1123 if (insn->deleted ())
1124 continue;
1126 body = PATTERN (insn);
1127 if (JUMP_TABLE_DATA_P (insn))
1129 /* This only takes room if read-only data goes into the text
1130 section. */
1131 if (JUMP_TABLES_IN_TEXT_SECTION
1132 || readonly_data_section == text_section)
1133 insn_lengths[uid] = (XVECLEN (body,
1134 GET_CODE (body) == ADDR_DIFF_VEC)
1135 * GET_MODE_SIZE (GET_MODE (body)));
1136 /* Alignment is handled by ADDR_VEC_ALIGN. */
1138 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1139 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1140 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1142 int i;
1143 int const_delay_slots;
1144 if (DELAY_SLOTS)
1145 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1146 else
1147 const_delay_slots = 0;
1149 int (*inner_length_fun) (rtx_insn *)
1150 = const_delay_slots ? length_fun : insn_default_length;
1151 /* Inside a delay slot sequence, we do not do any branch shortening
1152 if the shortening could change the number of delay slots
1153 of the branch. */
1154 for (i = 0; i < body_seq->len (); i++)
1156 rtx_insn *inner_insn = body_seq->insn (i);
1157 int inner_uid = INSN_UID (inner_insn);
1158 int inner_length;
1160 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1161 || asm_noperands (PATTERN (inner_insn)) >= 0)
1162 inner_length = (asm_insn_count (PATTERN (inner_insn))
1163 * insn_default_length (inner_insn));
1164 else
1165 inner_length = inner_length_fun (inner_insn);
1167 insn_lengths[inner_uid] = inner_length;
1168 if (const_delay_slots)
1170 if ((varying_length[inner_uid]
1171 = insn_variable_length_p (inner_insn)) != 0)
1172 varying_length[uid] = 1;
1173 INSN_ADDRESSES (inner_uid) = (insn_current_address
1174 + insn_lengths[uid]);
1176 else
1177 varying_length[inner_uid] = 0;
1178 insn_lengths[uid] += inner_length;
1181 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1183 insn_lengths[uid] = length_fun (insn);
1184 varying_length[uid] = insn_variable_length_p (insn);
1187 /* If needed, do any adjustment. */
1188 #ifdef ADJUST_INSN_LENGTH
1189 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1190 if (insn_lengths[uid] < 0)
1191 fatal_insn ("negative insn length", insn);
1192 #endif
1195 /* Now loop over all the insns finding varying length insns. For each,
1196 get the current insn length. If it has changed, reflect the change.
1197 When nothing changes for a full pass, we are done. */
1199 while (something_changed)
1201 something_changed = 0;
1202 insn_current_align = MAX_CODE_ALIGN - 1;
1203 for (insn_current_address = 0, insn = first;
1204 insn != 0;
1205 insn = NEXT_INSN (insn))
1207 int new_length;
1208 #ifdef ADJUST_INSN_LENGTH
1209 int tmp_length;
1210 #endif
1211 int length_align;
1213 uid = INSN_UID (insn);
1215 if (LABEL_P (insn))
1217 int log = LABEL_TO_ALIGNMENT (insn);
1219 #ifdef CASE_VECTOR_SHORTEN_MODE
1220 /* If the mode of a following jump table was changed, we
1221 may need to update the alignment of this label. */
1222 rtx_insn *next;
1223 bool next_is_jumptable;
1225 next = next_nonnote_insn (insn);
1226 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1227 if ((JUMP_TABLES_IN_TEXT_SECTION
1228 || readonly_data_section == text_section)
1229 && next_is_jumptable)
1231 int newlog = ADDR_VEC_ALIGN (next);
1232 if (newlog != log)
1234 log = newlog;
1235 LABEL_TO_ALIGNMENT (insn) = log;
1236 something_changed = 1;
1239 #endif
1241 if (log > insn_current_align)
1243 int align = 1 << log;
1244 int new_address= (insn_current_address + align - 1) & -align;
1245 insn_lengths[uid] = new_address - insn_current_address;
1246 insn_current_align = log;
1247 insn_current_address = new_address;
1249 else
1250 insn_lengths[uid] = 0;
1251 INSN_ADDRESSES (uid) = insn_current_address;
1252 continue;
1255 length_align = INSN_LENGTH_ALIGNMENT (insn);
1256 if (length_align < insn_current_align)
1257 insn_current_align = length_align;
1259 insn_last_address = INSN_ADDRESSES (uid);
1260 INSN_ADDRESSES (uid) = insn_current_address;
1262 #ifdef CASE_VECTOR_SHORTEN_MODE
1263 if (optimize
1264 && JUMP_TABLE_DATA_P (insn)
1265 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1267 rtx body = PATTERN (insn);
1268 int old_length = insn_lengths[uid];
1269 rtx_insn *rel_lab =
1270 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1271 rtx min_lab = XEXP (XEXP (body, 2), 0);
1272 rtx max_lab = XEXP (XEXP (body, 3), 0);
1273 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1274 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1275 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1276 rtx_insn *prev;
1277 int rel_align = 0;
1278 addr_diff_vec_flags flags;
1279 machine_mode vec_mode;
1281 /* Avoid automatic aggregate initialization. */
1282 flags = ADDR_DIFF_VEC_FLAGS (body);
1284 /* Try to find a known alignment for rel_lab. */
1285 for (prev = rel_lab;
1286 prev
1287 && ! insn_lengths[INSN_UID (prev)]
1288 && ! (varying_length[INSN_UID (prev)] & 1);
1289 prev = PREV_INSN (prev))
1290 if (varying_length[INSN_UID (prev)] & 2)
1292 rel_align = LABEL_TO_ALIGNMENT (prev);
1293 break;
1296 /* See the comment on addr_diff_vec_flags in rtl.h for the
1297 meaning of the flags values. base: REL_LAB vec: INSN */
1298 /* Anything after INSN has still addresses from the last
1299 pass; adjust these so that they reflect our current
1300 estimate for this pass. */
1301 if (flags.base_after_vec)
1302 rel_addr += insn_current_address - insn_last_address;
1303 if (flags.min_after_vec)
1304 min_addr += insn_current_address - insn_last_address;
1305 if (flags.max_after_vec)
1306 max_addr += insn_current_address - insn_last_address;
1307 /* We want to know the worst case, i.e. lowest possible value
1308 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1309 its offset is positive, and we have to be wary of code shrink;
1310 otherwise, it is negative, and we have to be vary of code
1311 size increase. */
1312 if (flags.min_after_base)
1314 /* If INSN is between REL_LAB and MIN_LAB, the size
1315 changes we are about to make can change the alignment
1316 within the observed offset, therefore we have to break
1317 it up into two parts that are independent. */
1318 if (! flags.base_after_vec && flags.min_after_vec)
1320 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1321 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1323 else
1324 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1326 else
1328 if (flags.base_after_vec && ! flags.min_after_vec)
1330 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1331 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1333 else
1334 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1336 /* Likewise, determine the highest lowest possible value
1337 for the offset of MAX_LAB. */
1338 if (flags.max_after_base)
1340 if (! flags.base_after_vec && flags.max_after_vec)
1342 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1343 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1345 else
1346 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1348 else
1350 if (flags.base_after_vec && ! flags.max_after_vec)
1352 max_addr += align_fuzz (max_lab, insn, 0, 0);
1353 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1355 else
1356 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1358 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1359 max_addr - rel_addr, body);
1360 if (!increasing
1361 || (GET_MODE_SIZE (vec_mode)
1362 >= GET_MODE_SIZE (GET_MODE (body))))
1363 PUT_MODE (body, vec_mode);
1364 if (JUMP_TABLES_IN_TEXT_SECTION
1365 || readonly_data_section == text_section)
1367 insn_lengths[uid]
1368 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1369 insn_current_address += insn_lengths[uid];
1370 if (insn_lengths[uid] != old_length)
1371 something_changed = 1;
1374 continue;
1376 #endif /* CASE_VECTOR_SHORTEN_MODE */
1378 if (! (varying_length[uid]))
1380 if (NONJUMP_INSN_P (insn)
1381 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1383 int i;
1385 body = PATTERN (insn);
1386 for (i = 0; i < XVECLEN (body, 0); i++)
1388 rtx inner_insn = XVECEXP (body, 0, i);
1389 int inner_uid = INSN_UID (inner_insn);
1391 INSN_ADDRESSES (inner_uid) = insn_current_address;
1393 insn_current_address += insn_lengths[inner_uid];
1396 else
1397 insn_current_address += insn_lengths[uid];
1399 continue;
1402 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1404 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1405 int i;
1407 body = PATTERN (insn);
1408 new_length = 0;
1409 for (i = 0; i < seqn->len (); i++)
1411 rtx_insn *inner_insn = seqn->insn (i);
1412 int inner_uid = INSN_UID (inner_insn);
1413 int inner_length;
1415 INSN_ADDRESSES (inner_uid) = insn_current_address;
1417 /* insn_current_length returns 0 for insns with a
1418 non-varying length. */
1419 if (! varying_length[inner_uid])
1420 inner_length = insn_lengths[inner_uid];
1421 else
1422 inner_length = insn_current_length (inner_insn);
1424 if (inner_length != insn_lengths[inner_uid])
1426 if (!increasing || inner_length > insn_lengths[inner_uid])
1428 insn_lengths[inner_uid] = inner_length;
1429 something_changed = 1;
1431 else
1432 inner_length = insn_lengths[inner_uid];
1434 insn_current_address += inner_length;
1435 new_length += inner_length;
1438 else
1440 new_length = insn_current_length (insn);
1441 insn_current_address += new_length;
1444 #ifdef ADJUST_INSN_LENGTH
1445 /* If needed, do any adjustment. */
1446 tmp_length = new_length;
1447 ADJUST_INSN_LENGTH (insn, new_length);
1448 insn_current_address += (new_length - tmp_length);
1449 #endif
1451 if (new_length != insn_lengths[uid]
1452 && (!increasing || new_length > insn_lengths[uid]))
1454 insn_lengths[uid] = new_length;
1455 something_changed = 1;
1457 else
1458 insn_current_address += insn_lengths[uid] - new_length;
1460 /* For a non-optimizing compile, do only a single pass. */
1461 if (!increasing)
1462 break;
1465 free (varying_length);
1468 /* Given the body of an INSN known to be generated by an ASM statement, return
1469 the number of machine instructions likely to be generated for this insn.
1470 This is used to compute its length. */
1472 static int
1473 asm_insn_count (rtx body)
1475 const char *templ;
1477 if (GET_CODE (body) == ASM_INPUT)
1478 templ = XSTR (body, 0);
1479 else
1480 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1482 return asm_str_count (templ);
1485 /* Return the number of machine instructions likely to be generated for the
1486 inline-asm template. */
1488 asm_str_count (const char *templ)
1490 int count = 1;
1492 if (!*templ)
1493 return 0;
1495 for (; *templ; templ++)
1496 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1497 || *templ == '\n')
1498 count++;
1500 return count;
1503 /* ??? This is probably the wrong place for these. */
1504 /* Structure recording the mapping from source file and directory
1505 names at compile time to those to be embedded in debug
1506 information. */
1507 struct debug_prefix_map
1509 const char *old_prefix;
1510 const char *new_prefix;
1511 size_t old_len;
1512 size_t new_len;
1513 struct debug_prefix_map *next;
1516 /* Linked list of such structures. */
1517 static debug_prefix_map *debug_prefix_maps;
1520 /* Record a debug file prefix mapping. ARG is the argument to
1521 -fdebug-prefix-map and must be of the form OLD=NEW. */
1523 void
1524 add_debug_prefix_map (const char *arg)
1526 debug_prefix_map *map;
1527 const char *p;
1529 p = strchr (arg, '=');
1530 if (!p)
1532 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1533 return;
1535 map = XNEW (debug_prefix_map);
1536 map->old_prefix = xstrndup (arg, p - arg);
1537 map->old_len = p - arg;
1538 p++;
1539 map->new_prefix = xstrdup (p);
1540 map->new_len = strlen (p);
1541 map->next = debug_prefix_maps;
1542 debug_prefix_maps = map;
1545 /* Perform user-specified mapping of debug filename prefixes. Return
1546 the new name corresponding to FILENAME. */
1548 const char *
1549 remap_debug_filename (const char *filename)
1551 debug_prefix_map *map;
1552 char *s;
1553 const char *name;
1554 size_t name_len;
1556 for (map = debug_prefix_maps; map; map = map->next)
1557 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1558 break;
1559 if (!map)
1560 return filename;
1561 name = filename + map->old_len;
1562 name_len = strlen (name) + 1;
1563 s = (char *) alloca (name_len + map->new_len);
1564 memcpy (s, map->new_prefix, map->new_len);
1565 memcpy (s + map->new_len, name, name_len);
1566 return ggc_strdup (s);
1569 /* Return true if DWARF2 debug info can be emitted for DECL. */
1571 static bool
1572 dwarf2_debug_info_emitted_p (tree decl)
1574 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1575 return false;
1577 if (DECL_IGNORED_P (decl))
1578 return false;
1580 return true;
1583 /* Return scope resulting from combination of S1 and S2. */
1584 static tree
1585 choose_inner_scope (tree s1, tree s2)
1587 if (!s1)
1588 return s2;
1589 if (!s2)
1590 return s1;
1591 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1592 return s1;
1593 return s2;
1596 /* Emit lexical block notes needed to change scope from S1 to S2. */
1598 static void
1599 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1601 rtx_insn *insn = orig_insn;
1602 tree com = NULL_TREE;
1603 tree ts1 = s1, ts2 = s2;
1604 tree s;
1606 while (ts1 != ts2)
1608 gcc_assert (ts1 && ts2);
1609 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1610 ts1 = BLOCK_SUPERCONTEXT (ts1);
1611 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1612 ts2 = BLOCK_SUPERCONTEXT (ts2);
1613 else
1615 ts1 = BLOCK_SUPERCONTEXT (ts1);
1616 ts2 = BLOCK_SUPERCONTEXT (ts2);
1619 com = ts1;
1621 /* Close scopes. */
1622 s = s1;
1623 while (s != com)
1625 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1626 NOTE_BLOCK (note) = s;
1627 s = BLOCK_SUPERCONTEXT (s);
1630 /* Open scopes. */
1631 s = s2;
1632 while (s != com)
1634 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1635 NOTE_BLOCK (insn) = s;
1636 s = BLOCK_SUPERCONTEXT (s);
1640 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1641 on the scope tree and the newly reordered instructions. */
1643 static void
1644 reemit_insn_block_notes (void)
1646 tree cur_block = DECL_INITIAL (cfun->decl);
1647 rtx_insn *insn;
1648 rtx_note *note;
1650 insn = get_insns ();
1651 for (; insn; insn = NEXT_INSN (insn))
1653 tree this_block;
1655 /* Prevent lexical blocks from straddling section boundaries. */
1656 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1658 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1659 s = BLOCK_SUPERCONTEXT (s))
1661 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1662 NOTE_BLOCK (note) = s;
1663 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1664 NOTE_BLOCK (note) = s;
1668 if (!active_insn_p (insn))
1669 continue;
1671 /* Avoid putting scope notes between jump table and its label. */
1672 if (JUMP_TABLE_DATA_P (insn))
1673 continue;
1675 this_block = insn_scope (insn);
1676 /* For sequences compute scope resulting from merging all scopes
1677 of instructions nested inside. */
1678 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1680 int i;
1682 this_block = NULL;
1683 for (i = 0; i < body->len (); i++)
1684 this_block = choose_inner_scope (this_block,
1685 insn_scope (body->insn (i)));
1687 if (! this_block)
1689 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1690 continue;
1691 else
1692 this_block = DECL_INITIAL (cfun->decl);
1695 if (this_block != cur_block)
1697 change_scope (insn, cur_block, this_block);
1698 cur_block = this_block;
1702 /* change_scope emits before the insn, not after. */
1703 note = emit_note (NOTE_INSN_DELETED);
1704 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1705 delete_insn (note);
1707 reorder_blocks ();
1710 static const char *some_local_dynamic_name;
1712 /* Locate some local-dynamic symbol still in use by this function
1713 so that we can print its name in local-dynamic base patterns.
1714 Return null if there are no local-dynamic references. */
1716 const char *
1717 get_some_local_dynamic_name ()
1719 subrtx_iterator::array_type array;
1720 rtx_insn *insn;
1722 if (some_local_dynamic_name)
1723 return some_local_dynamic_name;
1725 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1726 if (NONDEBUG_INSN_P (insn))
1727 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1729 const_rtx x = *iter;
1730 if (GET_CODE (x) == SYMBOL_REF)
1732 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1733 return some_local_dynamic_name = XSTR (x, 0);
1734 if (CONSTANT_POOL_ADDRESS_P (x))
1735 iter.substitute (get_pool_constant (x));
1739 return 0;
1742 /* Output assembler code for the start of a function,
1743 and initialize some of the variables in this file
1744 for the new function. The label for the function and associated
1745 assembler pseudo-ops have already been output in `assemble_start_function'.
1747 FIRST is the first insn of the rtl for the function being compiled.
1748 FILE is the file to write assembler code to.
1749 OPTIMIZE_P is nonzero if we should eliminate redundant
1750 test and compare insns. */
1752 void
1753 final_start_function (rtx_insn *first, FILE *file,
1754 int optimize_p ATTRIBUTE_UNUSED)
1756 block_depth = 0;
1758 this_is_asm_operands = 0;
1760 need_profile_function = false;
1762 last_filename = LOCATION_FILE (prologue_location);
1763 last_linenum = LOCATION_LINE (prologue_location);
1764 last_discriminator = discriminator = 0;
1766 high_block_linenum = high_function_linenum = last_linenum;
1768 if (flag_sanitize & SANITIZE_ADDRESS)
1769 asan_function_start ();
1771 if (!DECL_IGNORED_P (current_function_decl))
1772 debug_hooks->begin_prologue (last_linenum, last_filename);
1774 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1775 dwarf2out_begin_prologue (0, NULL);
1777 #ifdef LEAF_REG_REMAP
1778 if (crtl->uses_only_leaf_regs)
1779 leaf_renumber_regs (first);
1780 #endif
1782 /* The Sun386i and perhaps other machines don't work right
1783 if the profiling code comes after the prologue. */
1784 if (targetm.profile_before_prologue () && crtl->profile)
1786 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1787 && targetm.have_prologue ())
1789 rtx_insn *insn;
1790 for (insn = first; insn; insn = NEXT_INSN (insn))
1791 if (!NOTE_P (insn))
1793 insn = NULL;
1794 break;
1796 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1797 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1798 break;
1799 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1800 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1801 continue;
1802 else
1804 insn = NULL;
1805 break;
1808 if (insn)
1809 need_profile_function = true;
1810 else
1811 profile_function (file);
1813 else
1814 profile_function (file);
1817 /* If debugging, assign block numbers to all of the blocks in this
1818 function. */
1819 if (write_symbols)
1821 reemit_insn_block_notes ();
1822 number_blocks (current_function_decl);
1823 /* We never actually put out begin/end notes for the top-level
1824 block in the function. But, conceptually, that block is
1825 always needed. */
1826 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1829 if (warn_frame_larger_than
1830 && get_frame_size () > frame_larger_than_size)
1832 /* Issue a warning */
1833 warning (OPT_Wframe_larger_than_,
1834 "the frame size of %wd bytes is larger than %wd bytes",
1835 get_frame_size (), frame_larger_than_size);
1838 /* First output the function prologue: code to set up the stack frame. */
1839 targetm.asm_out.function_prologue (file, get_frame_size ());
1841 /* If the machine represents the prologue as RTL, the profiling code must
1842 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1843 if (! targetm.have_prologue ())
1844 profile_after_prologue (file);
1847 static void
1848 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1850 if (!targetm.profile_before_prologue () && crtl->profile)
1851 profile_function (file);
1854 static void
1855 profile_function (FILE *file ATTRIBUTE_UNUSED)
1857 #ifndef NO_PROFILE_COUNTERS
1858 # define NO_PROFILE_COUNTERS 0
1859 #endif
1860 #ifdef ASM_OUTPUT_REG_PUSH
1861 rtx sval = NULL, chain = NULL;
1863 if (cfun->returns_struct)
1864 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1865 true);
1866 if (cfun->static_chain_decl)
1867 chain = targetm.calls.static_chain (current_function_decl, true);
1868 #endif /* ASM_OUTPUT_REG_PUSH */
1870 if (! NO_PROFILE_COUNTERS)
1872 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1873 switch_to_section (data_section);
1874 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1875 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1876 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1879 switch_to_section (current_function_section ());
1881 #ifdef ASM_OUTPUT_REG_PUSH
1882 if (sval && REG_P (sval))
1883 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1884 if (chain && REG_P (chain))
1885 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1886 #endif
1888 FUNCTION_PROFILER (file, current_function_funcdef_no);
1890 #ifdef ASM_OUTPUT_REG_PUSH
1891 if (chain && REG_P (chain))
1892 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1893 if (sval && REG_P (sval))
1894 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1895 #endif
1898 /* Output assembler code for the end of a function.
1899 For clarity, args are same as those of `final_start_function'
1900 even though not all of them are needed. */
1902 void
1903 final_end_function (void)
1905 app_disable ();
1907 if (!DECL_IGNORED_P (current_function_decl))
1908 debug_hooks->end_function (high_function_linenum);
1910 /* Finally, output the function epilogue:
1911 code to restore the stack frame and return to the caller. */
1912 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1914 /* And debug output. */
1915 if (!DECL_IGNORED_P (current_function_decl))
1916 debug_hooks->end_epilogue (last_linenum, last_filename);
1918 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1919 && dwarf2out_do_frame ())
1920 dwarf2out_end_epilogue (last_linenum, last_filename);
1922 some_local_dynamic_name = 0;
1926 /* Dumper helper for basic block information. FILE is the assembly
1927 output file, and INSN is the instruction being emitted. */
1929 static void
1930 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1931 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1933 basic_block bb;
1935 if (!flag_debug_asm)
1936 return;
1938 if (INSN_UID (insn) < bb_map_size
1939 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1941 edge e;
1942 edge_iterator ei;
1944 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1945 if (bb->frequency)
1946 fprintf (file, " freq:%d", bb->frequency);
1947 if (bb->count)
1948 fprintf (file, " count:%" PRId64,
1949 bb->count);
1950 fprintf (file, " seq:%d", (*bb_seqn)++);
1951 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1952 FOR_EACH_EDGE (e, ei, bb->preds)
1954 dump_edge_info (file, e, TDF_DETAILS, 0);
1956 fprintf (file, "\n");
1958 if (INSN_UID (insn) < bb_map_size
1959 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1961 edge e;
1962 edge_iterator ei;
1964 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1965 FOR_EACH_EDGE (e, ei, bb->succs)
1967 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1969 fprintf (file, "\n");
1973 /* Output assembler code for some insns: all or part of a function.
1974 For description of args, see `final_start_function', above. */
1976 void
1977 final (rtx_insn *first, FILE *file, int optimize_p)
1979 rtx_insn *insn, *next;
1980 int seen = 0;
1982 /* Used for -dA dump. */
1983 basic_block *start_to_bb = NULL;
1984 basic_block *end_to_bb = NULL;
1985 int bb_map_size = 0;
1986 int bb_seqn = 0;
1988 last_ignored_compare = 0;
1990 if (HAVE_cc0)
1991 for (insn = first; insn; insn = NEXT_INSN (insn))
1993 /* If CC tracking across branches is enabled, record the insn which
1994 jumps to each branch only reached from one place. */
1995 if (optimize_p && JUMP_P (insn))
1997 rtx lab = JUMP_LABEL (insn);
1998 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2000 LABEL_REFS (lab) = insn;
2005 init_recog ();
2007 CC_STATUS_INIT;
2009 if (flag_debug_asm)
2011 basic_block bb;
2013 bb_map_size = get_max_uid () + 1;
2014 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2015 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2017 /* There is no cfg for a thunk. */
2018 if (!cfun->is_thunk)
2019 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2021 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2022 end_to_bb[INSN_UID (BB_END (bb))] = bb;
2026 /* Output the insns. */
2027 for (insn = first; insn;)
2029 if (HAVE_ATTR_length)
2031 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2033 /* This can be triggered by bugs elsewhere in the compiler if
2034 new insns are created after init_insn_lengths is called. */
2035 gcc_assert (NOTE_P (insn));
2036 insn_current_address = -1;
2038 else
2039 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2042 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2043 bb_map_size, &bb_seqn);
2044 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2047 if (flag_debug_asm)
2049 free (start_to_bb);
2050 free (end_to_bb);
2053 /* Remove CFI notes, to avoid compare-debug failures. */
2054 for (insn = first; insn; insn = next)
2056 next = NEXT_INSN (insn);
2057 if (NOTE_P (insn)
2058 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2059 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2060 delete_insn (insn);
2064 const char *
2065 get_insn_template (int code, rtx insn)
2067 switch (insn_data[code].output_format)
2069 case INSN_OUTPUT_FORMAT_SINGLE:
2070 return insn_data[code].output.single;
2071 case INSN_OUTPUT_FORMAT_MULTI:
2072 return insn_data[code].output.multi[which_alternative];
2073 case INSN_OUTPUT_FORMAT_FUNCTION:
2074 gcc_assert (insn);
2075 return (*insn_data[code].output.function) (recog_data.operand,
2076 as_a <rtx_insn *> (insn));
2078 default:
2079 gcc_unreachable ();
2083 /* Emit the appropriate declaration for an alternate-entry-point
2084 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2085 LABEL_KIND != LABEL_NORMAL.
2087 The case fall-through in this function is intentional. */
2088 static void
2089 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2091 const char *name = LABEL_NAME (insn);
2093 switch (LABEL_KIND (insn))
2095 case LABEL_WEAK_ENTRY:
2096 #ifdef ASM_WEAKEN_LABEL
2097 ASM_WEAKEN_LABEL (file, name);
2098 #endif
2099 case LABEL_GLOBAL_ENTRY:
2100 targetm.asm_out.globalize_label (file, name);
2101 case LABEL_STATIC_ENTRY:
2102 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2103 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2104 #endif
2105 ASM_OUTPUT_LABEL (file, name);
2106 break;
2108 case LABEL_NORMAL:
2109 default:
2110 gcc_unreachable ();
2114 /* Given a CALL_INSN, find and return the nested CALL. */
2115 static rtx
2116 call_from_call_insn (rtx_call_insn *insn)
2118 rtx x;
2119 gcc_assert (CALL_P (insn));
2120 x = PATTERN (insn);
2122 while (GET_CODE (x) != CALL)
2124 switch (GET_CODE (x))
2126 default:
2127 gcc_unreachable ();
2128 case COND_EXEC:
2129 x = COND_EXEC_CODE (x);
2130 break;
2131 case PARALLEL:
2132 x = XVECEXP (x, 0, 0);
2133 break;
2134 case SET:
2135 x = XEXP (x, 1);
2136 break;
2139 return x;
2142 /* The final scan for one insn, INSN.
2143 Args are same as in `final', except that INSN
2144 is the insn being scanned.
2145 Value returned is the next insn to be scanned.
2147 NOPEEPHOLES is the flag to disallow peephole processing (currently
2148 used for within delayed branch sequence output).
2150 SEEN is used to track the end of the prologue, for emitting
2151 debug information. We force the emission of a line note after
2152 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2154 rtx_insn *
2155 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2156 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2158 #if HAVE_cc0
2159 rtx set;
2160 #endif
2161 rtx_insn *next;
2163 insn_counter++;
2165 /* Ignore deleted insns. These can occur when we split insns (due to a
2166 template of "#") while not optimizing. */
2167 if (insn->deleted ())
2168 return NEXT_INSN (insn);
2170 switch (GET_CODE (insn))
2172 case NOTE:
2173 switch (NOTE_KIND (insn))
2175 case NOTE_INSN_DELETED:
2176 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2177 break;
2179 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2180 in_cold_section_p = !in_cold_section_p;
2182 if (dwarf2out_do_frame ())
2183 dwarf2out_switch_text_section ();
2184 else if (!DECL_IGNORED_P (current_function_decl))
2185 debug_hooks->switch_text_section ();
2187 switch_to_section (current_function_section ());
2188 targetm.asm_out.function_switched_text_sections (asm_out_file,
2189 current_function_decl,
2190 in_cold_section_p);
2191 /* Emit a label for the split cold section. Form label name by
2192 suffixing "cold" to the original function's name. */
2193 if (in_cold_section_p)
2195 cold_function_name
2196 = clone_function_name (current_function_decl, "cold");
2197 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2198 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2199 IDENTIFIER_POINTER
2200 (cold_function_name),
2201 current_function_decl);
2202 #else
2203 ASM_OUTPUT_LABEL (asm_out_file,
2204 IDENTIFIER_POINTER (cold_function_name));
2205 #endif
2207 break;
2209 case NOTE_INSN_BASIC_BLOCK:
2210 if (need_profile_function)
2212 profile_function (asm_out_file);
2213 need_profile_function = false;
2216 if (targetm.asm_out.unwind_emit)
2217 targetm.asm_out.unwind_emit (asm_out_file, insn);
2219 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2221 break;
2223 case NOTE_INSN_EH_REGION_BEG:
2224 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2225 NOTE_EH_HANDLER (insn));
2226 break;
2228 case NOTE_INSN_EH_REGION_END:
2229 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2230 NOTE_EH_HANDLER (insn));
2231 break;
2233 case NOTE_INSN_PROLOGUE_END:
2234 targetm.asm_out.function_end_prologue (file);
2235 profile_after_prologue (file);
2237 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2239 *seen |= SEEN_EMITTED;
2240 force_source_line = true;
2242 else
2243 *seen |= SEEN_NOTE;
2245 break;
2247 case NOTE_INSN_EPILOGUE_BEG:
2248 if (!DECL_IGNORED_P (current_function_decl))
2249 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2250 targetm.asm_out.function_begin_epilogue (file);
2251 break;
2253 case NOTE_INSN_CFI:
2254 dwarf2out_emit_cfi (NOTE_CFI (insn));
2255 break;
2257 case NOTE_INSN_CFI_LABEL:
2258 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2259 NOTE_LABEL_NUMBER (insn));
2260 break;
2262 case NOTE_INSN_FUNCTION_BEG:
2263 if (need_profile_function)
2265 profile_function (asm_out_file);
2266 need_profile_function = false;
2269 app_disable ();
2270 if (!DECL_IGNORED_P (current_function_decl))
2271 debug_hooks->end_prologue (last_linenum, last_filename);
2273 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2275 *seen |= SEEN_EMITTED;
2276 force_source_line = true;
2278 else
2279 *seen |= SEEN_NOTE;
2281 break;
2283 case NOTE_INSN_BLOCK_BEG:
2284 if (debug_info_level == DINFO_LEVEL_NORMAL
2285 || debug_info_level == DINFO_LEVEL_VERBOSE
2286 || write_symbols == DWARF2_DEBUG
2287 || write_symbols == VMS_AND_DWARF2_DEBUG
2288 || write_symbols == VMS_DEBUG)
2290 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2292 app_disable ();
2293 ++block_depth;
2294 high_block_linenum = last_linenum;
2296 /* Output debugging info about the symbol-block beginning. */
2297 if (!DECL_IGNORED_P (current_function_decl))
2298 debug_hooks->begin_block (last_linenum, n);
2300 /* Mark this block as output. */
2301 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2303 if (write_symbols == DBX_DEBUG
2304 || write_symbols == SDB_DEBUG)
2306 location_t *locus_ptr
2307 = block_nonartificial_location (NOTE_BLOCK (insn));
2309 if (locus_ptr != NULL)
2311 override_filename = LOCATION_FILE (*locus_ptr);
2312 override_linenum = LOCATION_LINE (*locus_ptr);
2315 break;
2317 case NOTE_INSN_BLOCK_END:
2318 if (debug_info_level == DINFO_LEVEL_NORMAL
2319 || debug_info_level == DINFO_LEVEL_VERBOSE
2320 || write_symbols == DWARF2_DEBUG
2321 || write_symbols == VMS_AND_DWARF2_DEBUG
2322 || write_symbols == VMS_DEBUG)
2324 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2326 app_disable ();
2328 /* End of a symbol-block. */
2329 --block_depth;
2330 gcc_assert (block_depth >= 0);
2332 if (!DECL_IGNORED_P (current_function_decl))
2333 debug_hooks->end_block (high_block_linenum, n);
2335 if (write_symbols == DBX_DEBUG
2336 || write_symbols == SDB_DEBUG)
2338 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2339 location_t *locus_ptr
2340 = block_nonartificial_location (outer_block);
2342 if (locus_ptr != NULL)
2344 override_filename = LOCATION_FILE (*locus_ptr);
2345 override_linenum = LOCATION_LINE (*locus_ptr);
2347 else
2349 override_filename = NULL;
2350 override_linenum = 0;
2353 break;
2355 case NOTE_INSN_DELETED_LABEL:
2356 /* Emit the label. We may have deleted the CODE_LABEL because
2357 the label could be proved to be unreachable, though still
2358 referenced (in the form of having its address taken. */
2359 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2360 break;
2362 case NOTE_INSN_DELETED_DEBUG_LABEL:
2363 /* Similarly, but need to use different namespace for it. */
2364 if (CODE_LABEL_NUMBER (insn) != -1)
2365 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2366 break;
2368 case NOTE_INSN_VAR_LOCATION:
2369 case NOTE_INSN_CALL_ARG_LOCATION:
2370 if (!DECL_IGNORED_P (current_function_decl))
2371 debug_hooks->var_location (insn);
2372 break;
2374 default:
2375 gcc_unreachable ();
2376 break;
2378 break;
2380 case BARRIER:
2381 break;
2383 case CODE_LABEL:
2384 /* The target port might emit labels in the output function for
2385 some insn, e.g. sh.c output_branchy_insn. */
2386 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2388 int align = LABEL_TO_ALIGNMENT (insn);
2389 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2390 int max_skip = LABEL_TO_MAX_SKIP (insn);
2391 #endif
2393 if (align && NEXT_INSN (insn))
2395 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2397 #else
2398 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2399 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2400 #else
2401 ASM_OUTPUT_ALIGN (file, align);
2402 #endif
2403 #endif
2406 CC_STATUS_INIT;
2408 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2409 debug_hooks->label (as_a <rtx_code_label *> (insn));
2411 app_disable ();
2413 next = next_nonnote_insn (insn);
2414 /* If this label is followed by a jump-table, make sure we put
2415 the label in the read-only section. Also possibly write the
2416 label and jump table together. */
2417 if (next != 0 && JUMP_TABLE_DATA_P (next))
2419 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2420 /* In this case, the case vector is being moved by the
2421 target, so don't output the label at all. Leave that
2422 to the back end macros. */
2423 #else
2424 if (! JUMP_TABLES_IN_TEXT_SECTION)
2426 int log_align;
2428 switch_to_section (targetm.asm_out.function_rodata_section
2429 (current_function_decl));
2431 #ifdef ADDR_VEC_ALIGN
2432 log_align = ADDR_VEC_ALIGN (next);
2433 #else
2434 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2435 #endif
2436 ASM_OUTPUT_ALIGN (file, log_align);
2438 else
2439 switch_to_section (current_function_section ());
2441 #ifdef ASM_OUTPUT_CASE_LABEL
2442 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2443 next);
2444 #else
2445 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2446 #endif
2447 #endif
2448 break;
2450 if (LABEL_ALT_ENTRY_P (insn))
2451 output_alternate_entry_point (file, insn);
2452 else
2453 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2454 break;
2456 default:
2458 rtx body = PATTERN (insn);
2459 int insn_code_number;
2460 const char *templ;
2461 bool is_stmt;
2463 /* Reset this early so it is correct for ASM statements. */
2464 current_insn_predicate = NULL_RTX;
2466 /* An INSN, JUMP_INSN or CALL_INSN.
2467 First check for special kinds that recog doesn't recognize. */
2469 if (GET_CODE (body) == USE /* These are just declarations. */
2470 || GET_CODE (body) == CLOBBER)
2471 break;
2473 #if HAVE_cc0
2475 /* If there is a REG_CC_SETTER note on this insn, it means that
2476 the setting of the condition code was done in the delay slot
2477 of the insn that branched here. So recover the cc status
2478 from the insn that set it. */
2480 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2481 if (note)
2483 rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2484 NOTICE_UPDATE_CC (PATTERN (other), other);
2485 cc_prev_status = cc_status;
2488 #endif
2490 /* Detect insns that are really jump-tables
2491 and output them as such. */
2493 if (JUMP_TABLE_DATA_P (insn))
2495 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2496 int vlen, idx;
2497 #endif
2499 if (! JUMP_TABLES_IN_TEXT_SECTION)
2500 switch_to_section (targetm.asm_out.function_rodata_section
2501 (current_function_decl));
2502 else
2503 switch_to_section (current_function_section ());
2505 app_disable ();
2507 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2508 if (GET_CODE (body) == ADDR_VEC)
2510 #ifdef ASM_OUTPUT_ADDR_VEC
2511 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2512 #else
2513 gcc_unreachable ();
2514 #endif
2516 else
2518 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2519 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2520 #else
2521 gcc_unreachable ();
2522 #endif
2524 #else
2525 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2526 for (idx = 0; idx < vlen; idx++)
2528 if (GET_CODE (body) == ADDR_VEC)
2530 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2531 ASM_OUTPUT_ADDR_VEC_ELT
2532 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2533 #else
2534 gcc_unreachable ();
2535 #endif
2537 else
2539 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2540 ASM_OUTPUT_ADDR_DIFF_ELT
2541 (file,
2542 body,
2543 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2544 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2545 #else
2546 gcc_unreachable ();
2547 #endif
2550 #ifdef ASM_OUTPUT_CASE_END
2551 ASM_OUTPUT_CASE_END (file,
2552 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2553 insn);
2554 #endif
2555 #endif
2557 switch_to_section (current_function_section ());
2559 break;
2561 /* Output this line note if it is the first or the last line
2562 note in a row. */
2563 if (!DECL_IGNORED_P (current_function_decl)
2564 && notice_source_line (insn, &is_stmt))
2565 (*debug_hooks->source_line) (last_linenum, last_filename,
2566 last_discriminator, is_stmt);
2568 if (GET_CODE (body) == ASM_INPUT)
2570 const char *string = XSTR (body, 0);
2572 /* There's no telling what that did to the condition codes. */
2573 CC_STATUS_INIT;
2575 if (string[0])
2577 expanded_location loc;
2579 app_enable ();
2580 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2581 if (*loc.file && loc.line)
2582 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2583 ASM_COMMENT_START, loc.line, loc.file);
2584 fprintf (asm_out_file, "\t%s\n", string);
2585 #if HAVE_AS_LINE_ZERO
2586 if (*loc.file && loc.line)
2587 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2588 #endif
2590 break;
2593 /* Detect `asm' construct with operands. */
2594 if (asm_noperands (body) >= 0)
2596 unsigned int noperands = asm_noperands (body);
2597 rtx *ops = XALLOCAVEC (rtx, noperands);
2598 const char *string;
2599 location_t loc;
2600 expanded_location expanded;
2602 /* There's no telling what that did to the condition codes. */
2603 CC_STATUS_INIT;
2605 /* Get out the operand values. */
2606 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2607 /* Inhibit dying on what would otherwise be compiler bugs. */
2608 insn_noperands = noperands;
2609 this_is_asm_operands = insn;
2610 expanded = expand_location (loc);
2612 #ifdef FINAL_PRESCAN_INSN
2613 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2614 #endif
2616 /* Output the insn using them. */
2617 if (string[0])
2619 app_enable ();
2620 if (expanded.file && expanded.line)
2621 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2622 ASM_COMMENT_START, expanded.line, expanded.file);
2623 output_asm_insn (string, ops);
2624 #if HAVE_AS_LINE_ZERO
2625 if (expanded.file && expanded.line)
2626 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2627 #endif
2630 if (targetm.asm_out.final_postscan_insn)
2631 targetm.asm_out.final_postscan_insn (file, insn, ops,
2632 insn_noperands);
2634 this_is_asm_operands = 0;
2635 break;
2638 app_disable ();
2640 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2642 /* A delayed-branch sequence */
2643 int i;
2645 final_sequence = seq;
2647 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2648 force the restoration of a comparison that was previously
2649 thought unnecessary. If that happens, cancel this sequence
2650 and cause that insn to be restored. */
2652 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2653 if (next != seq->insn (1))
2655 final_sequence = 0;
2656 return next;
2659 for (i = 1; i < seq->len (); i++)
2661 rtx_insn *insn = seq->insn (i);
2662 rtx_insn *next = NEXT_INSN (insn);
2663 /* We loop in case any instruction in a delay slot gets
2664 split. */
2666 insn = final_scan_insn (insn, file, 0, 1, seen);
2667 while (insn != next);
2669 #ifdef DBR_OUTPUT_SEQEND
2670 DBR_OUTPUT_SEQEND (file);
2671 #endif
2672 final_sequence = 0;
2674 /* If the insn requiring the delay slot was a CALL_INSN, the
2675 insns in the delay slot are actually executed before the
2676 called function. Hence we don't preserve any CC-setting
2677 actions in these insns and the CC must be marked as being
2678 clobbered by the function. */
2679 if (CALL_P (seq->insn (0)))
2681 CC_STATUS_INIT;
2683 break;
2686 /* We have a real machine instruction as rtl. */
2688 body = PATTERN (insn);
2690 #if HAVE_cc0
2691 set = single_set (insn);
2693 /* Check for redundant test and compare instructions
2694 (when the condition codes are already set up as desired).
2695 This is done only when optimizing; if not optimizing,
2696 it should be possible for the user to alter a variable
2697 with the debugger in between statements
2698 and the next statement should reexamine the variable
2699 to compute the condition codes. */
2701 if (optimize_p)
2703 if (set
2704 && GET_CODE (SET_DEST (set)) == CC0
2705 && insn != last_ignored_compare)
2707 rtx src1, src2;
2708 if (GET_CODE (SET_SRC (set)) == SUBREG)
2709 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2711 src1 = SET_SRC (set);
2712 src2 = NULL_RTX;
2713 if (GET_CODE (SET_SRC (set)) == COMPARE)
2715 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2716 XEXP (SET_SRC (set), 0)
2717 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2718 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2719 XEXP (SET_SRC (set), 1)
2720 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2721 if (XEXP (SET_SRC (set), 1)
2722 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2723 src2 = XEXP (SET_SRC (set), 0);
2725 if ((cc_status.value1 != 0
2726 && rtx_equal_p (src1, cc_status.value1))
2727 || (cc_status.value2 != 0
2728 && rtx_equal_p (src1, cc_status.value2))
2729 || (src2 != 0 && cc_status.value1 != 0
2730 && rtx_equal_p (src2, cc_status.value1))
2731 || (src2 != 0 && cc_status.value2 != 0
2732 && rtx_equal_p (src2, cc_status.value2)))
2734 /* Don't delete insn if it has an addressing side-effect. */
2735 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2736 /* or if anything in it is volatile. */
2737 && ! volatile_refs_p (PATTERN (insn)))
2739 /* We don't really delete the insn; just ignore it. */
2740 last_ignored_compare = insn;
2741 break;
2747 /* If this is a conditional branch, maybe modify it
2748 if the cc's are in a nonstandard state
2749 so that it accomplishes the same thing that it would
2750 do straightforwardly if the cc's were set up normally. */
2752 if (cc_status.flags != 0
2753 && JUMP_P (insn)
2754 && GET_CODE (body) == SET
2755 && SET_DEST (body) == pc_rtx
2756 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2757 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2758 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2760 /* This function may alter the contents of its argument
2761 and clear some of the cc_status.flags bits.
2762 It may also return 1 meaning condition now always true
2763 or -1 meaning condition now always false
2764 or 2 meaning condition nontrivial but altered. */
2765 int result = alter_cond (XEXP (SET_SRC (body), 0));
2766 /* If condition now has fixed value, replace the IF_THEN_ELSE
2767 with its then-operand or its else-operand. */
2768 if (result == 1)
2769 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2770 if (result == -1)
2771 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2773 /* The jump is now either unconditional or a no-op.
2774 If it has become a no-op, don't try to output it.
2775 (It would not be recognized.) */
2776 if (SET_SRC (body) == pc_rtx)
2778 delete_insn (insn);
2779 break;
2781 else if (ANY_RETURN_P (SET_SRC (body)))
2782 /* Replace (set (pc) (return)) with (return). */
2783 PATTERN (insn) = body = SET_SRC (body);
2785 /* Rerecognize the instruction if it has changed. */
2786 if (result != 0)
2787 INSN_CODE (insn) = -1;
2790 /* If this is a conditional trap, maybe modify it if the cc's
2791 are in a nonstandard state so that it accomplishes the same
2792 thing that it would do straightforwardly if the cc's were
2793 set up normally. */
2794 if (cc_status.flags != 0
2795 && NONJUMP_INSN_P (insn)
2796 && GET_CODE (body) == TRAP_IF
2797 && COMPARISON_P (TRAP_CONDITION (body))
2798 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2800 /* This function may alter the contents of its argument
2801 and clear some of the cc_status.flags bits.
2802 It may also return 1 meaning condition now always true
2803 or -1 meaning condition now always false
2804 or 2 meaning condition nontrivial but altered. */
2805 int result = alter_cond (TRAP_CONDITION (body));
2807 /* If TRAP_CONDITION has become always false, delete the
2808 instruction. */
2809 if (result == -1)
2811 delete_insn (insn);
2812 break;
2815 /* If TRAP_CONDITION has become always true, replace
2816 TRAP_CONDITION with const_true_rtx. */
2817 if (result == 1)
2818 TRAP_CONDITION (body) = const_true_rtx;
2820 /* Rerecognize the instruction if it has changed. */
2821 if (result != 0)
2822 INSN_CODE (insn) = -1;
2825 /* Make same adjustments to instructions that examine the
2826 condition codes without jumping and instructions that
2827 handle conditional moves (if this machine has either one). */
2829 if (cc_status.flags != 0
2830 && set != 0)
2832 rtx cond_rtx, then_rtx, else_rtx;
2834 if (!JUMP_P (insn)
2835 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2837 cond_rtx = XEXP (SET_SRC (set), 0);
2838 then_rtx = XEXP (SET_SRC (set), 1);
2839 else_rtx = XEXP (SET_SRC (set), 2);
2841 else
2843 cond_rtx = SET_SRC (set);
2844 then_rtx = const_true_rtx;
2845 else_rtx = const0_rtx;
2848 if (COMPARISON_P (cond_rtx)
2849 && XEXP (cond_rtx, 0) == cc0_rtx)
2851 int result;
2852 result = alter_cond (cond_rtx);
2853 if (result == 1)
2854 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2855 else if (result == -1)
2856 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2857 else if (result == 2)
2858 INSN_CODE (insn) = -1;
2859 if (SET_DEST (set) == SET_SRC (set))
2860 delete_insn (insn);
2864 #endif
2866 /* Do machine-specific peephole optimizations if desired. */
2868 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2870 rtx_insn *next = peephole (insn);
2871 /* When peepholing, if there were notes within the peephole,
2872 emit them before the peephole. */
2873 if (next != 0 && next != NEXT_INSN (insn))
2875 rtx_insn *note, *prev = PREV_INSN (insn);
2877 for (note = NEXT_INSN (insn); note != next;
2878 note = NEXT_INSN (note))
2879 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2881 /* Put the notes in the proper position for a later
2882 rescan. For example, the SH target can do this
2883 when generating a far jump in a delayed branch
2884 sequence. */
2885 note = NEXT_INSN (insn);
2886 SET_PREV_INSN (note) = prev;
2887 SET_NEXT_INSN (prev) = note;
2888 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2889 SET_PREV_INSN (insn) = PREV_INSN (next);
2890 SET_NEXT_INSN (insn) = next;
2891 SET_PREV_INSN (next) = insn;
2894 /* PEEPHOLE might have changed this. */
2895 body = PATTERN (insn);
2898 /* Try to recognize the instruction.
2899 If successful, verify that the operands satisfy the
2900 constraints for the instruction. Crash if they don't,
2901 since `reload' should have changed them so that they do. */
2903 insn_code_number = recog_memoized (insn);
2904 cleanup_subreg_operands (insn);
2906 /* Dump the insn in the assembly for debugging (-dAP).
2907 If the final dump is requested as slim RTL, dump slim
2908 RTL to the assembly file also. */
2909 if (flag_dump_rtl_in_asm)
2911 print_rtx_head = ASM_COMMENT_START;
2912 if (! (dump_flags & TDF_SLIM))
2913 print_rtl_single (asm_out_file, insn);
2914 else
2915 dump_insn_slim (asm_out_file, insn);
2916 print_rtx_head = "";
2919 if (! constrain_operands_cached (insn, 1))
2920 fatal_insn_not_found (insn);
2922 /* Some target machines need to prescan each insn before
2923 it is output. */
2925 #ifdef FINAL_PRESCAN_INSN
2926 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2927 #endif
2929 if (targetm.have_conditional_execution ()
2930 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2931 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2933 #if HAVE_cc0
2934 cc_prev_status = cc_status;
2936 /* Update `cc_status' for this instruction.
2937 The instruction's output routine may change it further.
2938 If the output routine for a jump insn needs to depend
2939 on the cc status, it should look at cc_prev_status. */
2941 NOTICE_UPDATE_CC (body, insn);
2942 #endif
2944 current_output_insn = debug_insn = insn;
2946 /* Find the proper template for this insn. */
2947 templ = get_insn_template (insn_code_number, insn);
2949 /* If the C code returns 0, it means that it is a jump insn
2950 which follows a deleted test insn, and that test insn
2951 needs to be reinserted. */
2952 if (templ == 0)
2954 rtx_insn *prev;
2956 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2958 /* We have already processed the notes between the setter and
2959 the user. Make sure we don't process them again, this is
2960 particularly important if one of the notes is a block
2961 scope note or an EH note. */
2962 for (prev = insn;
2963 prev != last_ignored_compare;
2964 prev = PREV_INSN (prev))
2966 if (NOTE_P (prev))
2967 delete_insn (prev); /* Use delete_note. */
2970 return prev;
2973 /* If the template is the string "#", it means that this insn must
2974 be split. */
2975 if (templ[0] == '#' && templ[1] == '\0')
2977 rtx_insn *new_rtx = try_split (body, insn, 0);
2979 /* If we didn't split the insn, go away. */
2980 if (new_rtx == insn && PATTERN (new_rtx) == body)
2981 fatal_insn ("could not split insn", insn);
2983 /* If we have a length attribute, this instruction should have
2984 been split in shorten_branches, to ensure that we would have
2985 valid length info for the splitees. */
2986 gcc_assert (!HAVE_ATTR_length);
2988 return new_rtx;
2991 /* ??? This will put the directives in the wrong place if
2992 get_insn_template outputs assembly directly. However calling it
2993 before get_insn_template breaks if the insns is split. */
2994 if (targetm.asm_out.unwind_emit_before_insn
2995 && targetm.asm_out.unwind_emit)
2996 targetm.asm_out.unwind_emit (asm_out_file, insn);
2998 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2999 if (call_insn != NULL)
3001 rtx x = call_from_call_insn (call_insn);
3002 x = XEXP (x, 0);
3003 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3005 tree t;
3006 x = XEXP (x, 0);
3007 t = SYMBOL_REF_DECL (x);
3008 if (t)
3009 assemble_external (t);
3013 /* Output assembler code from the template. */
3014 output_asm_insn (templ, recog_data.operand);
3016 /* Some target machines need to postscan each insn after
3017 it is output. */
3018 if (targetm.asm_out.final_postscan_insn)
3019 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3020 recog_data.n_operands);
3022 if (!targetm.asm_out.unwind_emit_before_insn
3023 && targetm.asm_out.unwind_emit)
3024 targetm.asm_out.unwind_emit (asm_out_file, insn);
3026 /* Let the debug info back-end know about this call. We do this only
3027 after the instruction has been emitted because labels that may be
3028 created to reference the call instruction must appear after it. */
3029 if (call_insn != NULL && !DECL_IGNORED_P (current_function_decl))
3030 debug_hooks->var_location (insn);
3032 current_output_insn = debug_insn = 0;
3035 return NEXT_INSN (insn);
3038 /* Return whether a source line note needs to be emitted before INSN.
3039 Sets IS_STMT to TRUE if the line should be marked as a possible
3040 breakpoint location. */
3042 static bool
3043 notice_source_line (rtx_insn *insn, bool *is_stmt)
3045 const char *filename;
3046 int linenum;
3048 if (override_filename)
3050 filename = override_filename;
3051 linenum = override_linenum;
3053 else if (INSN_HAS_LOCATION (insn))
3055 expanded_location xloc = insn_location (insn);
3056 filename = xloc.file;
3057 linenum = xloc.line;
3059 else
3061 filename = NULL;
3062 linenum = 0;
3065 if (filename == NULL)
3066 return false;
3068 if (force_source_line
3069 || filename != last_filename
3070 || last_linenum != linenum)
3072 force_source_line = false;
3073 last_filename = filename;
3074 last_linenum = linenum;
3075 last_discriminator = discriminator;
3076 *is_stmt = true;
3077 high_block_linenum = MAX (last_linenum, high_block_linenum);
3078 high_function_linenum = MAX (last_linenum, high_function_linenum);
3079 return true;
3082 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3084 /* If the discriminator changed, but the line number did not,
3085 output the line table entry with is_stmt false so the
3086 debugger does not treat this as a breakpoint location. */
3087 last_discriminator = discriminator;
3088 *is_stmt = false;
3089 return true;
3092 return false;
3095 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3096 directly to the desired hard register. */
3098 void
3099 cleanup_subreg_operands (rtx_insn *insn)
3101 int i;
3102 bool changed = false;
3103 extract_insn_cached (insn);
3104 for (i = 0; i < recog_data.n_operands; i++)
3106 /* The following test cannot use recog_data.operand when testing
3107 for a SUBREG: the underlying object might have been changed
3108 already if we are inside a match_operator expression that
3109 matches the else clause. Instead we test the underlying
3110 expression directly. */
3111 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3113 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3114 changed = true;
3116 else if (GET_CODE (recog_data.operand[i]) == PLUS
3117 || GET_CODE (recog_data.operand[i]) == MULT
3118 || MEM_P (recog_data.operand[i]))
3119 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3122 for (i = 0; i < recog_data.n_dups; i++)
3124 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3126 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3127 changed = true;
3129 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3130 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3131 || MEM_P (*recog_data.dup_loc[i]))
3132 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3134 if (changed)
3135 df_insn_rescan (insn);
3138 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3139 the thing it is a subreg of. Do it anyway if FINAL_P. */
3142 alter_subreg (rtx *xp, bool final_p)
3144 rtx x = *xp;
3145 rtx y = SUBREG_REG (x);
3147 /* simplify_subreg does not remove subreg from volatile references.
3148 We are required to. */
3149 if (MEM_P (y))
3151 int offset = SUBREG_BYTE (x);
3153 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3154 contains 0 instead of the proper offset. See simplify_subreg. */
3155 if (offset == 0
3156 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3158 int difference = GET_MODE_SIZE (GET_MODE (y))
3159 - GET_MODE_SIZE (GET_MODE (x));
3160 if (WORDS_BIG_ENDIAN)
3161 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3162 if (BYTES_BIG_ENDIAN)
3163 offset += difference % UNITS_PER_WORD;
3166 if (final_p)
3167 *xp = adjust_address (y, GET_MODE (x), offset);
3168 else
3169 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3171 else if (REG_P (y) && HARD_REGISTER_P (y))
3173 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3174 SUBREG_BYTE (x));
3176 if (new_rtx != 0)
3177 *xp = new_rtx;
3178 else if (final_p && REG_P (y))
3180 /* Simplify_subreg can't handle some REG cases, but we have to. */
3181 unsigned int regno;
3182 HOST_WIDE_INT offset;
3184 regno = subreg_regno (x);
3185 if (subreg_lowpart_p (x))
3186 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3187 else
3188 offset = SUBREG_BYTE (x);
3189 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3193 return *xp;
3196 /* Do alter_subreg on all the SUBREGs contained in X. */
3198 static rtx
3199 walk_alter_subreg (rtx *xp, bool *changed)
3201 rtx x = *xp;
3202 switch (GET_CODE (x))
3204 case PLUS:
3205 case MULT:
3206 case AND:
3207 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3208 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3209 break;
3211 case MEM:
3212 case ZERO_EXTEND:
3213 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3214 break;
3216 case SUBREG:
3217 *changed = true;
3218 return alter_subreg (xp, true);
3220 default:
3221 break;
3224 return *xp;
3227 #if HAVE_cc0
3229 /* Given BODY, the body of a jump instruction, alter the jump condition
3230 as required by the bits that are set in cc_status.flags.
3231 Not all of the bits there can be handled at this level in all cases.
3233 The value is normally 0.
3234 1 means that the condition has become always true.
3235 -1 means that the condition has become always false.
3236 2 means that COND has been altered. */
3238 static int
3239 alter_cond (rtx cond)
3241 int value = 0;
3243 if (cc_status.flags & CC_REVERSED)
3245 value = 2;
3246 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3249 if (cc_status.flags & CC_INVERTED)
3251 value = 2;
3252 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3255 if (cc_status.flags & CC_NOT_POSITIVE)
3256 switch (GET_CODE (cond))
3258 case LE:
3259 case LEU:
3260 case GEU:
3261 /* Jump becomes unconditional. */
3262 return 1;
3264 case GT:
3265 case GTU:
3266 case LTU:
3267 /* Jump becomes no-op. */
3268 return -1;
3270 case GE:
3271 PUT_CODE (cond, EQ);
3272 value = 2;
3273 break;
3275 case LT:
3276 PUT_CODE (cond, NE);
3277 value = 2;
3278 break;
3280 default:
3281 break;
3284 if (cc_status.flags & CC_NOT_NEGATIVE)
3285 switch (GET_CODE (cond))
3287 case GE:
3288 case GEU:
3289 /* Jump becomes unconditional. */
3290 return 1;
3292 case LT:
3293 case LTU:
3294 /* Jump becomes no-op. */
3295 return -1;
3297 case LE:
3298 case LEU:
3299 PUT_CODE (cond, EQ);
3300 value = 2;
3301 break;
3303 case GT:
3304 case GTU:
3305 PUT_CODE (cond, NE);
3306 value = 2;
3307 break;
3309 default:
3310 break;
3313 if (cc_status.flags & CC_NO_OVERFLOW)
3314 switch (GET_CODE (cond))
3316 case GEU:
3317 /* Jump becomes unconditional. */
3318 return 1;
3320 case LEU:
3321 PUT_CODE (cond, EQ);
3322 value = 2;
3323 break;
3325 case GTU:
3326 PUT_CODE (cond, NE);
3327 value = 2;
3328 break;
3330 case LTU:
3331 /* Jump becomes no-op. */
3332 return -1;
3334 default:
3335 break;
3338 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3339 switch (GET_CODE (cond))
3341 default:
3342 gcc_unreachable ();
3344 case NE:
3345 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3346 value = 2;
3347 break;
3349 case EQ:
3350 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3351 value = 2;
3352 break;
3355 if (cc_status.flags & CC_NOT_SIGNED)
3356 /* The flags are valid if signed condition operators are converted
3357 to unsigned. */
3358 switch (GET_CODE (cond))
3360 case LE:
3361 PUT_CODE (cond, LEU);
3362 value = 2;
3363 break;
3365 case LT:
3366 PUT_CODE (cond, LTU);
3367 value = 2;
3368 break;
3370 case GT:
3371 PUT_CODE (cond, GTU);
3372 value = 2;
3373 break;
3375 case GE:
3376 PUT_CODE (cond, GEU);
3377 value = 2;
3378 break;
3380 default:
3381 break;
3384 return value;
3386 #endif
3388 /* Report inconsistency between the assembler template and the operands.
3389 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3391 void
3392 output_operand_lossage (const char *cmsgid, ...)
3394 char *fmt_string;
3395 char *new_message;
3396 const char *pfx_str;
3397 va_list ap;
3399 va_start (ap, cmsgid);
3401 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3402 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3403 new_message = xvasprintf (fmt_string, ap);
3405 if (this_is_asm_operands)
3406 error_for_asm (this_is_asm_operands, "%s", new_message);
3407 else
3408 internal_error ("%s", new_message);
3410 free (fmt_string);
3411 free (new_message);
3412 va_end (ap);
3415 /* Output of assembler code from a template, and its subroutines. */
3417 /* Annotate the assembly with a comment describing the pattern and
3418 alternative used. */
3420 static void
3421 output_asm_name (void)
3423 if (debug_insn)
3425 int num = INSN_CODE (debug_insn);
3426 fprintf (asm_out_file, "\t%s %d\t%s",
3427 ASM_COMMENT_START, INSN_UID (debug_insn),
3428 insn_data[num].name);
3429 if (insn_data[num].n_alternatives > 1)
3430 fprintf (asm_out_file, "/%d", which_alternative + 1);
3432 if (HAVE_ATTR_length)
3433 fprintf (asm_out_file, "\t[length = %d]",
3434 get_attr_length (debug_insn));
3436 /* Clear this so only the first assembler insn
3437 of any rtl insn will get the special comment for -dp. */
3438 debug_insn = 0;
3442 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3443 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3444 corresponds to the address of the object and 0 if to the object. */
3446 static tree
3447 get_mem_expr_from_op (rtx op, int *paddressp)
3449 tree expr;
3450 int inner_addressp;
3452 *paddressp = 0;
3454 if (REG_P (op))
3455 return REG_EXPR (op);
3456 else if (!MEM_P (op))
3457 return 0;
3459 if (MEM_EXPR (op) != 0)
3460 return MEM_EXPR (op);
3462 /* Otherwise we have an address, so indicate it and look at the address. */
3463 *paddressp = 1;
3464 op = XEXP (op, 0);
3466 /* First check if we have a decl for the address, then look at the right side
3467 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3468 But don't allow the address to itself be indirect. */
3469 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3470 return expr;
3471 else if (GET_CODE (op) == PLUS
3472 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3473 return expr;
3475 while (UNARY_P (op)
3476 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3477 op = XEXP (op, 0);
3479 expr = get_mem_expr_from_op (op, &inner_addressp);
3480 return inner_addressp ? 0 : expr;
3483 /* Output operand names for assembler instructions. OPERANDS is the
3484 operand vector, OPORDER is the order to write the operands, and NOPS
3485 is the number of operands to write. */
3487 static void
3488 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3490 int wrote = 0;
3491 int i;
3493 for (i = 0; i < nops; i++)
3495 int addressp;
3496 rtx op = operands[oporder[i]];
3497 tree expr = get_mem_expr_from_op (op, &addressp);
3499 fprintf (asm_out_file, "%c%s",
3500 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3501 wrote = 1;
3502 if (expr)
3504 fprintf (asm_out_file, "%s",
3505 addressp ? "*" : "");
3506 print_mem_expr (asm_out_file, expr);
3507 wrote = 1;
3509 else if (REG_P (op) && ORIGINAL_REGNO (op)
3510 && ORIGINAL_REGNO (op) != REGNO (op))
3511 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3515 #ifdef ASSEMBLER_DIALECT
3516 /* Helper function to parse assembler dialects in the asm string.
3517 This is called from output_asm_insn and asm_fprintf. */
3518 static const char *
3519 do_assembler_dialects (const char *p, int *dialect)
3521 char c = *(p - 1);
3523 switch (c)
3525 case '{':
3527 int i;
3529 if (*dialect)
3530 output_operand_lossage ("nested assembly dialect alternatives");
3531 else
3532 *dialect = 1;
3534 /* If we want the first dialect, do nothing. Otherwise, skip
3535 DIALECT_NUMBER of strings ending with '|'. */
3536 for (i = 0; i < dialect_number; i++)
3538 while (*p && *p != '}')
3540 if (*p == '|')
3542 p++;
3543 break;
3546 /* Skip over any character after a percent sign. */
3547 if (*p == '%')
3548 p++;
3549 if (*p)
3550 p++;
3553 if (*p == '}')
3554 break;
3557 if (*p == '\0')
3558 output_operand_lossage ("unterminated assembly dialect alternative");
3560 break;
3562 case '|':
3563 if (*dialect)
3565 /* Skip to close brace. */
3568 if (*p == '\0')
3570 output_operand_lossage ("unterminated assembly dialect alternative");
3571 break;
3574 /* Skip over any character after a percent sign. */
3575 if (*p == '%' && p[1])
3577 p += 2;
3578 continue;
3581 if (*p++ == '}')
3582 break;
3584 while (1);
3586 *dialect = 0;
3588 else
3589 putc (c, asm_out_file);
3590 break;
3592 case '}':
3593 if (! *dialect)
3594 putc (c, asm_out_file);
3595 *dialect = 0;
3596 break;
3597 default:
3598 gcc_unreachable ();
3601 return p;
3603 #endif
3605 /* Output text from TEMPLATE to the assembler output file,
3606 obeying %-directions to substitute operands taken from
3607 the vector OPERANDS.
3609 %N (for N a digit) means print operand N in usual manner.
3610 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3611 and print the label name with no punctuation.
3612 %cN means require operand N to be a constant
3613 and print the constant expression with no punctuation.
3614 %aN means expect operand N to be a memory address
3615 (not a memory reference!) and print a reference
3616 to that address.
3617 %nN means expect operand N to be a constant
3618 and print a constant expression for minus the value
3619 of the operand, with no other punctuation. */
3621 void
3622 output_asm_insn (const char *templ, rtx *operands)
3624 const char *p;
3625 int c;
3626 #ifdef ASSEMBLER_DIALECT
3627 int dialect = 0;
3628 #endif
3629 int oporder[MAX_RECOG_OPERANDS];
3630 char opoutput[MAX_RECOG_OPERANDS];
3631 int ops = 0;
3633 /* An insn may return a null string template
3634 in a case where no assembler code is needed. */
3635 if (*templ == 0)
3636 return;
3638 memset (opoutput, 0, sizeof opoutput);
3639 p = templ;
3640 putc ('\t', asm_out_file);
3642 #ifdef ASM_OUTPUT_OPCODE
3643 ASM_OUTPUT_OPCODE (asm_out_file, p);
3644 #endif
3646 while ((c = *p++))
3647 switch (c)
3649 case '\n':
3650 if (flag_verbose_asm)
3651 output_asm_operand_names (operands, oporder, ops);
3652 if (flag_print_asm_name)
3653 output_asm_name ();
3655 ops = 0;
3656 memset (opoutput, 0, sizeof opoutput);
3658 putc (c, asm_out_file);
3659 #ifdef ASM_OUTPUT_OPCODE
3660 while ((c = *p) == '\t')
3662 putc (c, asm_out_file);
3663 p++;
3665 ASM_OUTPUT_OPCODE (asm_out_file, p);
3666 #endif
3667 break;
3669 #ifdef ASSEMBLER_DIALECT
3670 case '{':
3671 case '}':
3672 case '|':
3673 p = do_assembler_dialects (p, &dialect);
3674 break;
3675 #endif
3677 case '%':
3678 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3679 if ASSEMBLER_DIALECT defined and these characters have a special
3680 meaning as dialect delimiters.*/
3681 if (*p == '%'
3682 #ifdef ASSEMBLER_DIALECT
3683 || *p == '{' || *p == '}' || *p == '|'
3684 #endif
3687 putc (*p, asm_out_file);
3688 p++;
3690 /* %= outputs a number which is unique to each insn in the entire
3691 compilation. This is useful for making local labels that are
3692 referred to more than once in a given insn. */
3693 else if (*p == '=')
3695 p++;
3696 fprintf (asm_out_file, "%d", insn_counter);
3698 /* % followed by a letter and some digits
3699 outputs an operand in a special way depending on the letter.
3700 Letters `acln' are implemented directly.
3701 Other letters are passed to `output_operand' so that
3702 the TARGET_PRINT_OPERAND hook can define them. */
3703 else if (ISALPHA (*p))
3705 int letter = *p++;
3706 unsigned long opnum;
3707 char *endptr;
3709 opnum = strtoul (p, &endptr, 10);
3711 if (endptr == p)
3712 output_operand_lossage ("operand number missing "
3713 "after %%-letter");
3714 else if (this_is_asm_operands && opnum >= insn_noperands)
3715 output_operand_lossage ("operand number out of range");
3716 else if (letter == 'l')
3717 output_asm_label (operands[opnum]);
3718 else if (letter == 'a')
3719 output_address (VOIDmode, operands[opnum]);
3720 else if (letter == 'c')
3722 if (CONSTANT_ADDRESS_P (operands[opnum]))
3723 output_addr_const (asm_out_file, operands[opnum]);
3724 else
3725 output_operand (operands[opnum], 'c');
3727 else if (letter == 'n')
3729 if (CONST_INT_P (operands[opnum]))
3730 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3731 - INTVAL (operands[opnum]));
3732 else
3734 putc ('-', asm_out_file);
3735 output_addr_const (asm_out_file, operands[opnum]);
3738 else
3739 output_operand (operands[opnum], letter);
3741 if (!opoutput[opnum])
3742 oporder[ops++] = opnum;
3743 opoutput[opnum] = 1;
3745 p = endptr;
3746 c = *p;
3748 /* % followed by a digit outputs an operand the default way. */
3749 else if (ISDIGIT (*p))
3751 unsigned long opnum;
3752 char *endptr;
3754 opnum = strtoul (p, &endptr, 10);
3755 if (this_is_asm_operands && opnum >= insn_noperands)
3756 output_operand_lossage ("operand number out of range");
3757 else
3758 output_operand (operands[opnum], 0);
3760 if (!opoutput[opnum])
3761 oporder[ops++] = opnum;
3762 opoutput[opnum] = 1;
3764 p = endptr;
3765 c = *p;
3767 /* % followed by punctuation: output something for that
3768 punctuation character alone, with no operand. The
3769 TARGET_PRINT_OPERAND hook decides what is actually done. */
3770 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3771 output_operand (NULL_RTX, *p++);
3772 else
3773 output_operand_lossage ("invalid %%-code");
3774 break;
3776 default:
3777 putc (c, asm_out_file);
3780 /* Write out the variable names for operands, if we know them. */
3781 if (flag_verbose_asm)
3782 output_asm_operand_names (operands, oporder, ops);
3783 if (flag_print_asm_name)
3784 output_asm_name ();
3786 putc ('\n', asm_out_file);
3789 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3791 void
3792 output_asm_label (rtx x)
3794 char buf[256];
3796 if (GET_CODE (x) == LABEL_REF)
3797 x = LABEL_REF_LABEL (x);
3798 if (LABEL_P (x)
3799 || (NOTE_P (x)
3800 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3801 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3802 else
3803 output_operand_lossage ("'%%l' operand isn't a label");
3805 assemble_name (asm_out_file, buf);
3808 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3810 void
3811 mark_symbol_refs_as_used (rtx x)
3813 subrtx_iterator::array_type array;
3814 FOR_EACH_SUBRTX (iter, array, x, ALL)
3816 const_rtx x = *iter;
3817 if (GET_CODE (x) == SYMBOL_REF)
3818 if (tree t = SYMBOL_REF_DECL (x))
3819 assemble_external (t);
3823 /* Print operand X using machine-dependent assembler syntax.
3824 CODE is a non-digit that preceded the operand-number in the % spec,
3825 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3826 between the % and the digits.
3827 When CODE is a non-letter, X is 0.
3829 The meanings of the letters are machine-dependent and controlled
3830 by TARGET_PRINT_OPERAND. */
3832 void
3833 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3835 if (x && GET_CODE (x) == SUBREG)
3836 x = alter_subreg (&x, true);
3838 /* X must not be a pseudo reg. */
3839 if (!targetm.no_register_allocation)
3840 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3842 targetm.asm_out.print_operand (asm_out_file, x, code);
3844 if (x == NULL_RTX)
3845 return;
3847 mark_symbol_refs_as_used (x);
3850 /* Print a memory reference operand for address X using
3851 machine-dependent assembler syntax. */
3853 void
3854 output_address (machine_mode mode, rtx x)
3856 bool changed = false;
3857 walk_alter_subreg (&x, &changed);
3858 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3861 /* Print an integer constant expression in assembler syntax.
3862 Addition and subtraction are the only arithmetic
3863 that may appear in these expressions. */
3865 void
3866 output_addr_const (FILE *file, rtx x)
3868 char buf[256];
3870 restart:
3871 switch (GET_CODE (x))
3873 case PC:
3874 putc ('.', file);
3875 break;
3877 case SYMBOL_REF:
3878 if (SYMBOL_REF_DECL (x))
3879 assemble_external (SYMBOL_REF_DECL (x));
3880 #ifdef ASM_OUTPUT_SYMBOL_REF
3881 ASM_OUTPUT_SYMBOL_REF (file, x);
3882 #else
3883 assemble_name (file, XSTR (x, 0));
3884 #endif
3885 break;
3887 case LABEL_REF:
3888 x = LABEL_REF_LABEL (x);
3889 /* Fall through. */
3890 case CODE_LABEL:
3891 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3892 #ifdef ASM_OUTPUT_LABEL_REF
3893 ASM_OUTPUT_LABEL_REF (file, buf);
3894 #else
3895 assemble_name (file, buf);
3896 #endif
3897 break;
3899 case CONST_INT:
3900 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3901 break;
3903 case CONST:
3904 /* This used to output parentheses around the expression,
3905 but that does not work on the 386 (either ATT or BSD assembler). */
3906 output_addr_const (file, XEXP (x, 0));
3907 break;
3909 case CONST_WIDE_INT:
3910 /* We do not know the mode here so we have to use a round about
3911 way to build a wide-int to get it printed properly. */
3913 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3914 CONST_WIDE_INT_NUNITS (x),
3915 CONST_WIDE_INT_NUNITS (x)
3916 * HOST_BITS_PER_WIDE_INT,
3917 false);
3918 print_decs (w, file);
3920 break;
3922 case CONST_DOUBLE:
3923 if (CONST_DOUBLE_AS_INT_P (x))
3925 /* We can use %d if the number is one word and positive. */
3926 if (CONST_DOUBLE_HIGH (x))
3927 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3928 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3929 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3930 else if (CONST_DOUBLE_LOW (x) < 0)
3931 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3932 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3933 else
3934 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3936 else
3937 /* We can't handle floating point constants;
3938 PRINT_OPERAND must handle them. */
3939 output_operand_lossage ("floating constant misused");
3940 break;
3942 case CONST_FIXED:
3943 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3944 break;
3946 case PLUS:
3947 /* Some assemblers need integer constants to appear last (eg masm). */
3948 if (CONST_INT_P (XEXP (x, 0)))
3950 output_addr_const (file, XEXP (x, 1));
3951 if (INTVAL (XEXP (x, 0)) >= 0)
3952 fprintf (file, "+");
3953 output_addr_const (file, XEXP (x, 0));
3955 else
3957 output_addr_const (file, XEXP (x, 0));
3958 if (!CONST_INT_P (XEXP (x, 1))
3959 || INTVAL (XEXP (x, 1)) >= 0)
3960 fprintf (file, "+");
3961 output_addr_const (file, XEXP (x, 1));
3963 break;
3965 case MINUS:
3966 /* Avoid outputting things like x-x or x+5-x,
3967 since some assemblers can't handle that. */
3968 x = simplify_subtraction (x);
3969 if (GET_CODE (x) != MINUS)
3970 goto restart;
3972 output_addr_const (file, XEXP (x, 0));
3973 fprintf (file, "-");
3974 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3975 || GET_CODE (XEXP (x, 1)) == PC
3976 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3977 output_addr_const (file, XEXP (x, 1));
3978 else
3980 fputs (targetm.asm_out.open_paren, file);
3981 output_addr_const (file, XEXP (x, 1));
3982 fputs (targetm.asm_out.close_paren, file);
3984 break;
3986 case ZERO_EXTEND:
3987 case SIGN_EXTEND:
3988 case SUBREG:
3989 case TRUNCATE:
3990 output_addr_const (file, XEXP (x, 0));
3991 break;
3993 default:
3994 if (targetm.asm_out.output_addr_const_extra (file, x))
3995 break;
3997 output_operand_lossage ("invalid expression as operand");
4001 /* Output a quoted string. */
4003 void
4004 output_quoted_string (FILE *asm_file, const char *string)
4006 #ifdef OUTPUT_QUOTED_STRING
4007 OUTPUT_QUOTED_STRING (asm_file, string);
4008 #else
4009 char c;
4011 putc ('\"', asm_file);
4012 while ((c = *string++) != 0)
4014 if (ISPRINT (c))
4016 if (c == '\"' || c == '\\')
4017 putc ('\\', asm_file);
4018 putc (c, asm_file);
4020 else
4021 fprintf (asm_file, "\\%03o", (unsigned char) c);
4023 putc ('\"', asm_file);
4024 #endif
4027 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4029 void
4030 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4032 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4033 if (value == 0)
4034 putc ('0', f);
4035 else
4037 char *p = buf + sizeof (buf);
4039 *--p = "0123456789abcdef"[value % 16];
4040 while ((value /= 16) != 0);
4041 *--p = 'x';
4042 *--p = '0';
4043 fwrite (p, 1, buf + sizeof (buf) - p, f);
4047 /* Internal function that prints an unsigned long in decimal in reverse.
4048 The output string IS NOT null-terminated. */
4050 static int
4051 sprint_ul_rev (char *s, unsigned long value)
4053 int i = 0;
4056 s[i] = "0123456789"[value % 10];
4057 value /= 10;
4058 i++;
4059 /* alternate version, without modulo */
4060 /* oldval = value; */
4061 /* value /= 10; */
4062 /* s[i] = "0123456789" [oldval - 10*value]; */
4063 /* i++ */
4065 while (value != 0);
4066 return i;
4069 /* Write an unsigned long as decimal to a file, fast. */
4071 void
4072 fprint_ul (FILE *f, unsigned long value)
4074 /* python says: len(str(2**64)) == 20 */
4075 char s[20];
4076 int i;
4078 i = sprint_ul_rev (s, value);
4080 /* It's probably too small to bother with string reversal and fputs. */
4083 i--;
4084 putc (s[i], f);
4086 while (i != 0);
4089 /* Write an unsigned long as decimal to a string, fast.
4090 s must be wide enough to not overflow, at least 21 chars.
4091 Returns the length of the string (without terminating '\0'). */
4094 sprint_ul (char *s, unsigned long value)
4096 int len = sprint_ul_rev (s, value);
4097 s[len] = '\0';
4099 std::reverse (s, s + len);
4100 return len;
4103 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4104 %R prints the value of REGISTER_PREFIX.
4105 %L prints the value of LOCAL_LABEL_PREFIX.
4106 %U prints the value of USER_LABEL_PREFIX.
4107 %I prints the value of IMMEDIATE_PREFIX.
4108 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4109 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4111 We handle alternate assembler dialects here, just like output_asm_insn. */
4113 void
4114 asm_fprintf (FILE *file, const char *p, ...)
4116 char buf[10];
4117 char *q, c;
4118 #ifdef ASSEMBLER_DIALECT
4119 int dialect = 0;
4120 #endif
4121 va_list argptr;
4123 va_start (argptr, p);
4125 buf[0] = '%';
4127 while ((c = *p++))
4128 switch (c)
4130 #ifdef ASSEMBLER_DIALECT
4131 case '{':
4132 case '}':
4133 case '|':
4134 p = do_assembler_dialects (p, &dialect);
4135 break;
4136 #endif
4138 case '%':
4139 c = *p++;
4140 q = &buf[1];
4141 while (strchr ("-+ #0", c))
4143 *q++ = c;
4144 c = *p++;
4146 while (ISDIGIT (c) || c == '.')
4148 *q++ = c;
4149 c = *p++;
4151 switch (c)
4153 case '%':
4154 putc ('%', file);
4155 break;
4157 case 'd': case 'i': case 'u':
4158 case 'x': case 'X': case 'o':
4159 case 'c':
4160 *q++ = c;
4161 *q = 0;
4162 fprintf (file, buf, va_arg (argptr, int));
4163 break;
4165 case 'w':
4166 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4167 'o' cases, but we do not check for those cases. It
4168 means that the value is a HOST_WIDE_INT, which may be
4169 either `long' or `long long'. */
4170 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4171 q += strlen (HOST_WIDE_INT_PRINT);
4172 *q++ = *p++;
4173 *q = 0;
4174 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4175 break;
4177 case 'l':
4178 *q++ = c;
4179 #ifdef HAVE_LONG_LONG
4180 if (*p == 'l')
4182 *q++ = *p++;
4183 *q++ = *p++;
4184 *q = 0;
4185 fprintf (file, buf, va_arg (argptr, long long));
4187 else
4188 #endif
4190 *q++ = *p++;
4191 *q = 0;
4192 fprintf (file, buf, va_arg (argptr, long));
4195 break;
4197 case 's':
4198 *q++ = c;
4199 *q = 0;
4200 fprintf (file, buf, va_arg (argptr, char *));
4201 break;
4203 case 'O':
4204 #ifdef ASM_OUTPUT_OPCODE
4205 ASM_OUTPUT_OPCODE (asm_out_file, p);
4206 #endif
4207 break;
4209 case 'R':
4210 #ifdef REGISTER_PREFIX
4211 fprintf (file, "%s", REGISTER_PREFIX);
4212 #endif
4213 break;
4215 case 'I':
4216 #ifdef IMMEDIATE_PREFIX
4217 fprintf (file, "%s", IMMEDIATE_PREFIX);
4218 #endif
4219 break;
4221 case 'L':
4222 #ifdef LOCAL_LABEL_PREFIX
4223 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4224 #endif
4225 break;
4227 case 'U':
4228 fputs (user_label_prefix, file);
4229 break;
4231 #ifdef ASM_FPRINTF_EXTENSIONS
4232 /* Uppercase letters are reserved for general use by asm_fprintf
4233 and so are not available to target specific code. In order to
4234 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4235 they are defined here. As they get turned into real extensions
4236 to asm_fprintf they should be removed from this list. */
4237 case 'A': case 'B': case 'C': case 'D': case 'E':
4238 case 'F': case 'G': case 'H': case 'J': case 'K':
4239 case 'M': case 'N': case 'P': case 'Q': case 'S':
4240 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4241 break;
4243 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4244 #endif
4245 default:
4246 gcc_unreachable ();
4248 break;
4250 default:
4251 putc (c, file);
4253 va_end (argptr);
4256 /* Return nonzero if this function has no function calls. */
4259 leaf_function_p (void)
4261 rtx_insn *insn;
4263 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4264 functions even if they call mcount. */
4265 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4266 return 0;
4268 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4270 if (CALL_P (insn)
4271 && ! SIBLING_CALL_P (insn))
4272 return 0;
4273 if (NONJUMP_INSN_P (insn)
4274 && GET_CODE (PATTERN (insn)) == SEQUENCE
4275 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4276 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4277 return 0;
4280 return 1;
4283 /* Return 1 if branch is a forward branch.
4284 Uses insn_shuid array, so it works only in the final pass. May be used by
4285 output templates to customary add branch prediction hints.
4288 final_forward_branch_p (rtx_insn *insn)
4290 int insn_id, label_id;
4292 gcc_assert (uid_shuid);
4293 insn_id = INSN_SHUID (insn);
4294 label_id = INSN_SHUID (JUMP_LABEL (insn));
4295 /* We've hit some insns that does not have id information available. */
4296 gcc_assert (insn_id && label_id);
4297 return insn_id < label_id;
4300 /* On some machines, a function with no call insns
4301 can run faster if it doesn't create its own register window.
4302 When output, the leaf function should use only the "output"
4303 registers. Ordinarily, the function would be compiled to use
4304 the "input" registers to find its arguments; it is a candidate
4305 for leaf treatment if it uses only the "input" registers.
4306 Leaf function treatment means renumbering so the function
4307 uses the "output" registers instead. */
4309 #ifdef LEAF_REGISTERS
4311 /* Return 1 if this function uses only the registers that can be
4312 safely renumbered. */
4315 only_leaf_regs_used (void)
4317 int i;
4318 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4321 if ((df_regs_ever_live_p (i) || global_regs[i])
4322 && ! permitted_reg_in_leaf_functions[i])
4323 return 0;
4325 if (crtl->uses_pic_offset_table
4326 && pic_offset_table_rtx != 0
4327 && REG_P (pic_offset_table_rtx)
4328 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4329 return 0;
4331 return 1;
4334 /* Scan all instructions and renumber all registers into those
4335 available in leaf functions. */
4337 static void
4338 leaf_renumber_regs (rtx_insn *first)
4340 rtx_insn *insn;
4342 /* Renumber only the actual patterns.
4343 The reg-notes can contain frame pointer refs,
4344 and renumbering them could crash, and should not be needed. */
4345 for (insn = first; insn; insn = NEXT_INSN (insn))
4346 if (INSN_P (insn))
4347 leaf_renumber_regs_insn (PATTERN (insn));
4350 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4351 available in leaf functions. */
4353 void
4354 leaf_renumber_regs_insn (rtx in_rtx)
4356 int i, j;
4357 const char *format_ptr;
4359 if (in_rtx == 0)
4360 return;
4362 /* Renumber all input-registers into output-registers.
4363 renumbered_regs would be 1 for an output-register;
4364 they */
4366 if (REG_P (in_rtx))
4368 int newreg;
4370 /* Don't renumber the same reg twice. */
4371 if (in_rtx->used)
4372 return;
4374 newreg = REGNO (in_rtx);
4375 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4376 to reach here as part of a REG_NOTE. */
4377 if (newreg >= FIRST_PSEUDO_REGISTER)
4379 in_rtx->used = 1;
4380 return;
4382 newreg = LEAF_REG_REMAP (newreg);
4383 gcc_assert (newreg >= 0);
4384 df_set_regs_ever_live (REGNO (in_rtx), false);
4385 df_set_regs_ever_live (newreg, true);
4386 SET_REGNO (in_rtx, newreg);
4387 in_rtx->used = 1;
4388 return;
4391 if (INSN_P (in_rtx))
4393 /* Inside a SEQUENCE, we find insns.
4394 Renumber just the patterns of these insns,
4395 just as we do for the top-level insns. */
4396 leaf_renumber_regs_insn (PATTERN (in_rtx));
4397 return;
4400 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4402 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4403 switch (*format_ptr++)
4405 case 'e':
4406 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4407 break;
4409 case 'E':
4410 if (NULL != XVEC (in_rtx, i))
4412 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4413 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4415 break;
4417 case 'S':
4418 case 's':
4419 case '0':
4420 case 'i':
4421 case 'w':
4422 case 'n':
4423 case 'u':
4424 break;
4426 default:
4427 gcc_unreachable ();
4430 #endif
4432 /* Turn the RTL into assembly. */
4433 static unsigned int
4434 rest_of_handle_final (void)
4436 const char *fnname = get_fnname_from_decl (current_function_decl);
4438 assemble_start_function (current_function_decl, fnname);
4439 final_start_function (get_insns (), asm_out_file, optimize);
4440 final (get_insns (), asm_out_file, optimize);
4441 if (flag_ipa_ra)
4442 collect_fn_hard_reg_usage ();
4443 final_end_function ();
4445 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4446 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4447 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4448 output_function_exception_table (fnname);
4450 assemble_end_function (current_function_decl, fnname);
4452 user_defined_section_attribute = false;
4454 /* Free up reg info memory. */
4455 free_reg_info ();
4457 if (! quiet_flag)
4458 fflush (asm_out_file);
4460 /* Write DBX symbols if requested. */
4462 /* Note that for those inline functions where we don't initially
4463 know for certain that we will be generating an out-of-line copy,
4464 the first invocation of this routine (rest_of_compilation) will
4465 skip over this code by doing a `goto exit_rest_of_compilation;'.
4466 Later on, wrapup_global_declarations will (indirectly) call
4467 rest_of_compilation again for those inline functions that need
4468 to have out-of-line copies generated. During that call, we
4469 *will* be routed past here. */
4471 timevar_push (TV_SYMOUT);
4472 if (!DECL_IGNORED_P (current_function_decl))
4473 debug_hooks->function_decl (current_function_decl);
4474 timevar_pop (TV_SYMOUT);
4476 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4477 DECL_INITIAL (current_function_decl) = error_mark_node;
4479 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4480 && targetm.have_ctors_dtors)
4481 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4482 decl_init_priority_lookup
4483 (current_function_decl));
4484 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4485 && targetm.have_ctors_dtors)
4486 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4487 decl_fini_priority_lookup
4488 (current_function_decl));
4489 return 0;
4492 namespace {
4494 const pass_data pass_data_final =
4496 RTL_PASS, /* type */
4497 "final", /* name */
4498 OPTGROUP_NONE, /* optinfo_flags */
4499 TV_FINAL, /* tv_id */
4500 0, /* properties_required */
4501 0, /* properties_provided */
4502 0, /* properties_destroyed */
4503 0, /* todo_flags_start */
4504 0, /* todo_flags_finish */
4507 class pass_final : public rtl_opt_pass
4509 public:
4510 pass_final (gcc::context *ctxt)
4511 : rtl_opt_pass (pass_data_final, ctxt)
4514 /* opt_pass methods: */
4515 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4517 }; // class pass_final
4519 } // anon namespace
4521 rtl_opt_pass *
4522 make_pass_final (gcc::context *ctxt)
4524 return new pass_final (ctxt);
4528 static unsigned int
4529 rest_of_handle_shorten_branches (void)
4531 /* Shorten branches. */
4532 shorten_branches (get_insns ());
4533 return 0;
4536 namespace {
4538 const pass_data pass_data_shorten_branches =
4540 RTL_PASS, /* type */
4541 "shorten", /* name */
4542 OPTGROUP_NONE, /* optinfo_flags */
4543 TV_SHORTEN_BRANCH, /* tv_id */
4544 0, /* properties_required */
4545 0, /* properties_provided */
4546 0, /* properties_destroyed */
4547 0, /* todo_flags_start */
4548 0, /* todo_flags_finish */
4551 class pass_shorten_branches : public rtl_opt_pass
4553 public:
4554 pass_shorten_branches (gcc::context *ctxt)
4555 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4558 /* opt_pass methods: */
4559 virtual unsigned int execute (function *)
4561 return rest_of_handle_shorten_branches ();
4564 }; // class pass_shorten_branches
4566 } // anon namespace
4568 rtl_opt_pass *
4569 make_pass_shorten_branches (gcc::context *ctxt)
4571 return new pass_shorten_branches (ctxt);
4575 static unsigned int
4576 rest_of_clean_state (void)
4578 rtx_insn *insn, *next;
4579 FILE *final_output = NULL;
4580 int save_unnumbered = flag_dump_unnumbered;
4581 int save_noaddr = flag_dump_noaddr;
4583 if (flag_dump_final_insns)
4585 final_output = fopen (flag_dump_final_insns, "a");
4586 if (!final_output)
4588 error ("could not open final insn dump file %qs: %m",
4589 flag_dump_final_insns);
4590 flag_dump_final_insns = NULL;
4592 else
4594 flag_dump_noaddr = flag_dump_unnumbered = 1;
4595 if (flag_compare_debug_opt || flag_compare_debug)
4596 dump_flags |= TDF_NOUID;
4597 dump_function_header (final_output, current_function_decl,
4598 dump_flags);
4599 final_insns_dump_p = true;
4601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4602 if (LABEL_P (insn))
4603 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4604 else
4606 if (NOTE_P (insn))
4607 set_block_for_insn (insn, NULL);
4608 INSN_UID (insn) = 0;
4613 /* It is very important to decompose the RTL instruction chain here:
4614 debug information keeps pointing into CODE_LABEL insns inside the function
4615 body. If these remain pointing to the other insns, we end up preserving
4616 whole RTL chain and attached detailed debug info in memory. */
4617 for (insn = get_insns (); insn; insn = next)
4619 next = NEXT_INSN (insn);
4620 SET_NEXT_INSN (insn) = NULL;
4621 SET_PREV_INSN (insn) = NULL;
4623 if (final_output
4624 && (!NOTE_P (insn) ||
4625 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4626 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4627 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4628 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4629 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4630 print_rtl_single (final_output, insn);
4633 if (final_output)
4635 flag_dump_noaddr = save_noaddr;
4636 flag_dump_unnumbered = save_unnumbered;
4637 final_insns_dump_p = false;
4639 if (fclose (final_output))
4641 error ("could not close final insn dump file %qs: %m",
4642 flag_dump_final_insns);
4643 flag_dump_final_insns = NULL;
4647 /* In case the function was not output,
4648 don't leave any temporary anonymous types
4649 queued up for sdb output. */
4650 if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
4651 sdbout_types (NULL_TREE);
4653 flag_rerun_cse_after_global_opts = 0;
4654 reload_completed = 0;
4655 epilogue_completed = 0;
4656 #ifdef STACK_REGS
4657 regstack_completed = 0;
4658 #endif
4660 /* Clear out the insn_length contents now that they are no
4661 longer valid. */
4662 init_insn_lengths ();
4664 /* Show no temporary slots allocated. */
4665 init_temp_slots ();
4667 free_bb_for_insn ();
4669 delete_tree_ssa (cfun);
4671 /* We can reduce stack alignment on call site only when we are sure that
4672 the function body just produced will be actually used in the final
4673 executable. */
4674 if (decl_binds_to_current_def_p (current_function_decl))
4676 unsigned int pref = crtl->preferred_stack_boundary;
4677 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4678 pref = crtl->stack_alignment_needed;
4679 cgraph_node::rtl_info (current_function_decl)
4680 ->preferred_incoming_stack_boundary = pref;
4683 /* Make sure volatile mem refs aren't considered valid operands for
4684 arithmetic insns. We must call this here if this is a nested inline
4685 function, since the above code leaves us in the init_recog state,
4686 and the function context push/pop code does not save/restore volatile_ok.
4688 ??? Maybe it isn't necessary for expand_start_function to call this
4689 anymore if we do it here? */
4691 init_recog_no_volatile ();
4693 /* We're done with this function. Free up memory if we can. */
4694 free_after_parsing (cfun);
4695 free_after_compilation (cfun);
4696 return 0;
4699 namespace {
4701 const pass_data pass_data_clean_state =
4703 RTL_PASS, /* type */
4704 "*clean_state", /* name */
4705 OPTGROUP_NONE, /* optinfo_flags */
4706 TV_FINAL, /* tv_id */
4707 0, /* properties_required */
4708 0, /* properties_provided */
4709 PROP_rtl, /* properties_destroyed */
4710 0, /* todo_flags_start */
4711 0, /* todo_flags_finish */
4714 class pass_clean_state : public rtl_opt_pass
4716 public:
4717 pass_clean_state (gcc::context *ctxt)
4718 : rtl_opt_pass (pass_data_clean_state, ctxt)
4721 /* opt_pass methods: */
4722 virtual unsigned int execute (function *)
4724 return rest_of_clean_state ();
4727 }; // class pass_clean_state
4729 } // anon namespace
4731 rtl_opt_pass *
4732 make_pass_clean_state (gcc::context *ctxt)
4734 return new pass_clean_state (ctxt);
4737 /* Return true if INSN is a call to the current function. */
4739 static bool
4740 self_recursive_call_p (rtx_insn *insn)
4742 tree fndecl = get_call_fndecl (insn);
4743 return (fndecl == current_function_decl
4744 && decl_binds_to_current_def_p (fndecl));
4747 /* Collect hard register usage for the current function. */
4749 static void
4750 collect_fn_hard_reg_usage (void)
4752 rtx_insn *insn;
4753 #ifdef STACK_REGS
4754 int i;
4755 #endif
4756 struct cgraph_rtl_info *node;
4757 HARD_REG_SET function_used_regs;
4759 /* ??? To be removed when all the ports have been fixed. */
4760 if (!targetm.call_fusage_contains_non_callee_clobbers)
4761 return;
4763 CLEAR_HARD_REG_SET (function_used_regs);
4765 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4767 HARD_REG_SET insn_used_regs;
4769 if (!NONDEBUG_INSN_P (insn))
4770 continue;
4772 if (CALL_P (insn)
4773 && !self_recursive_call_p (insn))
4775 if (!get_call_reg_set_usage (insn, &insn_used_regs,
4776 call_used_reg_set))
4777 return;
4779 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4782 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4783 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4786 /* Be conservative - mark fixed and global registers as used. */
4787 IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4789 #ifdef STACK_REGS
4790 /* Handle STACK_REGS conservatively, since the df-framework does not
4791 provide accurate information for them. */
4793 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4794 SET_HARD_REG_BIT (function_used_regs, i);
4795 #endif
4797 /* The information we have gathered is only interesting if it exposes a
4798 register from the call_used_regs that is not used in this function. */
4799 if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4800 return;
4802 node = cgraph_node::rtl_info (current_function_decl);
4803 gcc_assert (node != NULL);
4805 COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4806 node->function_used_regs_valid = 1;
4809 /* Get the declaration of the function called by INSN. */
4811 static tree
4812 get_call_fndecl (rtx_insn *insn)
4814 rtx note, datum;
4816 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4817 if (note == NULL_RTX)
4818 return NULL_TREE;
4820 datum = XEXP (note, 0);
4821 if (datum != NULL_RTX)
4822 return SYMBOL_REF_DECL (datum);
4824 return NULL_TREE;
4827 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4828 call targets that can be overwritten. */
4830 static struct cgraph_rtl_info *
4831 get_call_cgraph_rtl_info (rtx_insn *insn)
4833 tree fndecl;
4835 if (insn == NULL_RTX)
4836 return NULL;
4838 fndecl = get_call_fndecl (insn);
4839 if (fndecl == NULL_TREE
4840 || !decl_binds_to_current_def_p (fndecl))
4841 return NULL;
4843 return cgraph_node::rtl_info (fndecl);
4846 /* Find hard registers used by function call instruction INSN, and return them
4847 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4849 bool
4850 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4851 HARD_REG_SET default_set)
4853 if (flag_ipa_ra)
4855 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4856 if (node != NULL
4857 && node->function_used_regs_valid)
4859 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4860 AND_HARD_REG_SET (*reg_set, default_set);
4861 return true;
4865 COPY_HARD_REG_SET (*reg_set, default_set);
4866 return false;