Properly handle __cxa_pure_virtual visibility (PR lto/79760).
[official-gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue is generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "params.h"
79 #include "asan.h"
80 #include "rtl-iter.h"
81 #include "print-rtl.h"
83 #ifdef XCOFF_DEBUGGING_INFO
84 #include "xcoffout.h" /* Needed for external data declarations. */
85 #endif
87 #include "dwarf2out.h"
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
93 #include "sdbout.h"
95 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
96 So define a null default for it to save conditionalization later. */
97 #ifndef CC_STATUS_INIT
98 #define CC_STATUS_INIT
99 #endif
101 /* Is the given character a logical line separator for the assembler? */
102 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
103 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
104 #endif
106 #ifndef JUMP_TABLES_IN_TEXT_SECTION
107 #define JUMP_TABLES_IN_TEXT_SECTION 0
108 #endif
110 /* Bitflags used by final_scan_insn. */
111 #define SEEN_NOTE 1
112 #define SEEN_EMITTED 2
114 /* Last insn processed by final_scan_insn. */
115 static rtx_insn *debug_insn;
116 rtx_insn *current_output_insn;
118 /* Line number of last NOTE. */
119 static int last_linenum;
121 /* Column number of last NOTE. */
122 static int last_columnnum;
124 /* Last discriminator written to assembly. */
125 static int last_discriminator;
127 /* Discriminator of current block. */
128 static int discriminator;
130 /* Highest line number in current block. */
131 static int high_block_linenum;
133 /* Likewise for function. */
134 static int high_function_linenum;
136 /* Filename of last NOTE. */
137 static const char *last_filename;
139 /* Override filename, line and column number. */
140 static const char *override_filename;
141 static int override_linenum;
142 static int override_columnnum;
144 /* Whether to force emission of a line note before the next insn. */
145 static bool force_source_line = false;
147 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
149 /* Nonzero while outputting an `asm' with operands.
150 This means that inconsistencies are the user's fault, so don't die.
151 The precise value is the insn being output, to pass to error_for_asm. */
152 const rtx_insn *this_is_asm_operands;
154 /* Number of operands of this insn, for an `asm' with operands. */
155 static unsigned int insn_noperands;
157 /* Compare optimization flag. */
159 static rtx last_ignored_compare = 0;
161 /* Assign a unique number to each insn that is output.
162 This can be used to generate unique local labels. */
164 static int insn_counter = 0;
166 /* This variable contains machine-dependent flags (defined in tm.h)
167 set and examined by output routines
168 that describe how to interpret the condition codes properly. */
170 CC_STATUS cc_status;
172 /* During output of an insn, this contains a copy of cc_status
173 from before the insn. */
175 CC_STATUS cc_prev_status;
177 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
179 static int block_depth;
181 /* Nonzero if we have enabled APP processing of our assembler output. */
183 static int app_on;
185 /* If we are outputting an insn sequence, this contains the sequence rtx.
186 Zero otherwise. */
188 rtx_sequence *final_sequence;
190 #ifdef ASSEMBLER_DIALECT
192 /* Number of the assembler dialect to use, starting at 0. */
193 static int dialect_number;
194 #endif
196 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
197 rtx current_insn_predicate;
199 /* True if printing into -fdump-final-insns= dump. */
200 bool final_insns_dump_p;
202 /* True if profile_function should be called, but hasn't been called yet. */
203 static bool need_profile_function;
205 static int asm_insn_count (rtx);
206 static void profile_function (FILE *);
207 static void profile_after_prologue (FILE *);
208 static bool notice_source_line (rtx_insn *, bool *);
209 static rtx walk_alter_subreg (rtx *, bool *);
210 static void output_asm_name (void);
211 static void output_alternate_entry_point (FILE *, rtx_insn *);
212 static tree get_mem_expr_from_op (rtx, int *);
213 static void output_asm_operand_names (rtx *, int *, int);
214 #ifdef LEAF_REGISTERS
215 static void leaf_renumber_regs (rtx_insn *);
216 #endif
217 #if HAVE_cc0
218 static int alter_cond (rtx);
219 #endif
220 #ifndef ADDR_VEC_ALIGN
221 static int final_addr_vec_align (rtx_insn *);
222 #endif
223 static int align_fuzz (rtx, rtx, int, unsigned);
224 static void collect_fn_hard_reg_usage (void);
225 static tree get_call_fndecl (rtx_insn *);
227 /* Initialize data in final at the beginning of a compilation. */
229 void
230 init_final (const char *filename ATTRIBUTE_UNUSED)
232 app_on = 0;
233 final_sequence = 0;
235 #ifdef ASSEMBLER_DIALECT
236 dialect_number = ASSEMBLER_DIALECT;
237 #endif
240 /* Default target function prologue and epilogue assembler output.
242 If not overridden for epilogue code, then the function body itself
243 contains return instructions wherever needed. */
244 void
245 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
246 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
250 void
251 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
252 tree decl ATTRIBUTE_UNUSED,
253 bool new_is_cold ATTRIBUTE_UNUSED)
257 /* Default target hook that outputs nothing to a stream. */
258 void
259 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
263 /* Enable APP processing of subsequent output.
264 Used before the output from an `asm' statement. */
266 void
267 app_enable (void)
269 if (! app_on)
271 fputs (ASM_APP_ON, asm_out_file);
272 app_on = 1;
276 /* Disable APP processing of subsequent output.
277 Called from varasm.c before most kinds of output. */
279 void
280 app_disable (void)
282 if (app_on)
284 fputs (ASM_APP_OFF, asm_out_file);
285 app_on = 0;
289 /* Return the number of slots filled in the current
290 delayed branch sequence (we don't count the insn needing the
291 delay slot). Zero if not in a delayed branch sequence. */
294 dbr_sequence_length (void)
296 if (final_sequence != 0)
297 return XVECLEN (final_sequence, 0) - 1;
298 else
299 return 0;
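/* For example: if final_sequence holds a filled delay-slot SEQUENCE made up
   of a branch plus two delay-slot insns, XVECLEN (final_sequence, 0) is 3
   and dbr_sequence_length () returns 2; outside of such a sequence the
   function returns 0.  */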
302 /* The next two pages contain routines used to compute the length of an insn
303 and to shorten branches. */
305 /* Arrays for insn lengths, and addresses. The latter is referenced by
306 `insn_current_length'. */
308 static int *insn_lengths;
310 vec<int> insn_addresses_;
312 /* Max uid for which the above arrays are valid. */
313 static int insn_lengths_max_uid;
315 /* Address of insn being processed. Used by `insn_current_length'. */
316 int insn_current_address;
318 /* Address of insn being processed in previous iteration. */
319 int insn_last_address;
321 /* known invariant alignment of insn being processed. */
322 int insn_current_align;
324 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
325 gives the next following alignment insn that increases the known
326 alignment, or NULL_RTX if there is no such insn.
327 For any alignment obtained this way, we can again index uid_align with
328 its uid to obtain the next following align that in turn increases the
329 alignment, till we reach NULL_RTX; the sequence obtained this way
330 for each insn we'll call the alignment chain of this insn in the following
331 comments. */
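/* For example: if insn I is followed by label L1 aligned to 4 bytes and,
   further on, by label L2 aligned to 8 bytes, then uid_align[INSN_UID (I)]
   is L1, uid_align[INSN_UID (L1)] is L2 (the next alignment point that
   increases the known alignment), and uid_align[INSN_UID (L2)] is NULL_RTX
   if no later insn raises the alignment any further.  */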
333 struct label_alignment
335 short alignment;
336 short max_skip;
339 static rtx *uid_align;
340 static int *uid_shuid;
341 static struct label_alignment *label_align;
343 /* Indicate that branch shortening hasn't yet been done. */
345 void
346 init_insn_lengths (void)
348 if (uid_shuid)
350 free (uid_shuid);
351 uid_shuid = 0;
353 if (insn_lengths)
355 free (insn_lengths);
356 insn_lengths = 0;
357 insn_lengths_max_uid = 0;
359 if (HAVE_ATTR_length)
360 INSN_ADDRESSES_FREE ();
361 if (uid_align)
363 free (uid_align);
364 uid_align = 0;
368 /* Obtain the current length of an insn. If branch shortening has been done,
369 get its actual length. Otherwise, use FALLBACK_FN to calculate the
370 length. */
371 static int
372 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
374 rtx body;
375 int i;
376 int length = 0;
378 if (!HAVE_ATTR_length)
379 return 0;
381 if (insn_lengths_max_uid > INSN_UID (insn))
382 return insn_lengths[INSN_UID (insn)];
383 else
384 switch (GET_CODE (insn))
386 case NOTE:
387 case BARRIER:
388 case CODE_LABEL:
389 case DEBUG_INSN:
390 return 0;
392 case CALL_INSN:
393 case JUMP_INSN:
394 length = fallback_fn (insn);
395 break;
397 case INSN:
398 body = PATTERN (insn);
399 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
400 return 0;
402 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
403 length = asm_insn_count (body) * fallback_fn (insn);
404 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
405 for (i = 0; i < seq->len (); i++)
406 length += get_attr_length_1 (seq->insn (i), fallback_fn);
407 else
408 length = fallback_fn (insn);
409 break;
411 default:
412 break;
415 #ifdef ADJUST_INSN_LENGTH
416 ADJUST_INSN_LENGTH (insn, length);
417 #endif
418 return length;
421 /* Obtain the current length of an insn. If branch shortening has been done,
422 get its actual length. Otherwise, get its maximum length. */
424 get_attr_length (rtx_insn *insn)
426 return get_attr_length_1 (insn, insn_default_length);
429 /* Obtain the current length of an insn. If branch shortening has been done,
430 get its actual length. Otherwise, get its minimum length. */
432 get_attr_min_length (rtx_insn *insn)
434 return get_attr_length_1 (insn, insn_min_length);
437 /* Code to handle alignment inside shorten_branches. */
439 /* Here is an explanation of how the algorithm in align_fuzz can give
440 proper results:
442 Call a sequence of instructions beginning with alignment point X
443 and continuing until the next alignment point `block X'. When `X'
444 is used in an expression, it means the alignment value of the
445 alignment point.
447 Call the distance between the start of the first insn of block X, and
448 the end of the last insn of block X `IX', for the `inner size of X'.
449 This is clearly the sum of the instruction lengths.
451 Likewise with the next alignment-delimited block following X, which we
452 shall call block Y.
454 Call the distance between the start of the first insn of block X, and
455 the start of the first insn of block Y `OX', for the `outer size of X'.
457 The estimated padding is then OX - IX.
459 OX can be safely estimated as
461 if (X >= Y)
462 OX = round_up(IX, Y)
463 else
464 OX = round_up(IX, X) + Y - X
466 Clearly est(IX) >= real(IX), because that only depends on the
467 instruction lengths, and those being overestimated is a given.
469 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
470 we needn't worry about that when thinking about OX.
472 When X >= Y, the alignment provided by Y adds no uncertainty factor
473 for branch ranges starting before X, so we can just round what we have.
474 But when X < Y, we don't know anything about the, so to speak,
475 `middle bits', so we have to assume the worst when aligning up from an
476 address mod X to one mod Y, which is Y - X. */
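/* A worked example with made-up numbers: suppose IX = 10 bytes, X = 4 and
   Y = 8.  Since X < Y, OX = round_up (10, 4) + 8 - 4 = 16, so the estimated
   padding OX - IX is 6 bytes.  If instead X = 8 and Y = 4, then X >= Y and
   OX = round_up (10, 4) = 12, i.e. at most 2 bytes of padding.  */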
478 #ifndef LABEL_ALIGN
479 #define LABEL_ALIGN(LABEL) align_labels_log
480 #endif
482 #ifndef LOOP_ALIGN
483 #define LOOP_ALIGN(LABEL) align_loops_log
484 #endif
486 #ifndef LABEL_ALIGN_AFTER_BARRIER
487 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
488 #endif
490 #ifndef JUMP_ALIGN
491 #define JUMP_ALIGN(LABEL) align_jumps_log
492 #endif
495 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
497 return 0;
501 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
503 return align_loops_max_skip;
507 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
509 return align_labels_max_skip;
513 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
515 return align_jumps_max_skip;
518 #ifndef ADDR_VEC_ALIGN
519 static int
520 final_addr_vec_align (rtx_insn *addr_vec)
522 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
524 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
525 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
526 return exact_log2 (align);
530 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
531 #endif
533 #ifndef INSN_LENGTH_ALIGNMENT
534 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
535 #endif
537 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
539 static int min_labelno, max_labelno;
541 #define LABEL_TO_ALIGNMENT(LABEL) \
542 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
544 #define LABEL_TO_MAX_SKIP(LABEL) \
545 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
547 /* For the benefit of port specific code do this also as a function. */
550 label_to_alignment (rtx label)
552 if (CODE_LABEL_NUMBER (label) <= max_labelno)
553 return LABEL_TO_ALIGNMENT (label);
554 return 0;
558 label_to_max_skip (rtx label)
560 if (CODE_LABEL_NUMBER (label) <= max_labelno)
561 return LABEL_TO_MAX_SKIP (label);
562 return 0;
565 /* The differences in addresses
566 between a branch and its target might grow or shrink depending on
567 the alignment the start insn of the range (the branch for a forward
568 branch or the label for a backward branch) starts out on; if these
569 differences are used naively, they can even oscillate infinitely.
570 We therefore want to compute a 'worst case' address difference that
571 is independent of the alignment the start insn of the range ends
572 up on, and that is at least as large as the actual difference.
573 The function align_fuzz calculates the amount we have to add to the
574 naively computed difference, by traversing the part of the alignment
575 chain of the start insn of the range that is in front of the end insn
576 of the range, and considering for each alignment the maximum amount
577 that it might contribute to a size increase.
579 For casesi tables, we also want to know worst case minimum amounts of
580 address difference, in case a machine description wants to introduce
581 some common offset that is added to all offsets in a table.
582 For this purpose, align_fuzz with a growth argument of 0 computes the
583 appropriate adjustment. */
585 /* Compute the maximum delta by which the difference of the addresses of
586 START and END might grow / shrink due to a different address for start
587 which changes the size of alignment insns between START and END.
588 KNOWN_ALIGN_LOG is the alignment known for START.
589 GROWTH should be ~0 if the objective is to compute potential code size
590 increase, and 0 if the objective is to compute potential shrink.
591 The return value is undefined for any other value of GROWTH. */
593 static int
594 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
596 int uid = INSN_UID (start);
597 rtx align_label;
598 int known_align = 1 << known_align_log;
599 int end_shuid = INSN_SHUID (end);
600 int fuzz = 0;
602 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
604 int align_addr, new_align;
606 uid = INSN_UID (align_label);
607 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
608 if (uid_shuid[uid] > end_shuid)
609 break;
610 known_align_log = LABEL_TO_ALIGNMENT (align_label);
611 new_align = 1 << known_align_log;
612 if (new_align < known_align)
613 continue;
614 fuzz += (-align_addr ^ growth) & (new_align - known_align);
615 known_align = new_align;
617 return fuzz;
620 /* Compute a worst-case reference address of a branch so that it
621 can be safely used in the presence of aligned labels. Since the
622 size of the branch itself is unknown, the size of the branch is
623 not included in the range. I.e. for a forward branch, the reference
624 address is the end address of the branch as known from the previous
625 branch shortening pass, minus a value to account for possible size
626 increase due to alignment. For a backward branch, it is the start
627 address of the branch as known from the current pass, plus a value
628 to account for possible size increase due to alignment.
629 NB.: Therefore, the maximum offset allowed for backward branches needs
630 to exclude the branch size. */
633 insn_current_reference_address (rtx_insn *branch)
635 rtx dest;
636 int seq_uid;
638 if (! INSN_ADDRESSES_SET_P ())
639 return 0;
641 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
642 seq_uid = INSN_UID (seq);
643 if (!JUMP_P (branch))
644 /* This can happen for example on the PA; the objective is to know the
645 offset to address something in front of the start of the function.
646 Thus, we can treat it like a backward branch.
647 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
648 any alignment we'd encounter, so we skip the call to align_fuzz. */
649 return insn_current_address;
650 dest = JUMP_LABEL (branch);
652 /* BRANCH has no proper alignment chain set, so use SEQ.
653 BRANCH also has no INSN_SHUID. */
654 if (INSN_SHUID (seq) < INSN_SHUID (dest))
656 /* Forward branch. */
657 return (insn_last_address + insn_lengths[seq_uid]
658 - align_fuzz (seq, dest, length_unit_log, ~0));
660 else
662 /* Backward branch. */
663 return (insn_current_address
664 + align_fuzz (dest, seq, length_unit_log, ~0));
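/* An illustrative example with made-up addresses: for a forward branch whose
   end address was 100 on the previous pass, with align_fuzz reporting that
   intervening alignments may add up to 6 bytes, the reference address used
   is 94, so a target at address 160 is treated as 66 bytes away rather than
   60.  For a backward branch starting at address 200 with the same fuzz,
   the reference address used is 206.  */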
668 /* Compute branch alignments based on frequency information in the
669 CFG. */
671 unsigned int
672 compute_alignments (void)
674 int log, max_skip, max_log;
675 basic_block bb;
676 int freq_max = 0;
677 int freq_threshold = 0;
679 if (label_align)
681 free (label_align);
682 label_align = 0;
685 max_labelno = max_label_num ();
686 min_labelno = get_first_label_num ();
687 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
689 /* If not optimizing or optimizing for size, don't assign any alignments. */
690 if (! optimize || optimize_function_for_size_p (cfun))
691 return 0;
693 if (dump_file)
695 dump_reg_info (dump_file);
696 dump_flow_info (dump_file, TDF_DETAILS);
697 flow_loops_dump (dump_file, NULL, 1);
699 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
700 FOR_EACH_BB_FN (bb, cfun)
701 if (bb->frequency > freq_max)
702 freq_max = bb->frequency;
703 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
705 if (dump_file)
706 fprintf (dump_file, "freq_max: %i\n",freq_max);
707 FOR_EACH_BB_FN (bb, cfun)
709 rtx_insn *label = BB_HEAD (bb);
710 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
711 edge e;
712 edge_iterator ei;
714 if (!LABEL_P (label)
715 || optimize_bb_for_size_p (bb))
717 if (dump_file)
718 fprintf (dump_file,
719 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
720 bb->index, bb->frequency, bb->loop_father->num,
721 bb_loop_depth (bb));
722 continue;
724 max_log = LABEL_ALIGN (label);
725 max_skip = targetm.asm_out.label_align_max_skip (label);
727 FOR_EACH_EDGE (e, ei, bb->preds)
729 if (e->flags & EDGE_FALLTHRU)
730 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
731 else
732 branch_frequency += EDGE_FREQUENCY (e);
734 if (dump_file)
736 fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
737 " %2i fall %4i branch %4i",
738 bb->index, bb->frequency, bb->loop_father->num,
739 bb_loop_depth (bb),
740 fallthru_frequency, branch_frequency);
741 if (!bb->loop_father->inner && bb->loop_father->num)
742 fprintf (dump_file, " inner_loop");
743 if (bb->loop_father->header == bb)
744 fprintf (dump_file, " loop_header");
745 fprintf (dump_file, "\n");
748 /* There are two purposes for aligning a block with no fallthru incoming edge:
749 1) to avoid fetch stalls when branch destination is near cache boundary
750 2) to improve cache efficiency in case the previous block is not executed
751 (so it does not need to be in the cache).
753 To catch the first case, we align frequently executed blocks.
754 To catch the second, we align blocks that are executed more frequently
755 than the predecessor and the predecessor is likely to not be executed
756 when the function is called. */
758 if (!has_fallthru
759 && (branch_frequency > freq_threshold
760 || (bb->frequency > bb->prev_bb->frequency * 10
761 && (bb->prev_bb->frequency
762 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
764 log = JUMP_ALIGN (label);
765 if (dump_file)
766 fprintf (dump_file, " jump alignment added.\n");
767 if (max_log < log)
769 max_log = log;
770 max_skip = targetm.asm_out.jump_align_max_skip (label);
773 /* If the block is frequent and reached mostly by non-fallthru edges,
774 align it. It is most likely the first block of a loop. */
775 if (has_fallthru
776 && !(single_succ_p (bb)
777 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
778 && optimize_bb_for_speed_p (bb)
779 && branch_frequency + fallthru_frequency > freq_threshold
780 && (branch_frequency
781 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
783 log = LOOP_ALIGN (label);
784 if (dump_file)
785 fprintf (dump_file, " internal loop alignment added.\n");
786 if (max_log < log)
788 max_log = log;
789 max_skip = targetm.asm_out.loop_align_max_skip (label);
792 LABEL_TO_ALIGNMENT (label) = max_log;
793 LABEL_TO_MAX_SKIP (label) = max_skip;
796 loop_optimizer_finalize ();
797 free_dominance_info (CDI_DOMINATORS);
798 return 0;
801 /* Grow the LABEL_ALIGN array after new labels are created. */
803 static void
804 grow_label_align (void)
806 int old = max_labelno;
807 int n_labels;
808 int n_old_labels;
810 max_labelno = max_label_num ();
812 n_labels = max_labelno - min_labelno + 1;
813 n_old_labels = old - min_labelno + 1;
815 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
817 /* The range of labels grows monotonically in the function. Failing here
818 means that the initialization of the array got lost. */
819 gcc_assert (n_old_labels <= n_labels);
821 memset (label_align + n_old_labels, 0,
822 (n_labels - n_old_labels) * sizeof (struct label_alignment));
825 /* Update the already computed alignment information. LABEL_PAIRS is a vector
826 made up of pairs of labels for which the alignment information of the first
827 element will be copied from that of the second element. */
829 void
830 update_alignments (vec<rtx> &label_pairs)
832 unsigned int i = 0;
833 rtx iter, label = NULL_RTX;
835 if (max_labelno != max_label_num ())
836 grow_label_align ();
838 FOR_EACH_VEC_ELT (label_pairs, i, iter)
839 if (i & 1)
841 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
842 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
844 else
845 label = iter;
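/* A minimal usage sketch (NEW_LABEL and OLD_LABEL are hypothetical): to make
   NEW_LABEL inherit the alignment recorded for OLD_LABEL, push the pair in
   that order and call the function:

     auto_vec<rtx> pairs;
     pairs.safe_push (new_label);
     pairs.safe_push (old_label);
     update_alignments (pairs);  */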
848 namespace {
850 const pass_data pass_data_compute_alignments =
852 RTL_PASS, /* type */
853 "alignments", /* name */
854 OPTGROUP_NONE, /* optinfo_flags */
855 TV_NONE, /* tv_id */
856 0, /* properties_required */
857 0, /* properties_provided */
858 0, /* properties_destroyed */
859 0, /* todo_flags_start */
860 0, /* todo_flags_finish */
863 class pass_compute_alignments : public rtl_opt_pass
865 public:
866 pass_compute_alignments (gcc::context *ctxt)
867 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
870 /* opt_pass methods: */
871 virtual unsigned int execute (function *) { return compute_alignments (); }
873 }; // class pass_compute_alignments
875 } // anon namespace
877 rtl_opt_pass *
878 make_pass_compute_alignments (gcc::context *ctxt)
880 return new pass_compute_alignments (ctxt);
884 /* Make a pass over all insns and compute their actual lengths by shortening
885 any branches of variable length if possible. */
887 /* shorten_branches might be called multiple times: for example, the SH
888 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
889 In order to do this, it needs proper length information, which it obtains
890 by calling shorten_branches. This cannot be collapsed with
891 shorten_branches itself into a single pass unless we also want to integrate
892 reorg.c, since the branch splitting exposes new instructions with delay
893 slots. */
895 void
896 shorten_branches (rtx_insn *first)
898 rtx_insn *insn;
899 int max_uid;
900 int i;
901 int max_log;
902 int max_skip;
903 #define MAX_CODE_ALIGN 16
904 rtx_insn *seq;
905 int something_changed = 1;
906 char *varying_length;
907 rtx body;
908 int uid;
909 rtx align_tab[MAX_CODE_ALIGN];
911 /* Compute maximum UID and allocate label_align / uid_shuid. */
912 max_uid = get_max_uid ();
914 /* Free uid_shuid before reallocating it. */
915 free (uid_shuid);
917 uid_shuid = XNEWVEC (int, max_uid);
919 if (max_labelno != max_label_num ())
920 grow_label_align ();
922 /* Initialize label_align and set up uid_shuid to be strictly
923 monotonically rising with insn order. */
924 /* We use max_log here to keep track of the maximum alignment we want to
925 impose on the next CODE_LABEL (or the current one if we are processing
926 the CODE_LABEL itself). */
928 max_log = 0;
929 max_skip = 0;
931 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
933 int log;
935 INSN_SHUID (insn) = i++;
936 if (INSN_P (insn))
937 continue;
939 if (LABEL_P (insn))
941 rtx_insn *next;
942 bool next_is_jumptable;
944 /* Merge in alignments computed by compute_alignments. */
945 log = LABEL_TO_ALIGNMENT (insn);
946 if (max_log < log)
948 max_log = log;
949 max_skip = LABEL_TO_MAX_SKIP (insn);
952 next = next_nonnote_insn (insn);
953 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
954 if (!next_is_jumptable)
956 log = LABEL_ALIGN (insn);
957 if (max_log < log)
959 max_log = log;
960 max_skip = targetm.asm_out.label_align_max_skip (insn);
963 /* ADDR_VECs only take room if read-only data goes into the text
964 section. */
965 if ((JUMP_TABLES_IN_TEXT_SECTION
966 || readonly_data_section == text_section)
967 && next_is_jumptable)
969 log = ADDR_VEC_ALIGN (next);
970 if (max_log < log)
972 max_log = log;
973 max_skip = targetm.asm_out.label_align_max_skip (insn);
976 LABEL_TO_ALIGNMENT (insn) = max_log;
977 LABEL_TO_MAX_SKIP (insn) = max_skip;
978 max_log = 0;
979 max_skip = 0;
981 else if (BARRIER_P (insn))
983 rtx_insn *label;
985 for (label = insn; label && ! INSN_P (label);
986 label = NEXT_INSN (label))
987 if (LABEL_P (label))
989 log = LABEL_ALIGN_AFTER_BARRIER (insn);
990 if (max_log < log)
992 max_log = log;
993 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
995 break;
999 if (!HAVE_ATTR_length)
1000 return;
1002 /* Allocate the rest of the arrays. */
1003 insn_lengths = XNEWVEC (int, max_uid);
1004 insn_lengths_max_uid = max_uid;
1005 /* Syntax errors can lead to labels being outside of the main insn stream.
1006 Initialize insn_addresses, so that we get reproducible results. */
1007 INSN_ADDRESSES_ALLOC (max_uid);
1009 varying_length = XCNEWVEC (char, max_uid);
1011 /* Initialize uid_align. We scan instructions
1012 from end to start, and keep in align_tab[n] the last seen insn
1013 that does an alignment of at least n+1, i.e. the successor
1014 in the alignment chain for an insn that does / has a known
1015 alignment of n. */
1016 uid_align = XCNEWVEC (rtx, max_uid);
1018 for (i = MAX_CODE_ALIGN; --i >= 0;)
1019 align_tab[i] = NULL_RTX;
1020 seq = get_last_insn ();
1021 for (; seq; seq = PREV_INSN (seq))
1023 int uid = INSN_UID (seq);
1024 int log;
1025 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1026 uid_align[uid] = align_tab[0];
1027 if (log)
1029 /* Found an alignment label. */
1030 uid_align[uid] = align_tab[log];
1031 for (i = log - 1; i >= 0; i--)
1032 align_tab[i] = seq;
1036 /* When optimizing, we start assuming minimum length, and keep increasing
1037 lengths as we find the need for this, till nothing changes.
1038 When not optimizing, we start assuming maximum lengths, and
1039 do a single pass to update the lengths. */
1040 bool increasing = optimize != 0;
1042 #ifdef CASE_VECTOR_SHORTEN_MODE
1043 if (optimize)
1045 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1046 label fields. */
1048 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1049 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1050 int rel;
1052 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1054 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1055 int len, i, min, max, insn_shuid;
1056 int min_align;
1057 addr_diff_vec_flags flags;
1059 if (! JUMP_TABLE_DATA_P (insn)
1060 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1061 continue;
1062 pat = PATTERN (insn);
1063 len = XVECLEN (pat, 1);
1064 gcc_assert (len > 0);
1065 min_align = MAX_CODE_ALIGN;
1066 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1068 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1069 int shuid = INSN_SHUID (lab);
1070 if (shuid < min)
1072 min = shuid;
1073 min_lab = lab;
1075 if (shuid > max)
1077 max = shuid;
1078 max_lab = lab;
1080 if (min_align > LABEL_TO_ALIGNMENT (lab))
1081 min_align = LABEL_TO_ALIGNMENT (lab);
1083 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1084 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1085 insn_shuid = INSN_SHUID (insn);
1086 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1087 memset (&flags, 0, sizeof (flags));
1088 flags.min_align = min_align;
1089 flags.base_after_vec = rel > insn_shuid;
1090 flags.min_after_vec = min > insn_shuid;
1091 flags.max_after_vec = max > insn_shuid;
1092 flags.min_after_base = min > rel;
1093 flags.max_after_base = max > rel;
1094 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1096 if (increasing)
1097 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1100 #endif /* CASE_VECTOR_SHORTEN_MODE */
1102 /* Compute initial lengths, addresses, and varying flags for each insn. */
1103 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1105 for (insn_current_address = 0, insn = first;
1106 insn != 0;
1107 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1109 uid = INSN_UID (insn);
1111 insn_lengths[uid] = 0;
1113 if (LABEL_P (insn))
1115 int log = LABEL_TO_ALIGNMENT (insn);
1116 if (log)
1118 int align = 1 << log;
1119 int new_address = (insn_current_address + align - 1) & -align;
1120 insn_lengths[uid] = new_address - insn_current_address;
1124 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1126 if (NOTE_P (insn) || BARRIER_P (insn)
1127 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1128 continue;
1129 if (insn->deleted ())
1130 continue;
1132 body = PATTERN (insn);
1133 if (JUMP_TABLE_DATA_P (insn))
1135 /* This only takes room if read-only data goes into the text
1136 section. */
1137 if (JUMP_TABLES_IN_TEXT_SECTION
1138 || readonly_data_section == text_section)
1139 insn_lengths[uid] = (XVECLEN (body,
1140 GET_CODE (body) == ADDR_DIFF_VEC)
1141 * GET_MODE_SIZE (GET_MODE (body)));
1142 /* Alignment is handled by ADDR_VEC_ALIGN. */
1144 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1145 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1146 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1148 int i;
1149 int const_delay_slots;
1150 if (DELAY_SLOTS)
1151 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1152 else
1153 const_delay_slots = 0;
1155 int (*inner_length_fun) (rtx_insn *)
1156 = const_delay_slots ? length_fun : insn_default_length;
1157 /* Inside a delay slot sequence, we do not do any branch shortening
1158 if the shortening could change the number of delay slots
1159 of the branch. */
1160 for (i = 0; i < body_seq->len (); i++)
1162 rtx_insn *inner_insn = body_seq->insn (i);
1163 int inner_uid = INSN_UID (inner_insn);
1164 int inner_length;
1166 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1167 || asm_noperands (PATTERN (inner_insn)) >= 0)
1168 inner_length = (asm_insn_count (PATTERN (inner_insn))
1169 * insn_default_length (inner_insn));
1170 else
1171 inner_length = inner_length_fun (inner_insn);
1173 insn_lengths[inner_uid] = inner_length;
1174 if (const_delay_slots)
1176 if ((varying_length[inner_uid]
1177 = insn_variable_length_p (inner_insn)) != 0)
1178 varying_length[uid] = 1;
1179 INSN_ADDRESSES (inner_uid) = (insn_current_address
1180 + insn_lengths[uid]);
1182 else
1183 varying_length[inner_uid] = 0;
1184 insn_lengths[uid] += inner_length;
1187 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1189 insn_lengths[uid] = length_fun (insn);
1190 varying_length[uid] = insn_variable_length_p (insn);
1193 /* If needed, do any adjustment. */
1194 #ifdef ADJUST_INSN_LENGTH
1195 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1196 if (insn_lengths[uid] < 0)
1197 fatal_insn ("negative insn length", insn);
1198 #endif
1201 /* Now loop over all the insns finding varying length insns. For each,
1202 get the current insn length. If it has changed, reflect the change.
1203 When nothing changes for a full pass, we are done. */
1205 while (something_changed)
1207 something_changed = 0;
1208 insn_current_align = MAX_CODE_ALIGN - 1;
1209 for (insn_current_address = 0, insn = first;
1210 insn != 0;
1211 insn = NEXT_INSN (insn))
1213 int new_length;
1214 #ifdef ADJUST_INSN_LENGTH
1215 int tmp_length;
1216 #endif
1217 int length_align;
1219 uid = INSN_UID (insn);
1221 if (LABEL_P (insn))
1223 int log = LABEL_TO_ALIGNMENT (insn);
1225 #ifdef CASE_VECTOR_SHORTEN_MODE
1226 /* If the mode of a following jump table was changed, we
1227 may need to update the alignment of this label. */
1228 rtx_insn *next;
1229 bool next_is_jumptable;
1231 next = next_nonnote_insn (insn);
1232 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1233 if ((JUMP_TABLES_IN_TEXT_SECTION
1234 || readonly_data_section == text_section)
1235 && next_is_jumptable)
1237 int newlog = ADDR_VEC_ALIGN (next);
1238 if (newlog != log)
1240 log = newlog;
1241 LABEL_TO_ALIGNMENT (insn) = log;
1242 something_changed = 1;
1245 #endif
1247 if (log > insn_current_align)
1249 int align = 1 << log;
1250 int new_address = (insn_current_address + align - 1) & -align;
1251 insn_lengths[uid] = new_address - insn_current_address;
1252 insn_current_align = log;
1253 insn_current_address = new_address;
1255 else
1256 insn_lengths[uid] = 0;
1257 INSN_ADDRESSES (uid) = insn_current_address;
1258 continue;
1261 length_align = INSN_LENGTH_ALIGNMENT (insn);
1262 if (length_align < insn_current_align)
1263 insn_current_align = length_align;
1265 insn_last_address = INSN_ADDRESSES (uid);
1266 INSN_ADDRESSES (uid) = insn_current_address;
1268 #ifdef CASE_VECTOR_SHORTEN_MODE
1269 if (optimize
1270 && JUMP_TABLE_DATA_P (insn)
1271 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1273 rtx body = PATTERN (insn);
1274 int old_length = insn_lengths[uid];
1275 rtx_insn *rel_lab =
1276 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1277 rtx min_lab = XEXP (XEXP (body, 2), 0);
1278 rtx max_lab = XEXP (XEXP (body, 3), 0);
1279 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1280 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1281 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1282 rtx_insn *prev;
1283 int rel_align = 0;
1284 addr_diff_vec_flags flags;
1285 machine_mode vec_mode;
1287 /* Avoid automatic aggregate initialization. */
1288 flags = ADDR_DIFF_VEC_FLAGS (body);
1290 /* Try to find a known alignment for rel_lab. */
1291 for (prev = rel_lab;
1292 prev
1293 && ! insn_lengths[INSN_UID (prev)]
1294 && ! (varying_length[INSN_UID (prev)] & 1);
1295 prev = PREV_INSN (prev))
1296 if (varying_length[INSN_UID (prev)] & 2)
1298 rel_align = LABEL_TO_ALIGNMENT (prev);
1299 break;
1302 /* See the comment on addr_diff_vec_flags in rtl.h for the
1303 meaning of the flags values. base: REL_LAB vec: INSN */
1304 /* Anything after INSN still has addresses from the last
1305 pass; adjust these so that they reflect our current
1306 estimate for this pass. */
1307 if (flags.base_after_vec)
1308 rel_addr += insn_current_address - insn_last_address;
1309 if (flags.min_after_vec)
1310 min_addr += insn_current_address - insn_last_address;
1311 if (flags.max_after_vec)
1312 max_addr += insn_current_address - insn_last_address;
1313 /* We want to know the worst case, i.e. lowest possible value
1314 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1315 its offset is positive, and we have to be wary of code shrink;
1316 otherwise, it is negative, and we have to be wary of code
1317 size increase. */
1318 if (flags.min_after_base)
1320 /* If INSN is between REL_LAB and MIN_LAB, the size
1321 changes we are about to make can change the alignment
1322 within the observed offset, therefore we have to break
1323 it up into two parts that are independent. */
1324 if (! flags.base_after_vec && flags.min_after_vec)
1326 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1327 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1329 else
1330 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1332 else
1334 if (flags.base_after_vec && ! flags.min_after_vec)
1336 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1337 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1339 else
1340 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1342 /* Likewise, determine the worst case, i.e. highest possible value
1343 for the offset of MAX_LAB. */
1344 if (flags.max_after_base)
1346 if (! flags.base_after_vec && flags.max_after_vec)
1348 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1349 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1351 else
1352 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1354 else
1356 if (flags.base_after_vec && ! flags.max_after_vec)
1358 max_addr += align_fuzz (max_lab, insn, 0, 0);
1359 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1361 else
1362 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1364 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1365 max_addr - rel_addr, body);
1366 if (!increasing
1367 || (GET_MODE_SIZE (vec_mode)
1368 >= GET_MODE_SIZE (GET_MODE (body))))
1369 PUT_MODE (body, vec_mode);
1370 if (JUMP_TABLES_IN_TEXT_SECTION
1371 || readonly_data_section == text_section)
1373 insn_lengths[uid]
1374 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1375 insn_current_address += insn_lengths[uid];
1376 if (insn_lengths[uid] != old_length)
1377 something_changed = 1;
1380 continue;
1382 #endif /* CASE_VECTOR_SHORTEN_MODE */
1384 if (! (varying_length[uid]))
1386 if (NONJUMP_INSN_P (insn)
1387 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1389 int i;
1391 body = PATTERN (insn);
1392 for (i = 0; i < XVECLEN (body, 0); i++)
1394 rtx inner_insn = XVECEXP (body, 0, i);
1395 int inner_uid = INSN_UID (inner_insn);
1397 INSN_ADDRESSES (inner_uid) = insn_current_address;
1399 insn_current_address += insn_lengths[inner_uid];
1402 else
1403 insn_current_address += insn_lengths[uid];
1405 continue;
1408 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1410 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1411 int i;
1413 body = PATTERN (insn);
1414 new_length = 0;
1415 for (i = 0; i < seqn->len (); i++)
1417 rtx_insn *inner_insn = seqn->insn (i);
1418 int inner_uid = INSN_UID (inner_insn);
1419 int inner_length;
1421 INSN_ADDRESSES (inner_uid) = insn_current_address;
1423 /* insn_current_length returns 0 for insns with a
1424 non-varying length. */
1425 if (! varying_length[inner_uid])
1426 inner_length = insn_lengths[inner_uid];
1427 else
1428 inner_length = insn_current_length (inner_insn);
1430 if (inner_length != insn_lengths[inner_uid])
1432 if (!increasing || inner_length > insn_lengths[inner_uid])
1434 insn_lengths[inner_uid] = inner_length;
1435 something_changed = 1;
1437 else
1438 inner_length = insn_lengths[inner_uid];
1440 insn_current_address += inner_length;
1441 new_length += inner_length;
1444 else
1446 new_length = insn_current_length (insn);
1447 insn_current_address += new_length;
1450 #ifdef ADJUST_INSN_LENGTH
1451 /* If needed, do any adjustment. */
1452 tmp_length = new_length;
1453 ADJUST_INSN_LENGTH (insn, new_length);
1454 insn_current_address += (new_length - tmp_length);
1455 #endif
1457 if (new_length != insn_lengths[uid]
1458 && (!increasing || new_length > insn_lengths[uid]))
1460 insn_lengths[uid] = new_length;
1461 something_changed = 1;
1463 else
1464 insn_current_address += insn_lengths[uid] - new_length;
1466 /* For a non-optimizing compile, do only a single pass. */
1467 if (!increasing)
1468 break;
1470 crtl->max_insn_address = insn_current_address;
1471 free (varying_length);
1474 /* Given the body of an INSN known to be generated by an ASM statement, return
1475 the number of machine instructions likely to be generated for this insn.
1476 This is used to compute its length. */
1478 static int
1479 asm_insn_count (rtx body)
1481 const char *templ;
1483 if (GET_CODE (body) == ASM_INPUT)
1484 templ = XSTR (body, 0);
1485 else
1486 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1488 return asm_str_count (templ);
1491 /* Return the number of machine instructions likely to be generated for the
1492 inline-asm template. */
1494 asm_str_count (const char *templ)
1496 int count = 1;
1498 if (!*templ)
1499 return 0;
1501 for (; *templ; templ++)
1502 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1503 || *templ == '\n')
1504 count++;
1506 return count;
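/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR above:
   asm_str_count ("") returns 0,
   asm_str_count ("mov %1, %0") returns 1,
   asm_str_count ("mov %1, %0; add %2, %0") returns 2, and
   asm_str_count ("insn_a\n\tinsn_b\n\tinsn_c") returns 3.  */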
1509 /* ??? This is probably the wrong place for these. */
1510 /* Structure recording the mapping from source file and directory
1511 names at compile time to those to be embedded in debug
1512 information. */
1513 struct debug_prefix_map
1515 const char *old_prefix;
1516 const char *new_prefix;
1517 size_t old_len;
1518 size_t new_len;
1519 struct debug_prefix_map *next;
1522 /* Linked list of such structures. */
1523 static debug_prefix_map *debug_prefix_maps;
1526 /* Record a debug file prefix mapping. ARG is the argument to
1527 -fdebug-prefix-map and must be of the form OLD=NEW. */
1529 void
1530 add_debug_prefix_map (const char *arg)
1532 debug_prefix_map *map;
1533 const char *p;
1535 p = strchr (arg, '=');
1536 if (!p)
1538 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1539 return;
1541 map = XNEW (debug_prefix_map);
1542 map->old_prefix = xstrndup (arg, p - arg);
1543 map->old_len = p - arg;
1544 p++;
1545 map->new_prefix = xstrdup (p);
1546 map->new_len = strlen (p);
1547 map->next = debug_prefix_maps;
1548 debug_prefix_maps = map;
1551 /* Perform user-specified mapping of debug filename prefixes. Return
1552 the new name corresponding to FILENAME. */
1554 const char *
1555 remap_debug_filename (const char *filename)
1557 debug_prefix_map *map;
1558 char *s;
1559 const char *name;
1560 size_t name_len;
1562 for (map = debug_prefix_maps; map; map = map->next)
1563 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1564 break;
1565 if (!map)
1566 return filename;
1567 name = filename + map->old_len;
1568 name_len = strlen (name) + 1;
1569 s = (char *) alloca (name_len + map->new_len);
1570 memcpy (s, map->new_prefix, map->new_len);
1571 memcpy (s + map->new_len, name, name_len);
1572 return ggc_strdup (s);
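/* An illustrative mapping (hypothetical paths): after
   add_debug_prefix_map ("/home/alice/src=/usr/src"), the call
   remap_debug_filename ("/home/alice/src/gcc/final.c") returns
   "/usr/src/gcc/final.c", while a filename that does not start with the
   old prefix is returned unchanged.  */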
1575 /* Return true if DWARF2 debug info can be emitted for DECL. */
1577 static bool
1578 dwarf2_debug_info_emitted_p (tree decl)
1580 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1581 return false;
1583 if (DECL_IGNORED_P (decl))
1584 return false;
1586 return true;
1589 /* Return scope resulting from combination of S1 and S2. */
1590 static tree
1591 choose_inner_scope (tree s1, tree s2)
1593 if (!s1)
1594 return s2;
1595 if (!s2)
1596 return s1;
1597 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1598 return s1;
1599 return s2;
1602 /* Emit lexical block notes needed to change scope from S1 to S2. */
1604 static void
1605 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1607 rtx_insn *insn = orig_insn;
1608 tree com = NULL_TREE;
1609 tree ts1 = s1, ts2 = s2;
1610 tree s;
1612 while (ts1 != ts2)
1614 gcc_assert (ts1 && ts2);
1615 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1616 ts1 = BLOCK_SUPERCONTEXT (ts1);
1617 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1618 ts2 = BLOCK_SUPERCONTEXT (ts2);
1619 else
1621 ts1 = BLOCK_SUPERCONTEXT (ts1);
1622 ts2 = BLOCK_SUPERCONTEXT (ts2);
1625 com = ts1;
1627 /* Close scopes. */
1628 s = s1;
1629 while (s != com)
1631 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1632 NOTE_BLOCK (note) = s;
1633 s = BLOCK_SUPERCONTEXT (s);
1636 /* Open scopes. */
1637 s = s2;
1638 while (s != com)
1640 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1641 NOTE_BLOCK (insn) = s;
1642 s = BLOCK_SUPERCONTEXT (s);
1646 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1647 on the scope tree and the newly reordered instructions. */
1649 static void
1650 reemit_insn_block_notes (void)
1652 tree cur_block = DECL_INITIAL (cfun->decl);
1653 rtx_insn *insn;
1654 rtx_note *note;
1656 insn = get_insns ();
1657 for (; insn; insn = NEXT_INSN (insn))
1659 tree this_block;
1661 /* Prevent lexical blocks from straddling section boundaries. */
1662 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1664 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1665 s = BLOCK_SUPERCONTEXT (s))
1667 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1668 NOTE_BLOCK (note) = s;
1669 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1670 NOTE_BLOCK (note) = s;
1674 if (!active_insn_p (insn))
1675 continue;
1677 /* Avoid putting scope notes between jump table and its label. */
1678 if (JUMP_TABLE_DATA_P (insn))
1679 continue;
1681 this_block = insn_scope (insn);
1682 /* For sequences compute scope resulting from merging all scopes
1683 of instructions nested inside. */
1684 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1686 int i;
1688 this_block = NULL;
1689 for (i = 0; i < body->len (); i++)
1690 this_block = choose_inner_scope (this_block,
1691 insn_scope (body->insn (i)));
1693 if (! this_block)
1695 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1696 continue;
1697 else
1698 this_block = DECL_INITIAL (cfun->decl);
1701 if (this_block != cur_block)
1703 change_scope (insn, cur_block, this_block);
1704 cur_block = this_block;
1708 /* change_scope emits before the insn, not after. */
1709 note = emit_note (NOTE_INSN_DELETED);
1710 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1711 delete_insn (note);
1713 reorder_blocks ();
1716 static const char *some_local_dynamic_name;
1718 /* Locate some local-dynamic symbol still in use by this function
1719 so that we can print its name in local-dynamic base patterns.
1720 Return null if there are no local-dynamic references. */
1722 const char *
1723 get_some_local_dynamic_name ()
1725 subrtx_iterator::array_type array;
1726 rtx_insn *insn;
1728 if (some_local_dynamic_name)
1729 return some_local_dynamic_name;
1731 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1732 if (NONDEBUG_INSN_P (insn))
1733 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1735 const_rtx x = *iter;
1736 if (GET_CODE (x) == SYMBOL_REF)
1738 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1739 return some_local_dynamic_name = XSTR (x, 0);
1740 if (CONSTANT_POOL_ADDRESS_P (x))
1741 iter.substitute (get_pool_constant (x));
1745 return 0;
1748 /* Output assembler code for the start of a function,
1749 and initialize some of the variables in this file
1750 for the new function. The label for the function and associated
1751 assembler pseudo-ops have already been output in `assemble_start_function'.
1753 FIRST is the first insn of the rtl for the function being compiled.
1754 FILE is the file to write assembler code to.
1755 OPTIMIZE_P is nonzero if we should eliminate redundant
1756 test and compare insns. */
1758 void
1759 final_start_function (rtx_insn *first, FILE *file,
1760 int optimize_p ATTRIBUTE_UNUSED)
1762 block_depth = 0;
1764 this_is_asm_operands = 0;
1766 need_profile_function = false;
1768 last_filename = LOCATION_FILE (prologue_location);
1769 last_linenum = LOCATION_LINE (prologue_location);
1770 last_columnnum = LOCATION_COLUMN (prologue_location);
1771 last_discriminator = discriminator = 0;
1773 high_block_linenum = high_function_linenum = last_linenum;
1775 if (flag_sanitize & SANITIZE_ADDRESS)
1776 asan_function_start ();
1778 if (!DECL_IGNORED_P (current_function_decl))
1779 debug_hooks->begin_prologue (last_linenum, last_columnnum, last_filename);
1781 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1782 dwarf2out_begin_prologue (0, 0, NULL);
1784 #ifdef LEAF_REG_REMAP
1785 if (crtl->uses_only_leaf_regs)
1786 leaf_renumber_regs (first);
1787 #endif
1789 /* The Sun386i and perhaps other machines don't work right
1790 if the profiling code comes after the prologue. */
1791 if (targetm.profile_before_prologue () && crtl->profile)
1793 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1794 && targetm.have_prologue ())
1796 rtx_insn *insn;
1797 for (insn = first; insn; insn = NEXT_INSN (insn))
1798 if (!NOTE_P (insn))
1800 insn = NULL;
1801 break;
1803 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1804 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1805 break;
1806 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1807 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1808 continue;
1809 else
1811 insn = NULL;
1812 break;
1815 if (insn)
1816 need_profile_function = true;
1817 else
1818 profile_function (file);
1820 else
1821 profile_function (file);
1824 /* If debugging, assign block numbers to all of the blocks in this
1825 function. */
1826 if (write_symbols)
1828 reemit_insn_block_notes ();
1829 number_blocks (current_function_decl);
1830 /* We never actually put out begin/end notes for the top-level
1831 block in the function. But, conceptually, that block is
1832 always needed. */
1833 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1836 if (warn_frame_larger_than
1837 && get_frame_size () > frame_larger_than_size)
1839 /* Issue a warning */
1840 warning (OPT_Wframe_larger_than_,
1841 "the frame size of %wd bytes is larger than %wd bytes",
1842 get_frame_size (), frame_larger_than_size);
1845 /* First output the function prologue: code to set up the stack frame. */
1846 targetm.asm_out.function_prologue (file, get_frame_size ());
1848 /* If the machine represents the prologue as RTL, the profiling code must
1849 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1850 if (! targetm.have_prologue ())
1851 profile_after_prologue (file);
1854 static void
1855 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1857 if (!targetm.profile_before_prologue () && crtl->profile)
1858 profile_function (file);
1861 static void
1862 profile_function (FILE *file ATTRIBUTE_UNUSED)
1864 #ifndef NO_PROFILE_COUNTERS
1865 # define NO_PROFILE_COUNTERS 0
1866 #endif
1867 #ifdef ASM_OUTPUT_REG_PUSH
1868 rtx sval = NULL, chain = NULL;
1870 if (cfun->returns_struct)
1871 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1872 true);
1873 if (cfun->static_chain_decl)
1874 chain = targetm.calls.static_chain (current_function_decl, true);
1875 #endif /* ASM_OUTPUT_REG_PUSH */
1877 if (! NO_PROFILE_COUNTERS)
1879 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1880 switch_to_section (data_section);
1881 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1882 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1883 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1886 switch_to_section (current_function_section ());
1888 #ifdef ASM_OUTPUT_REG_PUSH
1889 if (sval && REG_P (sval))
1890 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1891 if (chain && REG_P (chain))
1892 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1893 #endif
1895 FUNCTION_PROFILER (file, current_function_funcdef_no);
1897 #ifdef ASM_OUTPUT_REG_PUSH
1898 if (chain && REG_P (chain))
1899 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1900 if (sval && REG_P (sval))
1901 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1902 #endif
1905 /* Output assembler code for the end of a function.
1906 For clarity, args are same as those of `final_start_function'
1907 even though not all of them are needed. */
1909 void
1910 final_end_function (void)
1912 app_disable ();
1914 if (!DECL_IGNORED_P (current_function_decl))
1915 debug_hooks->end_function (high_function_linenum);
1917 /* Finally, output the function epilogue:
1918 code to restore the stack frame and return to the caller. */
1919 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1921 /* And debug output. */
1922 if (!DECL_IGNORED_P (current_function_decl))
1923 debug_hooks->end_epilogue (last_linenum, last_filename);
1925 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1926 && dwarf2out_do_frame ())
1927 dwarf2out_end_epilogue (last_linenum, last_filename);
1929 some_local_dynamic_name = 0;
1933 /* Dumper helper for basic block information. FILE is the assembly
1934 output file, and INSN is the instruction being emitted. */
1936 static void
1937 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1938 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1940 basic_block bb;
1942 if (!flag_debug_asm)
1943 return;
1945 if (INSN_UID (insn) < bb_map_size
1946 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1948 edge e;
1949 edge_iterator ei;
1951 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1952 if (bb->frequency)
1953 fprintf (file, " freq:%d", bb->frequency);
1954 if (bb->count)
1955 fprintf (file, " count:%" PRId64,
1956 bb->count);
1957 fprintf (file, " seq:%d", (*bb_seqn)++);
1958 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1959 FOR_EACH_EDGE (e, ei, bb->preds)
1961 dump_edge_info (file, e, TDF_DETAILS, 0);
1963 fprintf (file, "\n");
1965 if (INSN_UID (insn) < bb_map_size
1966 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1968 edge e;
1969 edge_iterator ei;
1971 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1972 FOR_EACH_EDGE (e, ei, bb->succs)
1974 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1976 fprintf (file, "\n");
1980 /* Output assembler code for some insns: all or part of a function.
1981 For description of args, see `final_start_function', above. */
1983 void
1984 final (rtx_insn *first, FILE *file, int optimize_p)
1986 rtx_insn *insn, *next;
1987 int seen = 0;
1989 /* Used for -dA dump. */
1990 basic_block *start_to_bb = NULL;
1991 basic_block *end_to_bb = NULL;
1992 int bb_map_size = 0;
1993 int bb_seqn = 0;
1995 last_ignored_compare = 0;
1997 if (HAVE_cc0)
1998 for (insn = first; insn; insn = NEXT_INSN (insn))
2000 /* If CC tracking across branches is enabled, record the jump insn
2001 for each label that is reached from only one place. */
2002 if (optimize_p && JUMP_P (insn))
2004 rtx lab = JUMP_LABEL (insn);
2005 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2007 LABEL_REFS (lab) = insn;
2012 init_recog ();
2014 CC_STATUS_INIT;
2016 if (flag_debug_asm)
2018 basic_block bb;
2020 bb_map_size = get_max_uid () + 1;
2021 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2022 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2024 /* There is no cfg for a thunk. */
2025 if (!cfun->is_thunk)
2026 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2028 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2029 end_to_bb[INSN_UID (BB_END (bb))] = bb;
2033 /* Output the insns. */
2034 for (insn = first; insn;)
2036 if (HAVE_ATTR_length)
2038 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2040 /* This can be triggered by bugs elsewhere in the compiler if
2041 new insns are created after init_insn_lengths is called. */
2042 gcc_assert (NOTE_P (insn));
2043 insn_current_address = -1;
2045 else
2046 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2049 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2050 bb_map_size, &bb_seqn);
2051 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2054 if (flag_debug_asm)
2056 free (start_to_bb);
2057 free (end_to_bb);
2060 /* Remove CFI notes, to avoid compare-debug failures. */
2061 for (insn = first; insn; insn = next)
2063 next = NEXT_INSN (insn);
2064 if (NOTE_P (insn)
2065 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2066 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2067 delete_insn (insn);
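/* Return the assembler template for an insn whose recognized insn code is
   CODE.  INSN is needed only when the template is computed by a generated
   C function (INSN_OUTPUT_FORMAT_FUNCTION).  */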
2071 const char *
2072 get_insn_template (int code, rtx insn)
2074 switch (insn_data[code].output_format)
2076 case INSN_OUTPUT_FORMAT_SINGLE:
2077 return insn_data[code].output.single;
2078 case INSN_OUTPUT_FORMAT_MULTI:
2079 return insn_data[code].output.multi[which_alternative];
2080 case INSN_OUTPUT_FORMAT_FUNCTION:
2081 gcc_assert (insn);
2082 return (*insn_data[code].output.function) (recog_data.operand,
2083 as_a <rtx_insn *> (insn));
2085 default:
2086 gcc_unreachable ();
2090 /* Emit the appropriate declaration for an alternate-entry-point
2091 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2092 LABEL_KIND != LABEL_NORMAL.
2094 The case fall-through in this function is intentional. */
2095 static void
2096 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2098 const char *name = LABEL_NAME (insn);
2100 switch (LABEL_KIND (insn))
2102 case LABEL_WEAK_ENTRY:
2103 #ifdef ASM_WEAKEN_LABEL
2104 ASM_WEAKEN_LABEL (file, name);
2105 gcc_fallthrough ();
2106 #endif
2107 case LABEL_GLOBAL_ENTRY:
2108 targetm.asm_out.globalize_label (file, name);
2109 gcc_fallthrough ();
2110 case LABEL_STATIC_ENTRY:
2111 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2112 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2113 #endif
2114 ASM_OUTPUT_LABEL (file, name);
2115 break;
2117 case LABEL_NORMAL:
2118 default:
2119 gcc_unreachable ();
2123 /* Given a CALL_INSN, find and return the nested CALL. */
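/* The CALL rtx may be wrapped in a SET (for calls that return a value),
   a PARALLEL, or a COND_EXEC; peel these off until the CALL itself is
   reached.  */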
2124 static rtx
2125 call_from_call_insn (rtx_call_insn *insn)
2127 rtx x;
2128 gcc_assert (CALL_P (insn));
2129 x = PATTERN (insn);
2131 while (GET_CODE (x) != CALL)
2133 switch (GET_CODE (x))
2135 default:
2136 gcc_unreachable ();
2137 case COND_EXEC:
2138 x = COND_EXEC_CODE (x);
2139 break;
2140 case PARALLEL:
2141 x = XVECEXP (x, 0, 0);
2142 break;
2143 case SET:
2144 x = XEXP (x, 1);
2145 break;
2148 return x;
2151 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2152 corresponding source line, if available. */
2154 static void
2155 asm_show_source (const char *filename, int linenum)
2157 if (!filename)
2158 return;
2160 int line_size;
2161 const char *line = location_get_source_line (filename, linenum, &line_size);
2162 if (!line)
2163 return;
2165 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2166 /* "line" is not 0-terminated, so we must use line_size. */
2167 fwrite (line, 1, line_size, asm_out_file);
2168 fputc ('\n', asm_out_file);
2171 /* The final scan for one insn, INSN.
2172 Args are same as in `final', except that INSN
2173 is the insn being scanned.
2174 Value returned is the next insn to be scanned.
2176 NOPEEPHOLES is the flag to disallow peephole processing (currently
2177 used within delayed branch sequence output).
2179 SEEN is used to track the end of the prologue, for emitting
2180 debug information. We force the emission of a line note after
2181 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2183 rtx_insn *
2184 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2185 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2187 #if HAVE_cc0
2188 rtx set;
2189 #endif
2190 rtx_insn *next;
2192 insn_counter++;
2194 /* Ignore deleted insns. These can occur when we split insns (due to a
2195 template of "#") while not optimizing. */
2196 if (insn->deleted ())
2197 return NEXT_INSN (insn);
2199 switch (GET_CODE (insn))
2201 case NOTE:
2202 switch (NOTE_KIND (insn))
2204 case NOTE_INSN_DELETED:
2205 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2206 break;
2208 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2209 in_cold_section_p = !in_cold_section_p;
2211 if (dwarf2out_do_frame ())
2212 dwarf2out_switch_text_section ();
2213 else if (!DECL_IGNORED_P (current_function_decl))
2214 debug_hooks->switch_text_section ();
2216 switch_to_section (current_function_section ());
2217 targetm.asm_out.function_switched_text_sections (asm_out_file,
2218 current_function_decl,
2219 in_cold_section_p);
2220 /* Emit a label for the split cold section. Form label name by
2221 suffixing "cold" to the original function's name. */
2222 if (in_cold_section_p)
2224 cold_function_name
2225 = clone_function_name (current_function_decl, "cold");
2226 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2227 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2228 IDENTIFIER_POINTER
2229 (cold_function_name),
2230 current_function_decl);
2231 #else
2232 ASM_OUTPUT_LABEL (asm_out_file,
2233 IDENTIFIER_POINTER (cold_function_name));
2234 #endif
2236 break;
2238 case NOTE_INSN_BASIC_BLOCK:
2239 if (need_profile_function)
2241 profile_function (asm_out_file);
2242 need_profile_function = false;
2245 if (targetm.asm_out.unwind_emit)
2246 targetm.asm_out.unwind_emit (asm_out_file, insn);
2248 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2250 break;
2252 case NOTE_INSN_EH_REGION_BEG:
2253 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2254 NOTE_EH_HANDLER (insn));
2255 break;
2257 case NOTE_INSN_EH_REGION_END:
2258 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2259 NOTE_EH_HANDLER (insn));
2260 break;
2262 case NOTE_INSN_PROLOGUE_END:
2263 targetm.asm_out.function_end_prologue (file);
2264 profile_after_prologue (file);
2266 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2268 *seen |= SEEN_EMITTED;
2269 force_source_line = true;
2271 else
2272 *seen |= SEEN_NOTE;
2274 break;
2276 case NOTE_INSN_EPILOGUE_BEG:
2277 if (!DECL_IGNORED_P (current_function_decl))
2278 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2279 targetm.asm_out.function_begin_epilogue (file);
2280 break;
2282 case NOTE_INSN_CFI:
2283 dwarf2out_emit_cfi (NOTE_CFI (insn));
2284 break;
2286 case NOTE_INSN_CFI_LABEL:
2287 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2288 NOTE_LABEL_NUMBER (insn));
2289 break;
2291 case NOTE_INSN_FUNCTION_BEG:
2292 if (need_profile_function)
2294 profile_function (asm_out_file);
2295 need_profile_function = false;
2298 app_disable ();
2299 if (!DECL_IGNORED_P (current_function_decl))
2300 debug_hooks->end_prologue (last_linenum, last_filename);
2302 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2304 *seen |= SEEN_EMITTED;
2305 force_source_line = true;
2307 else
2308 *seen |= SEEN_NOTE;
2310 break;
2312 case NOTE_INSN_BLOCK_BEG:
2313 if (debug_info_level == DINFO_LEVEL_NORMAL
2314 || debug_info_level == DINFO_LEVEL_VERBOSE
2315 || write_symbols == DWARF2_DEBUG
2316 || write_symbols == VMS_AND_DWARF2_DEBUG
2317 || write_symbols == VMS_DEBUG)
2319 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2321 app_disable ();
2322 ++block_depth;
2323 high_block_linenum = last_linenum;
2325 /* Output debugging info about the symbol-block beginning. */
2326 if (!DECL_IGNORED_P (current_function_decl))
2327 debug_hooks->begin_block (last_linenum, n);
2329 /* Mark this block as output. */
2330 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2331 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2333 if (write_symbols == DBX_DEBUG
2334 || write_symbols == SDB_DEBUG)
2336 location_t *locus_ptr
2337 = block_nonartificial_location (NOTE_BLOCK (insn));
2339 if (locus_ptr != NULL)
2341 override_filename = LOCATION_FILE (*locus_ptr);
2342 override_linenum = LOCATION_LINE (*locus_ptr);
2343 override_columnnum = LOCATION_COLUMN (*locus_ptr);
2346 break;
2348 case NOTE_INSN_BLOCK_END:
2349 if (debug_info_level == DINFO_LEVEL_NORMAL
2350 || debug_info_level == DINFO_LEVEL_VERBOSE
2351 || write_symbols == DWARF2_DEBUG
2352 || write_symbols == VMS_AND_DWARF2_DEBUG
2353 || write_symbols == VMS_DEBUG)
2355 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2357 app_disable ();
2359 /* End of a symbol-block. */
2360 --block_depth;
2361 gcc_assert (block_depth >= 0);
2363 if (!DECL_IGNORED_P (current_function_decl))
2364 debug_hooks->end_block (high_block_linenum, n);
2365 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2366 == in_cold_section_p);
2368 if (write_symbols == DBX_DEBUG
2369 || write_symbols == SDB_DEBUG)
2371 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2372 location_t *locus_ptr
2373 = block_nonartificial_location (outer_block);
2375 if (locus_ptr != NULL)
2377 override_filename = LOCATION_FILE (*locus_ptr);
2378 override_linenum = LOCATION_LINE (*locus_ptr);
2379 override_columnnum = LOCATION_COLUMN (*locus_ptr);
2381 else
2383 override_filename = NULL;
2384 override_linenum = 0;
2385 override_columnnum = 0;
2388 break;
2390 case NOTE_INSN_DELETED_LABEL:
2391 /* Emit the label. We may have deleted the CODE_LABEL because
2392 the label could be proved to be unreachable, though still
2393 referenced (in the form of having its address taken).  */
2394 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2395 break;
2397 case NOTE_INSN_DELETED_DEBUG_LABEL:
2398 /* Similarly, but we need to use a different namespace for it. */
2399 if (CODE_LABEL_NUMBER (insn) != -1)
2400 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2401 break;
2403 case NOTE_INSN_VAR_LOCATION:
2404 case NOTE_INSN_CALL_ARG_LOCATION:
2405 if (!DECL_IGNORED_P (current_function_decl))
2406 debug_hooks->var_location (insn);
2407 break;
2409 default:
2410 gcc_unreachable ();
2411 break;
2413 break;
2415 case BARRIER:
2416 break;
2418 case CODE_LABEL:
2419 /* The target port might emit labels in the output function for
2420 some insn, e.g. sh.c output_branchy_insn. */
2421 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2423 int align = LABEL_TO_ALIGNMENT (insn);
2424 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2425 int max_skip = LABEL_TO_MAX_SKIP (insn);
2426 #endif
2428 if (align && NEXT_INSN (insn))
2430 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2431 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2432 #else
2433 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2434 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2435 #else
2436 ASM_OUTPUT_ALIGN (file, align);
2437 #endif
2438 #endif
2441 CC_STATUS_INIT;
2443 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2444 debug_hooks->label (as_a <rtx_code_label *> (insn));
2446 app_disable ();
2448 next = next_nonnote_insn (insn);
2449 /* If this label is followed by a jump-table, make sure we put
2450 the label in the read-only section. Also possibly write the
2451 label and jump table together. */
2452 if (next != 0 && JUMP_TABLE_DATA_P (next))
2454 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2455 /* In this case, the case vector is being moved by the
2456 target, so don't output the label at all. Leave that
2457 to the back end macros. */
2458 #else
2459 if (! JUMP_TABLES_IN_TEXT_SECTION)
2461 int log_align;
2463 switch_to_section (targetm.asm_out.function_rodata_section
2464 (current_function_decl));
2466 #ifdef ADDR_VEC_ALIGN
2467 log_align = ADDR_VEC_ALIGN (next);
2468 #else
2469 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2470 #endif
2471 ASM_OUTPUT_ALIGN (file, log_align);
2473 else
2474 switch_to_section (current_function_section ());
2476 #ifdef ASM_OUTPUT_CASE_LABEL
2477 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2478 next);
2479 #else
2480 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2481 #endif
2482 #endif
2483 break;
2485 if (LABEL_ALT_ENTRY_P (insn))
2486 output_alternate_entry_point (file, insn);
2487 else
2488 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2489 break;
2491 default:
2493 rtx body = PATTERN (insn);
2494 int insn_code_number;
2495 const char *templ;
2496 bool is_stmt;
2498 /* Reset this early so it is correct for ASM statements. */
2499 current_insn_predicate = NULL_RTX;
2501 /* An INSN, JUMP_INSN or CALL_INSN.
2502 First check for special kinds that recog doesn't recognize. */
2504 if (GET_CODE (body) == USE /* These are just declarations. */
2505 || GET_CODE (body) == CLOBBER)
2506 break;
2508 #if HAVE_cc0
2510 /* If there is a REG_CC_SETTER note on this insn, it means that
2511 the setting of the condition code was done in the delay slot
2512 of the insn that branched here. So recover the cc status
2513 from the insn that set it. */
2515 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2516 if (note)
2518 rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2519 NOTICE_UPDATE_CC (PATTERN (other), other);
2520 cc_prev_status = cc_status;
2523 #endif
2525 /* Detect insns that are really jump-tables
2526 and output them as such. */
2528 if (JUMP_TABLE_DATA_P (insn))
2530 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2531 int vlen, idx;
2532 #endif
2534 if (! JUMP_TABLES_IN_TEXT_SECTION)
2535 switch_to_section (targetm.asm_out.function_rodata_section
2536 (current_function_decl));
2537 else
2538 switch_to_section (current_function_section ());
2540 app_disable ();
2542 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2543 if (GET_CODE (body) == ADDR_VEC)
2545 #ifdef ASM_OUTPUT_ADDR_VEC
2546 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2547 #else
2548 gcc_unreachable ();
2549 #endif
2551 else
2553 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2554 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2555 #else
2556 gcc_unreachable ();
2557 #endif
2559 #else
2560 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2561 for (idx = 0; idx < vlen; idx++)
2563 if (GET_CODE (body) == ADDR_VEC)
2565 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2566 ASM_OUTPUT_ADDR_VEC_ELT
2567 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2568 #else
2569 gcc_unreachable ();
2570 #endif
2572 else
2574 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2575 ASM_OUTPUT_ADDR_DIFF_ELT
2576 (file,
2577 body,
2578 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2579 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2580 #else
2581 gcc_unreachable ();
2582 #endif
2585 #ifdef ASM_OUTPUT_CASE_END
2586 ASM_OUTPUT_CASE_END (file,
2587 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2588 insn);
2589 #endif
2590 #endif
2592 switch_to_section (current_function_section ());
2594 break;
2596 /* Output this line note if it is the first or the last line
2597 note in a row. */
2598 if (!DECL_IGNORED_P (current_function_decl)
2599 && notice_source_line (insn, &is_stmt))
2601 if (flag_verbose_asm)
2602 asm_show_source (last_filename, last_linenum);
2603 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2604 last_filename, last_discriminator,
2605 is_stmt);
2608 if (GET_CODE (body) == PARALLEL
2609 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2610 body = XVECEXP (body, 0, 0);
2612 if (GET_CODE (body) == ASM_INPUT)
2614 const char *string = XSTR (body, 0);
2616 /* There's no telling what that did to the condition codes. */
2617 CC_STATUS_INIT;
2619 if (string[0])
2621 expanded_location loc;
2623 app_enable ();
2624 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2625 if (*loc.file && loc.line)
2626 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2627 ASM_COMMENT_START, loc.line, loc.file);
2628 fprintf (asm_out_file, "\t%s\n", string);
2629 #if HAVE_AS_LINE_ZERO
2630 if (*loc.file && loc.line)
2631 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2632 #endif
2634 break;
2637 /* Detect `asm' construct with operands. */
2638 if (asm_noperands (body) >= 0)
2640 unsigned int noperands = asm_noperands (body);
2641 rtx *ops = XALLOCAVEC (rtx, noperands);
2642 const char *string;
2643 location_t loc;
2644 expanded_location expanded;
2646 /* There's no telling what that did to the condition codes. */
2647 CC_STATUS_INIT;
2649 /* Get out the operand values. */
2650 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2651 /* Inhibit dying on what would otherwise be compiler bugs. */
2652 insn_noperands = noperands;
2653 this_is_asm_operands = insn;
2654 expanded = expand_location (loc);
2656 #ifdef FINAL_PRESCAN_INSN
2657 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2658 #endif
2660 /* Output the insn using them. */
2661 if (string[0])
2663 app_enable ();
2664 if (expanded.file && expanded.line)
2665 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2666 ASM_COMMENT_START, expanded.line, expanded.file);
2667 output_asm_insn (string, ops);
2668 #if HAVE_AS_LINE_ZERO
2669 if (expanded.file && expanded.line)
2670 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2671 #endif
2674 if (targetm.asm_out.final_postscan_insn)
2675 targetm.asm_out.final_postscan_insn (file, insn, ops,
2676 insn_noperands);
2678 this_is_asm_operands = 0;
2679 break;
2682 app_disable ();
2684 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2686 /* A delayed-branch sequence */
2687 int i;
2689 final_sequence = seq;
2691 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2692 force the restoration of a comparison that was previously
2693 thought unnecessary. If that happens, cancel this sequence
2694 and cause that insn to be restored. */
2696 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2697 if (next != seq->insn (1))
2699 final_sequence = 0;
2700 return next;
2703 for (i = 1; i < seq->len (); i++)
2705 rtx_insn *insn = seq->insn (i);
2706 rtx_insn *next = NEXT_INSN (insn);
2707 /* We loop in case any instruction in a delay slot gets
2708 split. */
2710 insn = final_scan_insn (insn, file, 0, 1, seen);
2711 while (insn != next);
2713 #ifdef DBR_OUTPUT_SEQEND
2714 DBR_OUTPUT_SEQEND (file);
2715 #endif
2716 final_sequence = 0;
2718 /* If the insn requiring the delay slot was a CALL_INSN, the
2719 insns in the delay slot are actually executed before the
2720 called function. Hence we don't preserve any CC-setting
2721 actions in these insns and the CC must be marked as being
2722 clobbered by the function. */
2723 if (CALL_P (seq->insn (0)))
2725 CC_STATUS_INIT;
2727 break;
2730 /* We have a real machine instruction as rtl. */
2732 body = PATTERN (insn);
2734 #if HAVE_cc0
2735 set = single_set (insn);
2737 /* Check for redundant test and compare instructions
2738 (when the condition codes are already set up as desired).
2739 This is done only when optimizing; if not optimizing,
2740 it should be possible for the user to alter a variable
2741 with the debugger in between statements
2742 and the next statement should reexamine the variable
2743 to compute the condition codes. */
2745 if (optimize_p)
2747 if (set
2748 && GET_CODE (SET_DEST (set)) == CC0
2749 && insn != last_ignored_compare)
2751 rtx src1, src2;
2752 if (GET_CODE (SET_SRC (set)) == SUBREG)
2753 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2755 src1 = SET_SRC (set);
2756 src2 = NULL_RTX;
2757 if (GET_CODE (SET_SRC (set)) == COMPARE)
2759 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2760 XEXP (SET_SRC (set), 0)
2761 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2762 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2763 XEXP (SET_SRC (set), 1)
2764 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2765 if (XEXP (SET_SRC (set), 1)
2766 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2767 src2 = XEXP (SET_SRC (set), 0);
2769 if ((cc_status.value1 != 0
2770 && rtx_equal_p (src1, cc_status.value1))
2771 || (cc_status.value2 != 0
2772 && rtx_equal_p (src1, cc_status.value2))
2773 || (src2 != 0 && cc_status.value1 != 0
2774 && rtx_equal_p (src2, cc_status.value1))
2775 || (src2 != 0 && cc_status.value2 != 0
2776 && rtx_equal_p (src2, cc_status.value2)))
2778 /* Don't delete insn if it has an addressing side-effect. */
2779 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2780 /* or if anything in it is volatile. */
2781 && ! volatile_refs_p (PATTERN (insn)))
2783 /* We don't really delete the insn; just ignore it. */
2784 last_ignored_compare = insn;
2785 break;
2791 /* If this is a conditional branch, maybe modify it
2792 if the cc's are in a nonstandard state
2793 so that it accomplishes the same thing that it would
2794 do straightforwardly if the cc's were set up normally. */
2796 if (cc_status.flags != 0
2797 && JUMP_P (insn)
2798 && GET_CODE (body) == SET
2799 && SET_DEST (body) == pc_rtx
2800 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2801 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2802 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2804 /* This function may alter the contents of its argument
2805 and clear some of the cc_status.flags bits.
2806 It may also return 1 meaning condition now always true
2807 or -1 meaning condition now always false
2808 or 2 meaning condition nontrivial but altered. */
2809 int result = alter_cond (XEXP (SET_SRC (body), 0));
2810 /* If condition now has fixed value, replace the IF_THEN_ELSE
2811 with its then-operand or its else-operand. */
2812 if (result == 1)
2813 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2814 if (result == -1)
2815 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2817 /* The jump is now either unconditional or a no-op.
2818 If it has become a no-op, don't try to output it.
2819 (It would not be recognized.) */
2820 if (SET_SRC (body) == pc_rtx)
2822 delete_insn (insn);
2823 break;
2825 else if (ANY_RETURN_P (SET_SRC (body)))
2826 /* Replace (set (pc) (return)) with (return). */
2827 PATTERN (insn) = body = SET_SRC (body);
2829 /* Rerecognize the instruction if it has changed. */
2830 if (result != 0)
2831 INSN_CODE (insn) = -1;
2834 /* If this is a conditional trap, maybe modify it if the cc's
2835 are in a nonstandard state so that it accomplishes the same
2836 thing that it would do straightforwardly if the cc's were
2837 set up normally. */
2838 if (cc_status.flags != 0
2839 && NONJUMP_INSN_P (insn)
2840 && GET_CODE (body) == TRAP_IF
2841 && COMPARISON_P (TRAP_CONDITION (body))
2842 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2844 /* This function may alter the contents of its argument
2845 and clear some of the cc_status.flags bits.
2846 It may also return 1 meaning condition now always true
2847 or -1 meaning condition now always false
2848 or 2 meaning condition nontrivial but altered. */
2849 int result = alter_cond (TRAP_CONDITION (body));
2851 /* If TRAP_CONDITION has become always false, delete the
2852 instruction. */
2853 if (result == -1)
2855 delete_insn (insn);
2856 break;
2859 /* If TRAP_CONDITION has become always true, replace
2860 TRAP_CONDITION with const_true_rtx. */
2861 if (result == 1)
2862 TRAP_CONDITION (body) = const_true_rtx;
2864 /* Rerecognize the instruction if it has changed. */
2865 if (result != 0)
2866 INSN_CODE (insn) = -1;
2869 /* Make same adjustments to instructions that examine the
2870 condition codes without jumping and instructions that
2871 handle conditional moves (if this machine has either one). */
2873 if (cc_status.flags != 0
2874 && set != 0)
2876 rtx cond_rtx, then_rtx, else_rtx;
2878 if (!JUMP_P (insn)
2879 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2881 cond_rtx = XEXP (SET_SRC (set), 0);
2882 then_rtx = XEXP (SET_SRC (set), 1);
2883 else_rtx = XEXP (SET_SRC (set), 2);
2885 else
2887 cond_rtx = SET_SRC (set);
2888 then_rtx = const_true_rtx;
2889 else_rtx = const0_rtx;
2892 if (COMPARISON_P (cond_rtx)
2893 && XEXP (cond_rtx, 0) == cc0_rtx)
2895 int result;
2896 result = alter_cond (cond_rtx);
2897 if (result == 1)
2898 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2899 else if (result == -1)
2900 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2901 else if (result == 2)
2902 INSN_CODE (insn) = -1;
2903 if (SET_DEST (set) == SET_SRC (set))
2904 delete_insn (insn);
2908 #endif
2910 /* Do machine-specific peephole optimizations if desired. */
2912 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2914 rtx_insn *next = peephole (insn);
2915 /* When peepholing, if there were notes within the peephole,
2916 emit them before the peephole. */
2917 if (next != 0 && next != NEXT_INSN (insn))
2919 rtx_insn *note, *prev = PREV_INSN (insn);
2921 for (note = NEXT_INSN (insn); note != next;
2922 note = NEXT_INSN (note))
2923 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2925 /* Put the notes in the proper position for a later
2926 rescan. For example, the SH target can do this
2927 when generating a far jump in a delayed branch
2928 sequence. */
2929 note = NEXT_INSN (insn);
2930 SET_PREV_INSN (note) = prev;
2931 SET_NEXT_INSN (prev) = note;
2932 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2933 SET_PREV_INSN (insn) = PREV_INSN (next);
2934 SET_NEXT_INSN (insn) = next;
2935 SET_PREV_INSN (next) = insn;
2938 /* PEEPHOLE might have changed this. */
2939 body = PATTERN (insn);
2942 /* Try to recognize the instruction.
2943 If successful, verify that the operands satisfy the
2944 constraints for the instruction. Crash if they don't,
2945 since `reload' should have changed them so that they do. */
2947 insn_code_number = recog_memoized (insn);
2948 cleanup_subreg_operands (insn);
2950 /* Dump the insn in the assembly for debugging (-dAP).
2951 If the final dump is requested as slim RTL, dump slim
2952 RTL to the assembly file also. */
2953 if (flag_dump_rtl_in_asm)
2955 print_rtx_head = ASM_COMMENT_START;
2956 if (! (dump_flags & TDF_SLIM))
2957 print_rtl_single (asm_out_file, insn);
2958 else
2959 dump_insn_slim (asm_out_file, insn);
2960 print_rtx_head = "";
2963 if (! constrain_operands_cached (insn, 1))
2964 fatal_insn_not_found (insn);
2966 /* Some target machines need to prescan each insn before
2967 it is output. */
2969 #ifdef FINAL_PRESCAN_INSN
2970 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2971 #endif
2973 if (targetm.have_conditional_execution ()
2974 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2975 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2977 #if HAVE_cc0
2978 cc_prev_status = cc_status;
2980 /* Update `cc_status' for this instruction.
2981 The instruction's output routine may change it further.
2982 If the output routine for a jump insn needs to depend
2983 on the cc status, it should look at cc_prev_status. */
2985 NOTICE_UPDATE_CC (body, insn);
2986 #endif
2988 current_output_insn = debug_insn = insn;
2990 /* Find the proper template for this insn. */
2991 templ = get_insn_template (insn_code_number, insn);
2993 /* If the C code returns 0, it means that it is a jump insn
2994 which follows a deleted test insn, and that test insn
2995 needs to be reinserted. */
2996 if (templ == 0)
2998 rtx_insn *prev;
3000 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
3002 /* We have already processed the notes between the setter and
3003 the user.  Make sure we don't process them again; this is
3004 particularly important if one of the notes is a block
3005 scope note or an EH note. */
3006 for (prev = insn;
3007 prev != last_ignored_compare;
3008 prev = PREV_INSN (prev))
3010 if (NOTE_P (prev))
3011 delete_insn (prev); /* Use delete_note. */
3014 return prev;
3017 /* If the template is the string "#", it means that this insn must
3018 be split. */
3019 if (templ[0] == '#' && templ[1] == '\0')
3021 rtx_insn *new_rtx = try_split (body, insn, 0);
3023 /* If we didn't split the insn, go away. */
3024 if (new_rtx == insn && PATTERN (new_rtx) == body)
3025 fatal_insn ("could not split insn", insn);
3027 /* If we have a length attribute, this instruction should have
3028 been split in shorten_branches, to ensure that we would have
3029 valid length info for the splitees. */
3030 gcc_assert (!HAVE_ATTR_length);
3032 return new_rtx;
3035 /* ??? This will put the directives in the wrong place if
3036 get_insn_template outputs assembly directly.  However, calling it
3037 before get_insn_template breaks if the insn is split. */
3038 if (targetm.asm_out.unwind_emit_before_insn
3039 && targetm.asm_out.unwind_emit)
3040 targetm.asm_out.unwind_emit (asm_out_file, insn);
3042 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
3043 if (call_insn != NULL)
3045 rtx x = call_from_call_insn (call_insn);
3046 x = XEXP (x, 0);
3047 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3049 tree t;
3050 x = XEXP (x, 0);
3051 t = SYMBOL_REF_DECL (x);
3052 if (t)
3053 assemble_external (t);
3057 /* Output assembler code from the template. */
3058 output_asm_insn (templ, recog_data.operand);
3060 /* Some target machines need to postscan each insn after
3061 it is output. */
3062 if (targetm.asm_out.final_postscan_insn)
3063 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3064 recog_data.n_operands);
3066 if (!targetm.asm_out.unwind_emit_before_insn
3067 && targetm.asm_out.unwind_emit)
3068 targetm.asm_out.unwind_emit (asm_out_file, insn);
3070 /* Let the debug info back-end know about this call. We do this only
3071 after the instruction has been emitted because labels that may be
3072 created to reference the call instruction must appear after it. */
3073 if (call_insn != NULL && !DECL_IGNORED_P (current_function_decl))
3074 debug_hooks->var_location (insn);
3076 current_output_insn = debug_insn = 0;
3079 return NEXT_INSN (insn);
3082 /* Return whether a source line note needs to be emitted before INSN.
3083 Sets IS_STMT to TRUE if the line should be marked as a possible
3084 breakpoint location. */
3086 static bool
3087 notice_source_line (rtx_insn *insn, bool *is_stmt)
3089 const char *filename;
3090 int linenum, columnnum;
3092 if (override_filename)
3094 filename = override_filename;
3095 linenum = override_linenum;
3096 columnnum = override_columnnum;
3098 else if (INSN_HAS_LOCATION (insn))
3100 expanded_location xloc = insn_location (insn);
3101 filename = xloc.file;
3102 linenum = xloc.line;
3103 columnnum = xloc.column;
3105 else
3107 filename = NULL;
3108 linenum = 0;
3109 columnnum = 0;
3112 if (filename == NULL)
3113 return false;
3115 if (force_source_line
3116 || filename != last_filename
3117 || last_linenum != linenum
3118 || (debug_column_info && last_columnnum != columnnum))
3120 force_source_line = false;
3121 last_filename = filename;
3122 last_linenum = linenum;
3123 last_columnnum = columnnum;
3124 last_discriminator = discriminator;
3125 *is_stmt = true;
3126 high_block_linenum = MAX (last_linenum, high_block_linenum);
3127 high_function_linenum = MAX (last_linenum, high_function_linenum);
3128 return true;
3131 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3133 /* If the discriminator changed, but the line number did not,
3134 output the line table entry with is_stmt false so the
3135 debugger does not treat this as a breakpoint location. */
3136 last_discriminator = discriminator;
3137 *is_stmt = false;
3138 return true;
3141 return false;
3144 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3145 directly to the desired hard register. */
3147 void
3148 cleanup_subreg_operands (rtx_insn *insn)
3150 int i;
3151 bool changed = false;
3152 extract_insn_cached (insn);
3153 for (i = 0; i < recog_data.n_operands; i++)
3155 /* The following test cannot use recog_data.operand when testing
3156 for a SUBREG: the underlying object might have been changed
3157 already if we are inside a match_operator expression that
3158 matches the else clause. Instead we test the underlying
3159 expression directly. */
3160 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3162 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3163 changed = true;
3165 else if (GET_CODE (recog_data.operand[i]) == PLUS
3166 || GET_CODE (recog_data.operand[i]) == MULT
3167 || MEM_P (recog_data.operand[i]))
3168 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3171 for (i = 0; i < recog_data.n_dups; i++)
3173 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3175 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3176 changed = true;
3178 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3179 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3180 || MEM_P (*recog_data.dup_loc[i]))
3181 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3183 if (changed)
3184 df_insn_rescan (insn);
3187 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3188 the thing it is a subreg of. Do it anyway if FINAL_P. */
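/* An illustrative, target-dependent example: (subreg:QI (mem:SI ...) 3)
   becomes a QImode MEM at byte offset 3 via adjust_address, while a
   subreg of a hard register becomes the corresponding hard REG when
   simplify_subreg can resolve it.  */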
3190 rtx
3191 alter_subreg (rtx *xp, bool final_p)
3193 rtx x = *xp;
3194 rtx y = SUBREG_REG (x);
3196 /* simplify_subreg does not remove subreg from volatile references.
3197 We are required to. */
3198 if (MEM_P (y))
3200 int offset = SUBREG_BYTE (x);
3202 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3203 contains 0 instead of the proper offset. See simplify_subreg. */
3204 if (offset == 0
3205 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3207 int difference = GET_MODE_SIZE (GET_MODE (y))
3208 - GET_MODE_SIZE (GET_MODE (x));
3209 if (WORDS_BIG_ENDIAN)
3210 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3211 if (BYTES_BIG_ENDIAN)
3212 offset += difference % UNITS_PER_WORD;
3215 if (final_p)
3216 *xp = adjust_address (y, GET_MODE (x), offset);
3217 else
3218 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3220 else if (REG_P (y) && HARD_REGISTER_P (y))
3222 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3223 SUBREG_BYTE (x));
3225 if (new_rtx != 0)
3226 *xp = new_rtx;
3227 else if (final_p && REG_P (y))
3229 /* Simplify_subreg can't handle some REG cases, but we have to. */
3230 unsigned int regno;
3231 HOST_WIDE_INT offset;
3233 regno = subreg_regno (x);
3234 if (subreg_lowpart_p (x))
3235 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3236 else
3237 offset = SUBREG_BYTE (x);
3238 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3242 return *xp;
3245 /* Do alter_subreg on all the SUBREGs contained in X. */
3247 static rtx
3248 walk_alter_subreg (rtx *xp, bool *changed)
3250 rtx x = *xp;
3251 switch (GET_CODE (x))
3253 case PLUS:
3254 case MULT:
3255 case AND:
3256 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3257 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3258 break;
3260 case MEM:
3261 case ZERO_EXTEND:
3262 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3263 break;
3265 case SUBREG:
3266 *changed = true;
3267 return alter_subreg (xp, true);
3269 default:
3270 break;
3273 return *xp;
3276 #if HAVE_cc0
3278 /* Given BODY, the body of a jump instruction, alter the jump condition
3279 as required by the bits that are set in cc_status.flags.
3280 Not all of the bits there can be handled at this level in all cases.
3282 The value is normally 0.
3283 1 means that the condition has become always true.
3284 -1 means that the condition has become always false.
3285 2 means that COND has been altered. */
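/* For example, when only CC_REVERSED is set, a condition such as
   (gt (cc0) (const_int 0)) is rewritten in place to
   (lt (cc0) (const_int 0)) and 2 is returned.  */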
3287 static int
3288 alter_cond (rtx cond)
3290 int value = 0;
3292 if (cc_status.flags & CC_REVERSED)
3294 value = 2;
3295 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3298 if (cc_status.flags & CC_INVERTED)
3300 value = 2;
3301 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3304 if (cc_status.flags & CC_NOT_POSITIVE)
3305 switch (GET_CODE (cond))
3307 case LE:
3308 case LEU:
3309 case GEU:
3310 /* Jump becomes unconditional. */
3311 return 1;
3313 case GT:
3314 case GTU:
3315 case LTU:
3316 /* Jump becomes no-op. */
3317 return -1;
3319 case GE:
3320 PUT_CODE (cond, EQ);
3321 value = 2;
3322 break;
3324 case LT:
3325 PUT_CODE (cond, NE);
3326 value = 2;
3327 break;
3329 default:
3330 break;
3333 if (cc_status.flags & CC_NOT_NEGATIVE)
3334 switch (GET_CODE (cond))
3336 case GE:
3337 case GEU:
3338 /* Jump becomes unconditional. */
3339 return 1;
3341 case LT:
3342 case LTU:
3343 /* Jump becomes no-op. */
3344 return -1;
3346 case LE:
3347 case LEU:
3348 PUT_CODE (cond, EQ);
3349 value = 2;
3350 break;
3352 case GT:
3353 case GTU:
3354 PUT_CODE (cond, NE);
3355 value = 2;
3356 break;
3358 default:
3359 break;
3362 if (cc_status.flags & CC_NO_OVERFLOW)
3363 switch (GET_CODE (cond))
3365 case GEU:
3366 /* Jump becomes unconditional. */
3367 return 1;
3369 case LEU:
3370 PUT_CODE (cond, EQ);
3371 value = 2;
3372 break;
3374 case GTU:
3375 PUT_CODE (cond, NE);
3376 value = 2;
3377 break;
3379 case LTU:
3380 /* Jump becomes no-op. */
3381 return -1;
3383 default:
3384 break;
3387 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3388 switch (GET_CODE (cond))
3390 default:
3391 gcc_unreachable ();
3393 case NE:
3394 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3395 value = 2;
3396 break;
3398 case EQ:
3399 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3400 value = 2;
3401 break;
3404 if (cc_status.flags & CC_NOT_SIGNED)
3405 /* The flags are valid if signed condition operators are converted
3406 to unsigned. */
3407 switch (GET_CODE (cond))
3409 case LE:
3410 PUT_CODE (cond, LEU);
3411 value = 2;
3412 break;
3414 case LT:
3415 PUT_CODE (cond, LTU);
3416 value = 2;
3417 break;
3419 case GT:
3420 PUT_CODE (cond, GTU);
3421 value = 2;
3422 break;
3424 case GE:
3425 PUT_CODE (cond, GEU);
3426 value = 2;
3427 break;
3429 default:
3430 break;
3433 return value;
3435 #endif
3437 /* Report inconsistency between the assembler template and the operands.
3438 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3440 void
3441 output_operand_lossage (const char *cmsgid, ...)
3443 char *fmt_string;
3444 char *new_message;
3445 const char *pfx_str;
3446 va_list ap;
3448 va_start (ap, cmsgid);
3450 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3451 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3452 new_message = xvasprintf (fmt_string, ap);
3454 if (this_is_asm_operands)
3455 error_for_asm (this_is_asm_operands, "%s", new_message);
3456 else
3457 internal_error ("%s", new_message);
3459 free (fmt_string);
3460 free (new_message);
3461 va_end (ap);
3464 /* Output of assembler code from a template, and its subroutines. */
3466 /* Annotate the assembly with a comment describing the pattern and
3467 alternative used. */
3469 static void
3470 output_asm_name (void)
3472 if (debug_insn)
3474 int num = INSN_CODE (debug_insn);
3475 fprintf (asm_out_file, "\t%s %d\t%s",
3476 ASM_COMMENT_START, INSN_UID (debug_insn),
3477 insn_data[num].name);
3478 if (insn_data[num].n_alternatives > 1)
3479 fprintf (asm_out_file, "/%d", which_alternative + 1);
3481 if (HAVE_ATTR_length)
3482 fprintf (asm_out_file, "\t[length = %d]",
3483 get_attr_length (debug_insn));
3485 /* Clear this so only the first assembler insn
3486 of any rtl insn will get the special comment for -dp. */
3487 debug_insn = 0;
3491 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3492 or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3493 corresponds to the address of the object and 0 if to the object. */
3495 static tree
3496 get_mem_expr_from_op (rtx op, int *paddressp)
3498 tree expr;
3499 int inner_addressp;
3501 *paddressp = 0;
3503 if (REG_P (op))
3504 return REG_EXPR (op);
3505 else if (!MEM_P (op))
3506 return 0;
3508 if (MEM_EXPR (op) != 0)
3509 return MEM_EXPR (op);
3511 /* Otherwise we have an address, so indicate it and look at the address. */
3512 *paddressp = 1;
3513 op = XEXP (op, 0);
3515 /* First check if we have a decl for the address, then look at the right side
3516 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3517 But don't allow the address itself to be indirect.  */
3518 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3519 return expr;
3520 else if (GET_CODE (op) == PLUS
3521 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3522 return expr;
3524 while (UNARY_P (op)
3525 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3526 op = XEXP (op, 0);
3528 expr = get_mem_expr_from_op (op, &inner_addressp);
3529 return inner_addressp ? 0 : expr;
3532 /* Output operand names for assembler instructions. OPERANDS is the
3533 operand vector, OPORDER is the order to write the operands, and NOPS
3534 is the number of operands to write. */
3536 static void
3537 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3539 int wrote = 0;
3540 int i;
3542 for (i = 0; i < nops; i++)
3544 int addressp;
3545 rtx op = operands[oporder[i]];
3546 tree expr = get_mem_expr_from_op (op, &addressp);
3548 fprintf (asm_out_file, "%c%s",
3549 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3550 wrote = 1;
3551 if (expr)
3553 fprintf (asm_out_file, "%s",
3554 addressp ? "*" : "");
3555 print_mem_expr (asm_out_file, expr);
3556 wrote = 1;
3558 else if (REG_P (op) && ORIGINAL_REGNO (op)
3559 && ORIGINAL_REGNO (op) != REGNO (op))
3560 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3564 #ifdef ASSEMBLER_DIALECT
3565 /* Helper function to parse assembler dialects in the asm string.
3566 This is called from output_asm_insn and asm_fprintf. */
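/* For example, with two dialects a template fragment such as "{addl|add}"
   emits "addl" when dialect_number is 0 and "add" when it is 1.  */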
3567 static const char *
3568 do_assembler_dialects (const char *p, int *dialect)
3570 char c = *(p - 1);
3572 switch (c)
3574 case '{':
3576 int i;
3578 if (*dialect)
3579 output_operand_lossage ("nested assembly dialect alternatives");
3580 else
3581 *dialect = 1;
3583 /* If we want the first dialect, do nothing. Otherwise, skip
3584 DIALECT_NUMBER of strings ending with '|'. */
3585 for (i = 0; i < dialect_number; i++)
3587 while (*p && *p != '}')
3589 if (*p == '|')
3591 p++;
3592 break;
3595 /* Skip over any character after a percent sign. */
3596 if (*p == '%')
3597 p++;
3598 if (*p)
3599 p++;
3602 if (*p == '}')
3603 break;
3606 if (*p == '\0')
3607 output_operand_lossage ("unterminated assembly dialect alternative");
3609 break;
3611 case '|':
3612 if (*dialect)
3614 /* Skip to close brace. */
3617 if (*p == '\0')
3619 output_operand_lossage ("unterminated assembly dialect alternative");
3620 break;
3623 /* Skip over any character after a percent sign. */
3624 if (*p == '%' && p[1])
3626 p += 2;
3627 continue;
3630 if (*p++ == '}')
3631 break;
3633 while (1);
3635 *dialect = 0;
3637 else
3638 putc (c, asm_out_file);
3639 break;
3641 case '}':
3642 if (! *dialect)
3643 putc (c, asm_out_file);
3644 *dialect = 0;
3645 break;
3646 default:
3647 gcc_unreachable ();
3650 return p;
3652 #endif
3654 /* Output text from TEMPLATE to the assembler output file,
3655 obeying %-directions to substitute operands taken from
3656 the vector OPERANDS.
3658 %N (for N a digit) means print operand N in usual manner.
3659 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3660 and print the label name with no punctuation.
3661 %cN means require operand N to be a constant
3662 and print the constant expression with no punctuation.
3663 %aN means expect operand N to be a memory address
3664 (not a memory reference!) and print a reference
3665 to that address.
3666 %nN means expect operand N to be a constant
3667 and print a constant expression for minus the value
3668 of the operand, with no other punctuation. */
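/* An illustrative example: given the template "add %1,%0" and an OPERANDS
   vector holding two register operands, this emits a leading tab, the
   opcode with each operand printed by the target's TARGET_PRINT_OPERAND
   hook, and a trailing newline.  */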
3670 void
3671 output_asm_insn (const char *templ, rtx *operands)
3673 const char *p;
3674 int c;
3675 #ifdef ASSEMBLER_DIALECT
3676 int dialect = 0;
3677 #endif
3678 int oporder[MAX_RECOG_OPERANDS];
3679 char opoutput[MAX_RECOG_OPERANDS];
3680 int ops = 0;
3682 /* An insn may return a null string template
3683 in a case where no assembler code is needed. */
3684 if (*templ == 0)
3685 return;
3687 memset (opoutput, 0, sizeof opoutput);
3688 p = templ;
3689 putc ('\t', asm_out_file);
3691 #ifdef ASM_OUTPUT_OPCODE
3692 ASM_OUTPUT_OPCODE (asm_out_file, p);
3693 #endif
3695 while ((c = *p++))
3696 switch (c)
3698 case '\n':
3699 if (flag_verbose_asm)
3700 output_asm_operand_names (operands, oporder, ops);
3701 if (flag_print_asm_name)
3702 output_asm_name ();
3704 ops = 0;
3705 memset (opoutput, 0, sizeof opoutput);
3707 putc (c, asm_out_file);
3708 #ifdef ASM_OUTPUT_OPCODE
3709 while ((c = *p) == '\t')
3711 putc (c, asm_out_file);
3712 p++;
3714 ASM_OUTPUT_OPCODE (asm_out_file, p);
3715 #endif
3716 break;
3718 #ifdef ASSEMBLER_DIALECT
3719 case '{':
3720 case '}':
3721 case '|':
3722 p = do_assembler_dialects (p, &dialect);
3723 break;
3724 #endif
3726 case '%':
3727 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3728 if ASSEMBLER_DIALECT is defined and these characters have a special
3729 meaning as dialect delimiters. */
3730 if (*p == '%'
3731 #ifdef ASSEMBLER_DIALECT
3732 || *p == '{' || *p == '}' || *p == '|'
3733 #endif
3736 putc (*p, asm_out_file);
3737 p++;
3739 /* %= outputs a number which is unique to each insn in the entire
3740 compilation. This is useful for making local labels that are
3741 referred to more than once in a given insn. */
3742 else if (*p == '=')
3744 p++;
3745 fprintf (asm_out_file, "%d", insn_counter);
3747 /* % followed by a letter and some digits
3748 outputs an operand in a special way depending on the letter.
3749 Letters `acln' are implemented directly.
3750 Other letters are passed to `output_operand' so that
3751 the TARGET_PRINT_OPERAND hook can define them. */
3752 else if (ISALPHA (*p))
3754 int letter = *p++;
3755 unsigned long opnum;
3756 char *endptr;
3758 opnum = strtoul (p, &endptr, 10);
3760 if (endptr == p)
3761 output_operand_lossage ("operand number missing "
3762 "after %%-letter");
3763 else if (this_is_asm_operands && opnum >= insn_noperands)
3764 output_operand_lossage ("operand number out of range");
3765 else if (letter == 'l')
3766 output_asm_label (operands[opnum]);
3767 else if (letter == 'a')
3768 output_address (VOIDmode, operands[opnum]);
3769 else if (letter == 'c')
3771 if (CONSTANT_ADDRESS_P (operands[opnum]))
3772 output_addr_const (asm_out_file, operands[opnum]);
3773 else
3774 output_operand (operands[opnum], 'c');
3776 else if (letter == 'n')
3778 if (CONST_INT_P (operands[opnum]))
3779 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3780 - INTVAL (operands[opnum]));
3781 else
3783 putc ('-', asm_out_file);
3784 output_addr_const (asm_out_file, operands[opnum]);
3787 else
3788 output_operand (operands[opnum], letter);
3790 if (!opoutput[opnum])
3791 oporder[ops++] = opnum;
3792 opoutput[opnum] = 1;
3794 p = endptr;
3795 c = *p;
3797 /* % followed by a digit outputs an operand the default way. */
3798 else if (ISDIGIT (*p))
3800 unsigned long opnum;
3801 char *endptr;
3803 opnum = strtoul (p, &endptr, 10);
3804 if (this_is_asm_operands && opnum >= insn_noperands)
3805 output_operand_lossage ("operand number out of range");
3806 else
3807 output_operand (operands[opnum], 0);
3809 if (!opoutput[opnum])
3810 oporder[ops++] = opnum;
3811 opoutput[opnum] = 1;
3813 p = endptr;
3814 c = *p;
3816 /* % followed by punctuation: output something for that
3817 punctuation character alone, with no operand. The
3818 TARGET_PRINT_OPERAND hook decides what is actually done. */
3819 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3820 output_operand (NULL_RTX, *p++);
3821 else
3822 output_operand_lossage ("invalid %%-code");
3823 break;
3825 default:
3826 putc (c, asm_out_file);
3829 /* Write out the variable names for operands, if we know them. */
3830 if (flag_verbose_asm)
3831 output_asm_operand_names (operands, oporder, ops);
3832 if (flag_print_asm_name)
3833 output_asm_name ();
3835 putc ('\n', asm_out_file);
3838 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3840 void
3841 output_asm_label (rtx x)
3843 char buf[256];
3845 if (GET_CODE (x) == LABEL_REF)
3846 x = label_ref_label (x);
3847 if (LABEL_P (x)
3848 || (NOTE_P (x)
3849 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3850 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3851 else
3852 output_operand_lossage ("'%%l' operand isn't a label");
3854 assemble_name (asm_out_file, buf);
3857 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3859 void
3860 mark_symbol_refs_as_used (rtx x)
3862 subrtx_iterator::array_type array;
3863 FOR_EACH_SUBRTX (iter, array, x, ALL)
3865 const_rtx x = *iter;
3866 if (GET_CODE (x) == SYMBOL_REF)
3867 if (tree t = SYMBOL_REF_DECL (x))
3868 assemble_external (t);
3872 /* Print operand X using machine-dependent assembler syntax.
3873 CODE is a non-digit that preceded the operand-number in the % spec,
3874 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3875 between the % and the digits.
3876 When CODE is a non-letter, X is 0.
3878 The meanings of the letters are machine-dependent and controlled
3879 by TARGET_PRINT_OPERAND. */
3881 void
3882 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3884 if (x && GET_CODE (x) == SUBREG)
3885 x = alter_subreg (&x, true);
3887 /* X must not be a pseudo reg. */
3888 if (!targetm.no_register_allocation)
3889 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3891 targetm.asm_out.print_operand (asm_out_file, x, code);
3893 if (x == NULL_RTX)
3894 return;
3896 mark_symbol_refs_as_used (x);
3899 /* Print a memory reference operand for address X using
3900 machine-dependent assembler syntax. */
3902 void
3903 output_address (machine_mode mode, rtx x)
3905 bool changed = false;
3906 walk_alter_subreg (&x, &changed);
3907 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3910 /* Print an integer constant expression in assembler syntax.
3911 Addition and subtraction are the only arithmetic
3912 that may appear in these expressions. */
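/* For example, (const (plus (symbol_ref "foo") (const_int 16))) prints as
   "foo+16"; with (const_int -16) the '+' is suppressed and the result is
   "foo-16".  */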
3914 void
3915 output_addr_const (FILE *file, rtx x)
3917 char buf[256];
3919 restart:
3920 switch (GET_CODE (x))
3922 case PC:
3923 putc ('.', file);
3924 break;
3926 case SYMBOL_REF:
3927 if (SYMBOL_REF_DECL (x))
3928 assemble_external (SYMBOL_REF_DECL (x));
3929 #ifdef ASM_OUTPUT_SYMBOL_REF
3930 ASM_OUTPUT_SYMBOL_REF (file, x);
3931 #else
3932 assemble_name (file, XSTR (x, 0));
3933 #endif
3934 break;
3936 case LABEL_REF:
3937 x = label_ref_label (x);
3938 /* Fall through. */
3939 case CODE_LABEL:
3940 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3941 #ifdef ASM_OUTPUT_LABEL_REF
3942 ASM_OUTPUT_LABEL_REF (file, buf);
3943 #else
3944 assemble_name (file, buf);
3945 #endif
3946 break;
3948 case CONST_INT:
3949 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3950 break;
3952 case CONST:
3953 /* This used to output parentheses around the expression,
3954 but that does not work on the 386 (either ATT or BSD assembler). */
3955 output_addr_const (file, XEXP (x, 0));
3956 break;
3958 case CONST_WIDE_INT:
3959 /* We do not know the mode here, so we have to use a roundabout
3960 way to build a wide-int to get it printed properly. */
3962 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3963 CONST_WIDE_INT_NUNITS (x),
3964 CONST_WIDE_INT_NUNITS (x)
3965 * HOST_BITS_PER_WIDE_INT,
3966 false);
3967 print_decs (w, file);
3969 break;
3971 case CONST_DOUBLE:
3972 if (CONST_DOUBLE_AS_INT_P (x))
3974 /* We can use %d if the number is one word and positive. */
3975 if (CONST_DOUBLE_HIGH (x))
3976 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3977 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3978 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3979 else if (CONST_DOUBLE_LOW (x) < 0)
3980 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3981 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3982 else
3983 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3985 else
3986 /* We can't handle floating point constants;
3987 PRINT_OPERAND must handle them. */
3988 output_operand_lossage ("floating constant misused");
3989 break;
3991 case CONST_FIXED:
3992 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3993 break;
3995 case PLUS:
3996 /* Some assemblers need integer constants to appear last (e.g. masm). */
3997 if (CONST_INT_P (XEXP (x, 0)))
3999 output_addr_const (file, XEXP (x, 1));
4000 if (INTVAL (XEXP (x, 0)) >= 0)
4001 fprintf (file, "+");
4002 output_addr_const (file, XEXP (x, 0));
4004 else
4006 output_addr_const (file, XEXP (x, 0));
4007 if (!CONST_INT_P (XEXP (x, 1))
4008 || INTVAL (XEXP (x, 1)) >= 0)
4009 fprintf (file, "+");
4010 output_addr_const (file, XEXP (x, 1));
4012 break;
4014 case MINUS:
4015 /* Avoid outputting things like x-x or x+5-x,
4016 since some assemblers can't handle that. */
4017 x = simplify_subtraction (x);
4018 if (GET_CODE (x) != MINUS)
4019 goto restart;
4021 output_addr_const (file, XEXP (x, 0));
4022 fprintf (file, "-");
4023 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
4024 || GET_CODE (XEXP (x, 1)) == PC
4025 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
4026 output_addr_const (file, XEXP (x, 1));
4027 else
4029 fputs (targetm.asm_out.open_paren, file);
4030 output_addr_const (file, XEXP (x, 1));
4031 fputs (targetm.asm_out.close_paren, file);
4033 break;
4035 case ZERO_EXTEND:
4036 case SIGN_EXTEND:
4037 case SUBREG:
4038 case TRUNCATE:
4039 output_addr_const (file, XEXP (x, 0));
4040 break;
4042 default:
4043 if (targetm.asm_out.output_addr_const_extra (file, x))
4044 break;
4046 output_operand_lossage ("invalid expression as operand");
4050 /* Output a quoted string. */
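/* Unless the target provides OUTPUT_QUOTED_STRING, printable characters
   are emitted as-is (with '"' and '\' backslash-escaped) and everything
   else as a three-digit octal escape.  */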
4052 void
4053 output_quoted_string (FILE *asm_file, const char *string)
4055 #ifdef OUTPUT_QUOTED_STRING
4056 OUTPUT_QUOTED_STRING (asm_file, string);
4057 #else
4058 char c;
4060 putc ('\"', asm_file);
4061 while ((c = *string++) != 0)
4063 if (ISPRINT (c))
4065 if (c == '\"' || c == '\\')
4066 putc ('\\', asm_file);
4067 putc (c, asm_file);
4069 else
4070 fprintf (asm_file, "\\%03o", (unsigned char) c);
4072 putc ('\"', asm_file);
4073 #endif
4076 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
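/* E.g. the value 48879 is written as "0xbeef"; a VALUE of zero is written
   as a bare "0" without the "0x" prefix.  */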
4078 void
4079 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4081 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4082 if (value == 0)
4083 putc ('0', f);
4084 else
4086 char *p = buf + sizeof (buf);
4087 do
4088 *--p = "0123456789abcdef"[value % 16];
4089 while ((value /= 16) != 0);
4090 *--p = 'x';
4091 *--p = '0';
4092 fwrite (p, 1, buf + sizeof (buf) - p, f);
4096 /* Internal function that prints an unsigned long in decimal in reverse.
4097 The output string IS NOT null-terminated. */
4099 static int
4100 sprint_ul_rev (char *s, unsigned long value)
4102 int i = 0;
4103 do
4105 s[i] = "0123456789"[value % 10];
4106 value /= 10;
4107 i++;
4108 /* alternate version, without modulo */
4109 /* oldval = value; */
4110 /* value /= 10; */
4111 /* s[i] = "0123456789" [oldval - 10*value]; */
4112 /* i++ */
4114 while (value != 0);
4115 return i;
4118 /* Write an unsigned long as decimal to a file, fast. */
4120 void
4121 fprint_ul (FILE *f, unsigned long value)
4123 /* python says: len(str(2**64)) == 20 */
4124 char s[20];
4125 int i;
4127 i = sprint_ul_rev (s, value);
4129 /* It's probably too small to bother with string reversal and fputs. */
4130 do
4132 i--;
4133 putc (s[i], f);
4135 while (i != 0);
4138 /* Write an unsigned long as decimal to a string, fast.
4139 s must be wide enough to not overflow, at least 21 chars.
4140 Returns the length of the string (without terminating '\0'). */
4142 int
4143 sprint_ul (char *s, unsigned long value)
4145 int len = sprint_ul_rev (s, value);
4146 s[len] = '\0';
4148 std::reverse (s, s + len);
4149 return len;
4152 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4153 %R prints the value of REGISTER_PREFIX.
4154 %L prints the value of LOCAL_LABEL_PREFIX.
4155 %U prints the value of USER_LABEL_PREFIX.
4156 %I prints the value of IMMEDIATE_PREFIX.
4157 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4158 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4160 We handle alternate assembler dialects here, just like output_asm_insn. */
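/* An illustrative call (not taken from this file):
     asm_fprintf (file, "\tmov\t%R%s, %I%wd\n", reg_names[regno], val);
   prints the register prefix, a register name, the immediate prefix and a
   HOST_WIDE_INT in decimal, honoring any {...|...} dialect alternatives.  */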
4162 void
4163 asm_fprintf (FILE *file, const char *p, ...)
4165 char buf[10];
4166 char *q, c;
4167 #ifdef ASSEMBLER_DIALECT
4168 int dialect = 0;
4169 #endif
4170 va_list argptr;
4172 va_start (argptr, p);
4174 buf[0] = '%';
4176 while ((c = *p++))
4177 switch (c)
4179 #ifdef ASSEMBLER_DIALECT
4180 case '{':
4181 case '}':
4182 case '|':
4183 p = do_assembler_dialects (p, &dialect);
4184 break;
4185 #endif
4187 case '%':
4188 c = *p++;
4189 q = &buf[1];
4190 while (strchr ("-+ #0", c))
4192 *q++ = c;
4193 c = *p++;
4195 while (ISDIGIT (c) || c == '.')
4197 *q++ = c;
4198 c = *p++;
4200 switch (c)
4202 case '%':
4203 putc ('%', file);
4204 break;
4206 case 'd': case 'i': case 'u':
4207 case 'x': case 'X': case 'o':
4208 case 'c':
4209 *q++ = c;
4210 *q = 0;
4211 fprintf (file, buf, va_arg (argptr, int));
4212 break;
4214 case 'w':
4215 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4216 'o' cases, but we do not check for those cases. It
4217 means that the value is a HOST_WIDE_INT, which may be
4218 either `long' or `long long'. */
4219 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4220 q += strlen (HOST_WIDE_INT_PRINT);
4221 *q++ = *p++;
4222 *q = 0;
4223 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4224 break;
4226 case 'l':
4227 *q++ = c;
4228 #ifdef HAVE_LONG_LONG
4229 if (*p == 'l')
4231 *q++ = *p++;
4232 *q++ = *p++;
4233 *q = 0;
4234 fprintf (file, buf, va_arg (argptr, long long));
4236 else
4237 #endif
4239 *q++ = *p++;
4240 *q = 0;
4241 fprintf (file, buf, va_arg (argptr, long));
4244 break;
4246 case 's':
4247 *q++ = c;
4248 *q = 0;
4249 fprintf (file, buf, va_arg (argptr, char *));
4250 break;
4252 case 'O':
4253 #ifdef ASM_OUTPUT_OPCODE
4254 ASM_OUTPUT_OPCODE (asm_out_file, p);
4255 #endif
4256 break;
4258 case 'R':
4259 #ifdef REGISTER_PREFIX
4260 fprintf (file, "%s", REGISTER_PREFIX);
4261 #endif
4262 break;
4264 case 'I':
4265 #ifdef IMMEDIATE_PREFIX
4266 fprintf (file, "%s", IMMEDIATE_PREFIX);
4267 #endif
4268 break;
4270 case 'L':
4271 #ifdef LOCAL_LABEL_PREFIX
4272 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4273 #endif
4274 break;
4276 case 'U':
4277 fputs (user_label_prefix, file);
4278 break;
4280 #ifdef ASM_FPRINTF_EXTENSIONS
4281 /* Uppercase letters are reserved for general use by asm_fprintf
4282 and so are not available to target specific code. In order to
4283 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4284 they are defined here. As they get turned into real extensions
4285 to asm_fprintf they should be removed from this list. */
4286 case 'A': case 'B': case 'C': case 'D': case 'E':
4287 case 'F': case 'G': case 'H': case 'J': case 'K':
4288 case 'M': case 'N': case 'P': case 'Q': case 'S':
4289 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4290 break;
4292 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4293 #endif
4294 default:
4295 gcc_unreachable ();
4297 break;
4299 default:
4300 putc (c, file);
4302 va_end (argptr);
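/* Editor's note: an illustrative sketch, not taken from any real back end;
   the mnemonics, register number and offset are hypothetical.  reg_names[]
   is the usual array of hard register names.  */
#if 0
  HOST_WIDE_INT offset = 16;
  asm_fprintf (asm_out_file, "\tld\t%R%s, %wd(%Rsp)\n", reg_names[3], offset);
  /* %R expands to REGISTER_PREFIX (if defined), %s is the ordinary printf
     string conversion, and %wd prints a HOST_WIDE_INT in decimal.  */
  asm_fprintf (asm_out_file, "\tcall\t%U%s\n", "memcpy");
  /* %U prepends user_label_prefix, e.g. "_memcpy" on targets using "_".  */
#endif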
4305 /* Return nonzero if this function has no function calls. */
4308 leaf_function_p (void)
4310 rtx_insn *insn;
4312 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4313 functions even if they call mcount. */
4314 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4315 return 0;
4317 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4319 if (CALL_P (insn)
4320 && ! SIBLING_CALL_P (insn))
4321 return 0;
4322 if (NONJUMP_INSN_P (insn)
4323 && GET_CODE (PATTERN (insn)) == SEQUENCE
4324 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4325 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4326 return 0;
4329 return 1;
4332 /* Return 1 if INSN is a forward branch.
4333 Uses the insn_shuid array, so it works only in the final pass. May be used
4334 by output templates to add branch prediction hints.
4337 final_forward_branch_p (rtx_insn *insn)
4339 int insn_id, label_id;
4341 gcc_assert (uid_shuid);
4342 insn_id = INSN_SHUID (insn);
4343 label_id = INSN_SHUID (JUMP_LABEL (insn));
4344 /* We've hit some insns that do not have id information available. */
4345 gcc_assert (insn_id && label_id);
4346 return insn_id < label_id;
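/* Editor's note: a hypothetical sketch of how a target output template might
   use final_forward_branch_p to choose a branch prediction hint; the
   mnemonics and the ",pt"/",pn" suffixes are made up for illustration and
   do not come from any real back end.  */
#if 0
static const char *
hypothetical_output_cbranch (rtx_insn *insn)
{
  /* Predict forward branches not taken, backward (loop) branches taken.  */
  return final_forward_branch_p (insn) ? "beq,pn\t%l0" : "beq,pt\t%l0";
}
#endif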
4349 /* On some machines, a function with no call insns
4350 can run faster if it doesn't create its own register window.
4351 When output, the leaf function should use only the "output"
4352 registers. Ordinarily, the function would be compiled to use
4353 the "input" registers to find its arguments; it is a candidate
4354 for leaf treatment if it uses only the "input" registers.
4355 Leaf function treatment means renumbering so the function
4356 uses the "output" registers instead. */
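/* Editor's note: an illustrative sketch, not taken from any real target
   header.  A back end opts into this treatment by defining LEAF_REGISTERS,
   a per-hard-register table of the registers a leaf function may use, and
   LEAF_REG_REMAP, which maps a register to its renumbered replacement
   (a negative value meaning the register cannot be remapped); the names
   below are hypothetical.  */
#if 0
#define LEAF_REGISTERS  hypothetical_leaf_regs           /* char[FIRST_PSEUDO_REGISTER] */
#define LEAF_REG_REMAP(REGNO)  hypothetical_leaf_reg_remap[REGNO]
#endif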
4358 #ifdef LEAF_REGISTERS
4360 /* Return 1 if this function uses only the registers that can be
4361 safely renumbered. */
4364 only_leaf_regs_used (void)
4366 int i;
4367 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4369 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4370 if ((df_regs_ever_live_p (i) || global_regs[i])
4371 && ! permitted_reg_in_leaf_functions[i])
4372 return 0;
4374 if (crtl->uses_pic_offset_table
4375 && pic_offset_table_rtx != 0
4376 && REG_P (pic_offset_table_rtx)
4377 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4378 return 0;
4380 return 1;
4383 /* Scan all instructions and renumber all registers into those
4384 available in leaf functions. */
4386 static void
4387 leaf_renumber_regs (rtx_insn *first)
4389 rtx_insn *insn;
4391 /* Renumber only the actual patterns.
4392 The reg-notes can contain frame pointer refs; renumbering them
4393 could crash and should not be needed. */
4394 for (insn = first; insn; insn = NEXT_INSN (insn))
4395 if (INSN_P (insn))
4396 leaf_renumber_regs_insn (PATTERN (insn));
4399 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4400 available in leaf functions. */
4402 void
4403 leaf_renumber_regs_insn (rtx in_rtx)
4405 int i, j;
4406 const char *format_ptr;
4408 if (in_rtx == 0)
4409 return;
4411 /* Renumber all input-registers into output-registers. */
4415 if (REG_P (in_rtx))
4417 int newreg;
4419 /* Don't renumber the same reg twice. */
4420 if (in_rtx->used)
4421 return;
4423 newreg = REGNO (in_rtx);
4424 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4425 to reach here as part of a REG_NOTE. */
4426 if (newreg >= FIRST_PSEUDO_REGISTER)
4428 in_rtx->used = 1;
4429 return;
4431 newreg = LEAF_REG_REMAP (newreg);
4432 gcc_assert (newreg >= 0);
4433 df_set_regs_ever_live (REGNO (in_rtx), false);
4434 df_set_regs_ever_live (newreg, true);
4435 SET_REGNO (in_rtx, newreg);
4436 in_rtx->used = 1;
4437 return;
4440 if (INSN_P (in_rtx))
4442 /* Inside a SEQUENCE, we find insns.
4443 Renumber just the patterns of these insns,
4444 just as we do for the top-level insns. */
4445 leaf_renumber_regs_insn (PATTERN (in_rtx));
4446 return;
4449 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4451 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4452 switch (*format_ptr++)
4454 case 'e':
4455 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4456 break;
4458 case 'E':
4459 if (NULL != XVEC (in_rtx, i))
4461 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4462 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4464 break;
4466 case 'S':
4467 case 's':
4468 case '0':
4469 case 'i':
4470 case 'w':
4471 case 'n':
4472 case 'u':
4473 break;
4475 default:
4476 gcc_unreachable ();
4479 #endif
4481 /* Turn the RTL into assembly. */
4482 static unsigned int
4483 rest_of_handle_final (void)
4485 const char *fnname = get_fnname_from_decl (current_function_decl);
4487 assemble_start_function (current_function_decl, fnname);
4488 final_start_function (get_insns (), asm_out_file, optimize);
4489 final (get_insns (), asm_out_file, optimize);
4490 if (flag_ipa_ra)
4491 collect_fn_hard_reg_usage ();
4492 final_end_function ();
4494 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4495 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4496 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4497 output_function_exception_table (fnname);
4499 assemble_end_function (current_function_decl, fnname);
4501 /* Free up reg info memory. */
4502 free_reg_info ();
4504 if (! quiet_flag)
4505 fflush (asm_out_file);
4507 /* Write DBX symbols if requested. */
4509 /* Note that for those inline functions where we don't initially
4510 know for certain that we will be generating an out-of-line copy,
4511 the first invocation of this routine (rest_of_compilation) will
4512 skip over this code by doing a `goto exit_rest_of_compilation;'.
4513 Later on, wrapup_global_declarations will (indirectly) call
4514 rest_of_compilation again for those inline functions that need
4515 to have out-of-line copies generated. During that call, we
4516 *will* be routed past here. */
4518 timevar_push (TV_SYMOUT);
4519 if (!DECL_IGNORED_P (current_function_decl))
4520 debug_hooks->function_decl (current_function_decl);
4521 timevar_pop (TV_SYMOUT);
4523 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4524 DECL_INITIAL (current_function_decl) = error_mark_node;
4526 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4527 && targetm.have_ctors_dtors)
4528 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4529 decl_init_priority_lookup
4530 (current_function_decl));
4531 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4532 && targetm.have_ctors_dtors)
4533 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4534 decl_fini_priority_lookup
4535 (current_function_decl));
4536 return 0;
4539 namespace {
4541 const pass_data pass_data_final =
4543 RTL_PASS, /* type */
4544 "final", /* name */
4545 OPTGROUP_NONE, /* optinfo_flags */
4546 TV_FINAL, /* tv_id */
4547 0, /* properties_required */
4548 0, /* properties_provided */
4549 0, /* properties_destroyed */
4550 0, /* todo_flags_start */
4551 0, /* todo_flags_finish */
4554 class pass_final : public rtl_opt_pass
4556 public:
4557 pass_final (gcc::context *ctxt)
4558 : rtl_opt_pass (pass_data_final, ctxt)
4561 /* opt_pass methods: */
4562 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4564 }; // class pass_final
4566 } // anon namespace
4568 rtl_opt_pass *
4569 make_pass_final (gcc::context *ctxt)
4571 return new pass_final (ctxt);
4575 static unsigned int
4576 rest_of_handle_shorten_branches (void)
4578 /* Shorten branches. */
4579 shorten_branches (get_insns ());
4580 return 0;
4583 namespace {
4585 const pass_data pass_data_shorten_branches =
4587 RTL_PASS, /* type */
4588 "shorten", /* name */
4589 OPTGROUP_NONE, /* optinfo_flags */
4590 TV_SHORTEN_BRANCH, /* tv_id */
4591 0, /* properties_required */
4592 0, /* properties_provided */
4593 0, /* properties_destroyed */
4594 0, /* todo_flags_start */
4595 0, /* todo_flags_finish */
4598 class pass_shorten_branches : public rtl_opt_pass
4600 public:
4601 pass_shorten_branches (gcc::context *ctxt)
4602 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4605 /* opt_pass methods: */
4606 virtual unsigned int execute (function *)
4608 return rest_of_handle_shorten_branches ();
4611 }; // class pass_shorten_branches
4613 } // anon namespace
4615 rtl_opt_pass *
4616 make_pass_shorten_branches (gcc::context *ctxt)
4618 return new pass_shorten_branches (ctxt);
4622 static unsigned int
4623 rest_of_clean_state (void)
4625 rtx_insn *insn, *next;
4626 FILE *final_output = NULL;
4627 int save_unnumbered = flag_dump_unnumbered;
4628 int save_noaddr = flag_dump_noaddr;
4630 if (flag_dump_final_insns)
4632 final_output = fopen (flag_dump_final_insns, "a");
4633 if (!final_output)
4635 error ("could not open final insn dump file %qs: %m",
4636 flag_dump_final_insns);
4637 flag_dump_final_insns = NULL;
4639 else
4641 flag_dump_noaddr = flag_dump_unnumbered = 1;
4642 if (flag_compare_debug_opt || flag_compare_debug)
4643 dump_flags |= TDF_NOUID;
4644 dump_function_header (final_output, current_function_decl,
4645 dump_flags);
4646 final_insns_dump_p = true;
4648 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4649 if (LABEL_P (insn))
4650 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4651 else
4653 if (NOTE_P (insn))
4654 set_block_for_insn (insn, NULL);
4655 INSN_UID (insn) = 0;
4660 /* It is very important to decompose the RTL instruction chain here:
4661 debug information keeps pointing into CODE_LABEL insns inside the function
4662 body. If these remain pointing to the other insns, we end up preserving
4663 the whole RTL chain and its attached detailed debug info in memory. */
4664 for (insn = get_insns (); insn; insn = next)
4666 next = NEXT_INSN (insn);
4667 SET_NEXT_INSN (insn) = NULL;
4668 SET_PREV_INSN (insn) = NULL;
4670 if (final_output
4671 && (!NOTE_P (insn) ||
4672 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4673 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4674 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4675 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4676 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4677 print_rtl_single (final_output, insn);
4680 if (final_output)
4682 flag_dump_noaddr = save_noaddr;
4683 flag_dump_unnumbered = save_unnumbered;
4684 final_insns_dump_p = false;
4686 if (fclose (final_output))
4688 error ("could not close final insn dump file %qs: %m",
4689 flag_dump_final_insns);
4690 flag_dump_final_insns = NULL;
4694 /* In case the function was not output,
4695 don't leave any temporary anonymous types
4696 queued up for sdb output. */
4697 if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
4698 sdbout_types (NULL_TREE);
4700 flag_rerun_cse_after_global_opts = 0;
4701 reload_completed = 0;
4702 epilogue_completed = 0;
4703 #ifdef STACK_REGS
4704 regstack_completed = 0;
4705 #endif
4707 /* Clear out the insn_length contents now that they are no
4708 longer valid. */
4709 init_insn_lengths ();
4711 /* Show no temporary slots allocated. */
4712 init_temp_slots ();
4714 free_bb_for_insn ();
4716 if (cfun->gimple_df)
4717 delete_tree_ssa (cfun);
4719 /* We can reduce the stack alignment at call sites only when we are sure
4720 that the function body just produced will actually be used in the final
4721 executable. */
4722 if (decl_binds_to_current_def_p (current_function_decl))
4724 unsigned int pref = crtl->preferred_stack_boundary;
4725 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4726 pref = crtl->stack_alignment_needed;
4727 cgraph_node::rtl_info (current_function_decl)
4728 ->preferred_incoming_stack_boundary = pref;
4731 /* Make sure volatile mem refs aren't considered valid operands for
4732 arithmetic insns. We must call this here if this is a nested inline
4733 function, since the above code leaves us in the init_recog state,
4734 and the function context push/pop code does not save/restore volatile_ok.
4736 ??? Maybe it isn't necessary for expand_start_function to call this
4737 anymore if we do it here? */
4739 init_recog_no_volatile ();
4741 /* We're done with this function. Free up memory if we can. */
4742 free_after_parsing (cfun);
4743 free_after_compilation (cfun);
4744 return 0;
4747 namespace {
4749 const pass_data pass_data_clean_state =
4751 RTL_PASS, /* type */
4752 "*clean_state", /* name */
4753 OPTGROUP_NONE, /* optinfo_flags */
4754 TV_FINAL, /* tv_id */
4755 0, /* properties_required */
4756 0, /* properties_provided */
4757 PROP_rtl, /* properties_destroyed */
4758 0, /* todo_flags_start */
4759 0, /* todo_flags_finish */
4762 class pass_clean_state : public rtl_opt_pass
4764 public:
4765 pass_clean_state (gcc::context *ctxt)
4766 : rtl_opt_pass (pass_data_clean_state, ctxt)
4769 /* opt_pass methods: */
4770 virtual unsigned int execute (function *)
4772 return rest_of_clean_state ();
4775 }; // class pass_clean_state
4777 } // anon namespace
4779 rtl_opt_pass *
4780 make_pass_clean_state (gcc::context *ctxt)
4782 return new pass_clean_state (ctxt);
4785 /* Return true if INSN is a call to the current function. */
4787 static bool
4788 self_recursive_call_p (rtx_insn *insn)
4790 tree fndecl = get_call_fndecl (insn);
4791 return (fndecl == current_function_decl
4792 && decl_binds_to_current_def_p (fndecl));
4795 /* Collect hard register usage for the current function. */
4797 static void
4798 collect_fn_hard_reg_usage (void)
4800 rtx_insn *insn;
4801 #ifdef STACK_REGS
4802 int i;
4803 #endif
4804 struct cgraph_rtl_info *node;
4805 HARD_REG_SET function_used_regs;
4807 /* ??? To be removed when all the ports have been fixed. */
4808 if (!targetm.call_fusage_contains_non_callee_clobbers)
4809 return;
4811 CLEAR_HARD_REG_SET (function_used_regs);
4813 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4815 HARD_REG_SET insn_used_regs;
4817 if (!NONDEBUG_INSN_P (insn))
4818 continue;
4820 if (CALL_P (insn)
4821 && !self_recursive_call_p (insn))
4823 if (!get_call_reg_set_usage (insn, &insn_used_regs,
4824 call_used_reg_set))
4825 return;
4827 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4830 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4831 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4834 /* Be conservative - mark fixed and global registers as used. */
4835 IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4837 #ifdef STACK_REGS
4838 /* Handle STACK_REGS conservatively, since the df-framework does not
4839 provide accurate information for them. */
4841 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4842 SET_HARD_REG_BIT (function_used_regs, i);
4843 #endif
4845 /* The information we have gathered is only interesting if it exposes a
4846 register from the call_used_regs that is not used in this function. */
4847 if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4848 return;
4850 node = cgraph_node::rtl_info (current_function_decl);
4851 gcc_assert (node != NULL);
4853 COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4854 node->function_used_regs_valid = 1;
4857 /* Get the declaration of the function called by INSN. */
4859 static tree
4860 get_call_fndecl (rtx_insn *insn)
4862 rtx note, datum;
4864 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4865 if (note == NULL_RTX)
4866 return NULL_TREE;
4868 datum = XEXP (note, 0);
4869 if (datum != NULL_RTX)
4870 return SYMBOL_REF_DECL (datum);
4872 return NULL_TREE;
4875 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4876 call targets that can be overridden, i.e. that do not bind to the current definition. */
4878 static struct cgraph_rtl_info *
4879 get_call_cgraph_rtl_info (rtx_insn *insn)
4881 tree fndecl;
4883 if (insn == NULL_RTX)
4884 return NULL;
4886 fndecl = get_call_fndecl (insn);
4887 if (fndecl == NULL_TREE
4888 || !decl_binds_to_current_def_p (fndecl))
4889 return NULL;
4891 return cgraph_node::rtl_info (fndecl);
4894 /* Find the hard registers used by function call instruction INSN and return them
4895 in REG_SET, returning true; if not found, copy DEFAULT_SET into REG_SET and return false. */
4897 bool
4898 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4899 HARD_REG_SET default_set)
4901 if (flag_ipa_ra)
4903 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4904 if (node != NULL
4905 && node->function_used_regs_valid)
4907 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4908 AND_HARD_REG_SET (*reg_set, default_set);
4909 return true;
4913 COPY_HARD_REG_SET (*reg_set, default_set);
4914 return false;
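/* Editor's note: an illustrative sketch, not part of this file, of the
   typical calling pattern for get_call_reg_set_usage: ask which hard
   registers a call clobbers, falling back to a conservative default set
   when no IPA-RA information is recorded for the callee.  CALL_INSN here
   stands for some rtx_insn * call instruction in the caller's code.  */
#if 0
  HARD_REG_SET clobbered;
  if (get_call_reg_set_usage (call_insn, &clobbered, regs_invalidated_by_call))
    ;  /* Precise per-callee usage was found, already intersected with the default set.  */
  else
    ;  /* CLOBBERED is simply a copy of regs_invalidated_by_call.  */
#endif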