Backport r203445 from v17
[official-gcc.git] / gcc-4_8 / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
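/* Illustrative sketch (added for exposition, not part of the original
   source): given the description above, a driver for this pass invokes
   the three entry points in order, roughly

       final_start_function (first, asm_out_file, optimize);
       final (first, asm_out_file, optimize);
       final_end_function ();

   where FIRST is the first insn of the function's RTL and ASM_OUT_FILE
   is the assembler output stream.  */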
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
50 #include "tree.h"
51 #include "rtl.h"
52 #include "tm_p.h"
53 #include "regs.h"
54 #include "insn-config.h"
55 #include "insn-attr.h"
56 #include "recog.h"
57 #include "conditions.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "output.h"
61 #include "except.h"
62 #include "function.h"
63 #include "rtl-error.h"
64 #include "toplev.h" /* exact_log2, floor_log2 */
65 #include "reload.h"
66 #include "intl.h"
67 #include "basic-block.h"
68 #include "target.h"
69 #include "targhooks.h"
70 #include "debug.h"
71 #include "expr.h"
72 #include "tree-pass.h"
73 #include "tree-flow.h"
74 #include "cgraph.h"
75 #include "coverage.h"
76 #include "df.h"
77 #include "ggc.h"
78 #include "cfgloop.h"
79 #include "params.h"
80 #include "tree-pretty-print.h" /* for dump_function_header */
82 #ifdef XCOFF_DEBUGGING_INFO
83 #include "xcoffout.h" /* Needed for external data
84 declarations for e.g. AIX 4.x. */
85 #endif
87 #include "dwarf2out.h"
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
97 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
98 So define a null default for it to save conditionalization later. */
99 #ifndef CC_STATUS_INIT
100 #define CC_STATUS_INIT
101 #endif
103 /* Is the given character a logical line separator for the assembler? */
104 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
105 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
106 #endif
108 #ifndef JUMP_TABLES_IN_TEXT_SECTION
109 #define JUMP_TABLES_IN_TEXT_SECTION 0
110 #endif
112 /* Bitflags used by final_scan_insn. */
113 #define SEEN_BB 1
114 #define SEEN_NOTE 2
115 #define SEEN_EMITTED 4
117 /* Last insn processed by final_scan_insn. */
118 static rtx debug_insn;
119 rtx current_output_insn;
121 /* Line number of last NOTE. */
122 static int last_linenum;
124 /* Last discriminator written to assembly. */
125 static int last_discriminator;
127 /* Highest line number in current block. */
128 static int high_block_linenum;
130 /* Likewise for function. */
131 static int high_function_linenum;
133 /* Filename of last NOTE. */
134 static const char *last_filename;
136 /* Override filename, line number, and discriminator. */
137 static const char *override_filename;
138 static int override_linenum;
139 static int override_discriminator;
141 /* Whether to force emission of a line note before the next insn. */
142 static bool force_source_line = false;
144 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
146 /* Nonzero while outputting an `asm' with operands.
147 This means that inconsistencies are the user's fault, so don't die.
148 The precise value is the insn being output, to pass to error_for_asm. */
149 rtx this_is_asm_operands;
151 /* Number of operands of this insn, for an `asm' with operands. */
152 static unsigned int insn_noperands;
154 /* Compare optimization flag. */
156 static rtx last_ignored_compare = 0;
158 /* Assign a unique number to each insn that is output.
159 This can be used to generate unique local labels. */
161 static int insn_counter = 0;
163 #ifdef HAVE_cc0
164 /* This variable contains machine-dependent flags (defined in tm.h)
165 set and examined by output routines
166 that describe how to interpret the condition codes properly. */
168 CC_STATUS cc_status;
170 /* During output of an insn, this contains a copy of cc_status
171 from before the insn. */
173 CC_STATUS cc_prev_status;
174 #endif
176 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
178 static int block_depth;
180 /* Nonzero if have enabled APP processing of our assembler output. */
182 static int app_on;
184 /* If we are outputting an insn sequence, this contains the sequence rtx.
185 Zero otherwise. */
187 rtx final_sequence;
189 #ifdef ASSEMBLER_DIALECT
191 /* Number of the assembler dialect to use, starting at 0. */
192 static int dialect_number;
193 #endif
195 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
196 rtx current_insn_predicate;
198 /* True if printing into -fdump-final-insns= dump. */
199 bool final_insns_dump_p;
201 /* True if profile_function should be called, but hasn't been called yet. */
202 static bool need_profile_function;
204 /* True if the function has a split cold section. */
205 static bool has_cold_section_p;
207 static int asm_insn_count (rtx);
208 static void profile_function (FILE *);
209 static void profile_after_prologue (FILE *);
210 static bool notice_source_line (rtx, bool *);
211 static rtx walk_alter_subreg (rtx *, bool *);
212 static void output_asm_name (void);
213 static void output_alternate_entry_point (FILE *, rtx);
214 static tree get_mem_expr_from_op (rtx, int *);
215 static void output_asm_operand_names (rtx *, int *, int);
216 #ifdef LEAF_REGISTERS
217 static void leaf_renumber_regs (rtx);
218 #endif
219 #ifdef HAVE_cc0
220 static int alter_cond (rtx);
221 #endif
222 #ifndef ADDR_VEC_ALIGN
223 static int final_addr_vec_align (rtx);
224 #endif
225 static int align_fuzz (rtx, rtx, int, unsigned);
227 /* Initialize data in final at the beginning of a compilation. */
229 void
230 init_final (const char *filename ATTRIBUTE_UNUSED)
232 app_on = 0;
233 final_sequence = 0;
235 #ifdef ASSEMBLER_DIALECT
236 dialect_number = ASSEMBLER_DIALECT;
237 #endif
240 /* Default target function prologue and epilogue assembler output.
242 If not overridden for epilogue code, then the function body itself
243 contains return instructions wherever needed. */
244 void
245 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
246 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
250 void
251 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
252 tree decl ATTRIBUTE_UNUSED,
253 bool new_is_cold ATTRIBUTE_UNUSED)
257 /* Default target hook that outputs nothing to a stream. */
258 void
259 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
263 /* Enable APP processing of subsequent output.
264 Used before the output from an `asm' statement. */
266 void
267 app_enable (void)
269 if (! app_on)
271 fputs (ASM_APP_ON, asm_out_file);
272 app_on = 1;
276 /* Disable APP processing of subsequent output.
277 Called from varasm.c before most kinds of output. */
279 void
280 app_disable (void)
282 if (app_on)
284 fputs (ASM_APP_OFF, asm_out_file);
285 app_on = 0;
289 /* Return the number of slots filled in the current
290 delayed branch sequence (we don't count the insn needing the
291 delay slot). Zero if not in a delayed branch sequence. */
293 #ifdef DELAY_SLOTS
295 dbr_sequence_length (void)
297 if (final_sequence != 0)
298 return XVECLEN (final_sequence, 0) - 1;
299 else
300 return 0;
302 #endif
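/* Example (added for exposition): a delayed-branch SEQUENCE holding the
   branch itself plus two delay-slot insns has XVECLEN (final_sequence, 0)
   == 3, so dbr_sequence_length returns 2; outside any sequence it
   returns 0.  */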
304 /* The next two pages contain routines used to compute the length of an insn
305 and to shorten branches. */
307 /* Arrays for insn lengths, and addresses. The latter is referenced by
308 `insn_current_length'. */
310 static int *insn_lengths;
312 vec<int> insn_addresses_;
314 /* Max uid for which the above arrays are valid. */
315 static int insn_lengths_max_uid;
317 /* Address of insn being processed. Used by `insn_current_length'. */
318 int insn_current_address;
320 /* Address of insn being processed in previous iteration. */
321 int insn_last_address;
324 /* Known invariant alignment of insn being processed. */
324 int insn_current_align;
326 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
327 gives the next following alignment insn that increases the known
328 alignment, or NULL_RTX if there is no such insn.
329 For any alignment obtained this way, we can again index uid_align with
330 its uid to obtain the next following align that in turn increases the
331 alignment, till we reach NULL_RTX; the sequence obtained this way
332 for each insn we'll call the alignment chain of this insn in the following
333 comments. */
335 struct label_alignment
337 short alignment;
338 short max_skip;
341 static rtx *uid_align;
342 static int *uid_shuid;
343 static struct label_alignment *label_align;
345 /* Indicate that branch shortening hasn't yet been done. */
347 void
348 init_insn_lengths (void)
350 if (uid_shuid)
352 free (uid_shuid);
353 uid_shuid = 0;
355 if (insn_lengths)
357 free (insn_lengths);
358 insn_lengths = 0;
359 insn_lengths_max_uid = 0;
361 if (HAVE_ATTR_length)
362 INSN_ADDRESSES_FREE ();
363 if (uid_align)
365 free (uid_align);
366 uid_align = 0;
370 /* Obtain the current length of an insn. If branch shortening has been done,
371 get its actual length. Otherwise, use FALLBACK_FN to calculate the
372 length. */
373 static inline int
374 get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
376 rtx body;
377 int i;
378 int length = 0;
380 if (!HAVE_ATTR_length)
381 return 0;
383 if (insn_lengths_max_uid > INSN_UID (insn))
384 return insn_lengths[INSN_UID (insn)];
385 else
386 switch (GET_CODE (insn))
388 case NOTE:
389 case BARRIER:
390 case CODE_LABEL:
391 case DEBUG_INSN:
392 return 0;
394 case CALL_INSN:
395 length = fallback_fn (insn);
396 break;
398 case JUMP_INSN:
399 body = PATTERN (insn);
400 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
402 /* Alignment is machine-dependent and should be handled by
403 ADDR_VEC_ALIGN. */
405 else
406 length = fallback_fn (insn);
407 break;
409 case INSN:
410 body = PATTERN (insn);
411 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
412 return 0;
414 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
415 length = asm_insn_count (body) * fallback_fn (insn);
416 else if (GET_CODE (body) == SEQUENCE)
417 for (i = 0; i < XVECLEN (body, 0); i++)
418 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
419 else
420 length = fallback_fn (insn);
421 break;
423 default:
424 break;
427 #ifdef ADJUST_INSN_LENGTH
428 ADJUST_INSN_LENGTH (insn, length);
429 #endif
430 return length;
433 /* Obtain the current length of an insn. If branch shortening has been done,
434 get its actual length. Otherwise, get its maximum length. */
436 get_attr_length (rtx insn)
438 return get_attr_length_1 (insn, insn_default_length);
441 /* Obtain the current length of an insn. If branch shortening has been done,
442 get its actual length. Otherwise, get its minimum length. */
444 get_attr_min_length (rtx insn)
446 return get_attr_length_1 (insn, insn_min_length);
449 /* Code to handle alignment inside shorten_branches. */
451 /* Here is an explanation how the algorithm in align_fuzz can give
452 proper results:
454 Call a sequence of instructions beginning with alignment point X
455 and continuing until the next alignment point `block X'. When `X'
456 is used in an expression, it means the alignment value of the
457 alignment point.
459 Call the distance between the start of the first insn of block X, and
460 the end of the last insn of block X `IX', for the `inner size of X'.
461 This is clearly the sum of the instruction lengths.
463 Likewise with the next alignment-delimited block following X, which we
464 shall call block Y.
466 Call the distance between the start of the first insn of block X, and
467 the start of the first insn of block Y `OX', for the `outer size of X'.
469 The estimated padding is then OX - IX.
471 OX can be safely estimated as
473 if (X >= Y)
474 OX = round_up(IX, Y)
475 else
476 OX = round_up(IX, X) + Y - X
478 Clearly est(IX) >= real(IX), because that only depends on the
479 instruction lengths, and those being overestimated is a given.
481 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
482 we needn't worry about that when thinking about OX.
484 When X >= Y, the alignment provided by Y adds no uncertainty factor
485 for branch ranges starting before X, so we can just round what we have.
486 But when X < Y, we don't know anything about the, so to speak,
487 `middle bits', so we have to assume the worst when aligning up from an
488 address mod X to one mod Y, which is Y - X. */
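/* Worked example (added for exposition): suppose block X has inner size
   IX = 10 bytes, alignment point X requests 4 bytes and the following
   alignment point Y requests 8 bytes.  Since X < Y,

       OX = round_up (10, 4) + 8 - 4 = 12 + 4 = 16,

   so the estimated padding OX - IX is 6 bytes.  Had Y requested only
   4 bytes (X >= Y), OX = round_up (10, 4) = 12 and the estimated padding
   would be just 2 bytes.  */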
490 #ifndef LABEL_ALIGN
491 #define LABEL_ALIGN(LABEL) align_labels_log
492 #endif
494 #ifndef LOOP_ALIGN
495 #define LOOP_ALIGN(LABEL) align_loops_log
496 #endif
498 #ifndef LABEL_ALIGN_AFTER_BARRIER
499 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
500 #endif
502 #ifndef JUMP_ALIGN
503 #define JUMP_ALIGN(LABEL) align_jumps_log
504 #endif
507 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
509 return 0;
513 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
515 return align_loops_max_skip;
519 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
521 return align_labels_max_skip;
525 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
527 return align_jumps_max_skip;
530 #ifndef ADDR_VEC_ALIGN
531 static int
532 final_addr_vec_align (rtx addr_vec)
534 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
536 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
537 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
538 return exact_log2 (align);
542 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
543 #endif
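/* Example (added for exposition): with the default above, an ADDR_VEC in
   HImode (2-byte entries on typical targets) yields exact_log2 (2) == 1,
   i.e. 2-byte alignment, and an SImode table yields exact_log2 (4) == 2,
   capped in either case at BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */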
545 #ifndef INSN_LENGTH_ALIGNMENT
546 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
547 #endif
549 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
551 static int min_labelno, max_labelno;
553 #define LABEL_TO_ALIGNMENT(LABEL) \
554 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
556 #define LABEL_TO_MAX_SKIP(LABEL) \
557 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
559 /* For the benefit of port specific code do this also as a function. */
562 label_to_alignment (rtx label)
564 if (CODE_LABEL_NUMBER (label) <= max_labelno)
565 return LABEL_TO_ALIGNMENT (label);
566 return 0;
570 label_to_max_skip (rtx label)
572 if (CODE_LABEL_NUMBER (label) <= max_labelno)
573 return LABEL_TO_MAX_SKIP (label);
574 return 0;
577 /* The differences in addresses
578 between a branch and its target might grow or shrink depending on
579 the alignment the start insn of the range (the branch for a forward
580 branch or the label for a backward branch) starts out on; if these
581 differences are used naively, they can even oscillate infinitely.
582 We therefore want to compute a 'worst case' address difference that
583 is independent of the alignment the start insn of the range ends
584 up on, and that is at least as large as the actual difference.
585 The function align_fuzz calculates the amount we have to add to the
586 naively computed difference, by traversing the part of the alignment
587 chain of the start insn of the range that is in front of the end insn
588 of the range, and considering for each alignment the maximum amount
589 that it might contribute to a size increase.
591 For casesi tables, we also want to know worst case minimum amounts of
592 address difference, in case a machine description wants to introduce
593 some common offset that is added to all offsets in a table.
594 For this purpose, align_fuzz with a growth argument of 0 computes the
595 appropriate adjustment. */
597 /* Compute the maximum delta by which the difference of the addresses of
598 START and END might grow / shrink due to a different address for start
599 which changes the size of alignment insns between START and END.
600 KNOWN_ALIGN_LOG is the alignment known for START.
601 GROWTH should be ~0 if the objective is to compute potential code size
602 increase, and 0 if the objective is to compute potential shrink.
603 The return value is undefined for any other value of GROWTH. */
605 static int
606 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
608 int uid = INSN_UID (start);
609 rtx align_label;
610 int known_align = 1 << known_align_log;
611 int end_shuid = INSN_SHUID (end);
612 int fuzz = 0;
614 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
616 int align_addr, new_align;
618 uid = INSN_UID (align_label);
619 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
620 if (uid_shuid[uid] > end_shuid)
621 break;
622 known_align_log = LABEL_TO_ALIGNMENT (align_label);
623 new_align = 1 << known_align_log;
624 if (new_align < known_align)
625 continue;
626 fuzz += (-align_addr ^ growth) & (new_align - known_align);
627 known_align = new_align;
629 return fuzz;
632 /* Compute a worst-case reference address of a branch so that it
633 can be safely used in the presence of aligned labels. Since the
634 size of the branch itself is unknown, the size of the branch is
635 not included in the range. I.e. for a forward branch, the reference
636 address is the end address of the branch as known from the previous
637 branch shortening pass, minus a value to account for possible size
638 increase due to alignment. For a backward branch, it is the start
639 address of the branch as known from the current pass, plus a value
640 to account for possible size increase due to alignment.
641 NB.: Therefore, the maximum offset allowed for backward branches needs
642 to exclude the branch size. */
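/* Example (added for exposition): for a forward branch whose end address
   was 100 in the previous shortening pass, and whose alignment chain up to
   the target could add at most 6 bytes of padding (per align_fuzz), the
   worst-case reference address returned below is 100 - 6 = 94, i.e. the
   branch is assumed to sit 6 bytes earlier than previously recorded, so
   the computed branch distance can only be an overestimate.  */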
645 insn_current_reference_address (rtx branch)
647 rtx dest, seq;
648 int seq_uid;
650 if (! INSN_ADDRESSES_SET_P ())
651 return 0;
653 seq = NEXT_INSN (PREV_INSN (branch));
654 seq_uid = INSN_UID (seq);
655 if (!JUMP_P (branch))
656 /* This can happen for example on the PA; the objective is to know the
657 offset to address something in front of the start of the function.
658 Thus, we can treat it like a backward branch.
659 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
660 any alignment we'd encounter, so we skip the call to align_fuzz. */
661 return insn_current_address;
662 dest = JUMP_LABEL (branch);
664 /* BRANCH has no proper alignment chain set, so use SEQ.
665 BRANCH also has no INSN_SHUID. */
666 if (INSN_SHUID (seq) < INSN_SHUID (dest))
668 /* Forward branch. */
669 return (insn_last_address + insn_lengths[seq_uid]
670 - align_fuzz (seq, dest, length_unit_log, ~0));
672 else
674 /* Backward branch. */
675 return (insn_current_address
676 + align_fuzz (dest, seq, length_unit_log, ~0));
680 /* Compute branch alignments based on frequency information in the
681 CFG. */
683 unsigned int
684 compute_alignments (void)
686 int log, max_skip, max_log;
687 basic_block bb;
688 int freq_max = 0;
689 int freq_threshold = 0;
691 if (label_align)
693 free (label_align);
694 label_align = 0;
697 max_labelno = max_label_num ();
698 min_labelno = get_first_label_num ();
699 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
701 /* If not optimizing or optimizing for size, don't assign any alignments. */
702 if (! optimize || optimize_function_for_size_p (cfun))
703 return 0;
705 if (dump_file)
707 dump_reg_info (dump_file);
708 dump_flow_info (dump_file, TDF_DETAILS);
709 flow_loops_dump (dump_file, NULL, 1);
711 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
712 FOR_EACH_BB (bb)
713 if (bb->frequency > freq_max)
714 freq_max = bb->frequency;
715 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
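/* Example (added for exposition): with the default --param align-threshold
   of 100, freq_threshold becomes freq_max / 100, so only labels reached by
   branches at least 1% as hot as the hottest block in the function are
   considered for extra alignment below.  */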
717 if (dump_file)
718 fprintf(dump_file, "freq_max: %i\n",freq_max);
719 FOR_EACH_BB (bb)
721 rtx label = BB_HEAD (bb);
722 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
723 edge e;
724 edge_iterator ei;
726 if (!LABEL_P (label)
727 || optimize_bb_for_size_p (bb))
729 if (dump_file)
730 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
731 bb->index, bb->frequency, bb->loop_father->num,
732 bb_loop_depth (bb));
733 continue;
735 max_log = LABEL_ALIGN (label);
736 max_skip = targetm.asm_out.label_align_max_skip (label);
738 FOR_EACH_EDGE (e, ei, bb->preds)
740 if (e->flags & EDGE_FALLTHRU)
741 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
742 else
743 branch_frequency += EDGE_FREQUENCY (e);
745 if (dump_file)
747 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
748 bb->index, bb->frequency, bb->loop_father->num,
749 bb_loop_depth (bb),
750 fallthru_frequency, branch_frequency);
751 if (!bb->loop_father->inner && bb->loop_father->num)
752 fprintf (dump_file, " inner_loop");
753 if (bb->loop_father->header == bb)
754 fprintf (dump_file, " loop_header");
755 fprintf (dump_file, "\n");
758 /* There are two purposes for aligning a block with no fallthru incoming edge:
759 1) to avoid fetch stalls when branch destination is near cache boundary
760 2) to improve cache efficiency in case the previous block is not executed
761 (so it does not need to be in the cache).
763 To catch the first case, we align frequently executed blocks.
764 To catch the second, we align blocks that are executed more frequently
765 than the predecessor and the predecessor is likely to not be executed
766 when function is called. */
768 if (!has_fallthru
769 && (branch_frequency > freq_threshold
770 || (bb->frequency > bb->prev_bb->frequency * 10
771 && (bb->prev_bb->frequency
772 <= ENTRY_BLOCK_PTR->frequency / 2))))
774 log = JUMP_ALIGN (label);
775 if (dump_file)
776 fprintf(dump_file, " jump alignment added.\n");
777 if (max_log < log)
779 max_log = log;
780 max_skip = targetm.asm_out.jump_align_max_skip (label);
783 /* In case the block is frequent and reached mostly by a non-fallthru edge,
784 align it. It is most likely the first block of a loop. */
785 if (has_fallthru
786 && optimize_bb_for_speed_p (bb)
787 && branch_frequency + fallthru_frequency > freq_threshold
788 && (branch_frequency
789 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
791 log = LOOP_ALIGN (label);
792 if (dump_file)
793 fprintf(dump_file, " internal loop alignment added.\n");
794 if (max_log < log)
796 max_log = log;
797 max_skip = targetm.asm_out.loop_align_max_skip (label);
800 LABEL_TO_ALIGNMENT (label) = max_log;
801 LABEL_TO_MAX_SKIP (label) = max_skip;
804 loop_optimizer_finalize ();
805 free_dominance_info (CDI_DOMINATORS);
806 return 0;
809 struct rtl_opt_pass pass_compute_alignments =
812 RTL_PASS,
813 "alignments", /* name */
814 OPTGROUP_NONE, /* optinfo_flags */
815 NULL, /* gate */
816 compute_alignments, /* execute */
817 NULL, /* sub */
818 NULL, /* next */
819 0, /* static_pass_number */
820 TV_NONE, /* tv_id */
821 0, /* properties_required */
822 0, /* properties_provided */
823 0, /* properties_destroyed */
824 0, /* todo_flags_start */
825 TODO_verify_rtl_sharing
826 | TODO_ggc_collect /* todo_flags_finish */
831 /* Make a pass over all insns and compute their actual lengths by shortening
832 any branches of variable length if possible. */
834 /* shorten_branches might be called multiple times: for example, the SH
835 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
836 In order to do this, it needs proper length information, which it obtains
837 by calling shorten_branches. This cannot be collapsed with
838 shorten_branches itself into a single pass unless we also want to integrate
839 reorg.c, since the branch splitting exposes new instructions with delay
840 slots. */
842 void
843 shorten_branches (rtx first)
845 rtx insn;
846 int max_uid;
847 int i;
848 int max_log;
849 int max_skip;
850 #define MAX_CODE_ALIGN 16
851 rtx seq;
852 int something_changed = 1;
853 char *varying_length;
854 rtx body;
855 int uid;
856 rtx align_tab[MAX_CODE_ALIGN];
858 /* Compute maximum UID and allocate label_align / uid_shuid. */
859 max_uid = get_max_uid ();
861 /* Free uid_shuid before reallocating it. */
862 free (uid_shuid);
864 uid_shuid = XNEWVEC (int, max_uid);
866 if (max_labelno != max_label_num ())
868 int old = max_labelno;
869 int n_labels;
870 int n_old_labels;
872 max_labelno = max_label_num ();
874 n_labels = max_labelno - min_labelno + 1;
875 n_old_labels = old - min_labelno + 1;
877 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
879 /* Range of labels grows monotonically in the function. Failing here
880 means that the initialization of the array got lost. */
881 gcc_assert (n_old_labels <= n_labels);
883 memset (label_align + n_old_labels, 0,
884 (n_labels - n_old_labels) * sizeof (struct label_alignment));
887 /* Initialize label_align and set up uid_shuid to be strictly
888 monotonically rising with insn order. */
889 /* We use max_log here to keep track of the maximum alignment we want to
890 impose on the next CODE_LABEL (or the current one if we are processing
891 the CODE_LABEL itself). */
893 max_log = 0;
894 max_skip = 0;
896 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
898 int log;
900 INSN_SHUID (insn) = i++;
901 if (INSN_P (insn))
902 continue;
904 if (LABEL_P (insn))
906 rtx next;
907 bool next_is_jumptable;
909 /* Merge in alignments computed by compute_alignments. */
910 log = LABEL_TO_ALIGNMENT (insn);
911 if (max_log < log)
913 max_log = log;
914 max_skip = LABEL_TO_MAX_SKIP (insn);
917 next = next_nonnote_insn (insn);
918 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
919 if (!next_is_jumptable)
921 log = LABEL_ALIGN (insn);
922 if (max_log < log)
924 max_log = log;
925 max_skip = targetm.asm_out.label_align_max_skip (insn);
928 /* ADDR_VECs only take room if read-only data goes into the text
929 section. */
930 if ((JUMP_TABLES_IN_TEXT_SECTION
931 || readonly_data_section == text_section)
932 && next_is_jumptable)
934 log = ADDR_VEC_ALIGN (next);
935 if (max_log < log)
937 max_log = log;
938 max_skip = targetm.asm_out.label_align_max_skip (insn);
941 LABEL_TO_ALIGNMENT (insn) = max_log;
942 LABEL_TO_MAX_SKIP (insn) = max_skip;
943 max_log = 0;
944 max_skip = 0;
946 else if (BARRIER_P (insn))
948 rtx label;
950 for (label = insn; label && ! INSN_P (label);
951 label = NEXT_INSN (label))
952 if (LABEL_P (label))
954 log = LABEL_ALIGN_AFTER_BARRIER (insn);
955 if (max_log < log)
957 max_log = log;
958 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
960 break;
964 if (!HAVE_ATTR_length)
965 return;
967 /* Allocate the rest of the arrays. */
968 insn_lengths = XNEWVEC (int, max_uid);
969 insn_lengths_max_uid = max_uid;
970 /* Syntax errors can lead to labels being outside of the main insn stream.
971 Initialize insn_addresses, so that we get reproducible results. */
972 INSN_ADDRESSES_ALLOC (max_uid);
974 varying_length = XCNEWVEC (char, max_uid);
976 /* Initialize uid_align. We scan instructions
977 from end to start, and keep in align_tab[n] the last seen insn
978 that does an alignment of at least n+1, i.e. the successor
979 in the alignment chain for an insn that does / has a known
980 alignment of n. */
981 uid_align = XCNEWVEC (rtx, max_uid);
983 for (i = MAX_CODE_ALIGN; --i >= 0;)
984 align_tab[i] = NULL_RTX;
985 seq = get_last_insn ();
986 for (; seq; seq = PREV_INSN (seq))
988 int uid = INSN_UID (seq);
989 int log;
990 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
991 uid_align[uid] = align_tab[0];
992 if (log)
994 /* Found an alignment label. */
995 uid_align[uid] = align_tab[log];
996 for (i = log - 1; i >= 0; i--)
997 align_tab[i] = seq;
1001 /* When optimizing, we start assuming minimum length, and keep increasing
1002 lengths as we find the need for this, till nothing changes.
1003 When not optimizing, we start assuming maximum lengths, and
1004 do a single pass to update the lengths. */
1005 bool increasing = optimize != 0;
1007 #ifdef CASE_VECTOR_SHORTEN_MODE
1008 if (optimize)
1010 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1011 label fields. */
1013 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1014 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1015 int rel;
1017 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1019 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1020 int len, i, min, max, insn_shuid;
1021 int min_align;
1022 addr_diff_vec_flags flags;
1024 if (!JUMP_P (insn)
1025 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1026 continue;
1027 pat = PATTERN (insn);
1028 len = XVECLEN (pat, 1);
1029 gcc_assert (len > 0);
1030 min_align = MAX_CODE_ALIGN;
1031 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1033 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1034 int shuid = INSN_SHUID (lab);
1035 if (shuid < min)
1037 min = shuid;
1038 min_lab = lab;
1040 if (shuid > max)
1042 max = shuid;
1043 max_lab = lab;
1045 if (min_align > LABEL_TO_ALIGNMENT (lab))
1046 min_align = LABEL_TO_ALIGNMENT (lab);
1048 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1049 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1050 insn_shuid = INSN_SHUID (insn);
1051 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1052 memset (&flags, 0, sizeof (flags));
1053 flags.min_align = min_align;
1054 flags.base_after_vec = rel > insn_shuid;
1055 flags.min_after_vec = min > insn_shuid;
1056 flags.max_after_vec = max > insn_shuid;
1057 flags.min_after_base = min > rel;
1058 flags.max_after_base = max > rel;
1059 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1061 if (increasing)
1062 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1065 #endif /* CASE_VECTOR_SHORTEN_MODE */
1067 /* Compute initial lengths, addresses, and varying flags for each insn. */
1068 int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1070 for (insn_current_address = 0, insn = first;
1071 insn != 0;
1072 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1074 uid = INSN_UID (insn);
1076 insn_lengths[uid] = 0;
1078 if (LABEL_P (insn))
1080 int log = LABEL_TO_ALIGNMENT (insn);
1081 if (log)
1083 int align = 1 << log;
1084 int new_address = (insn_current_address + align - 1) & -align;
1085 insn_lengths[uid] = new_address - insn_current_address;
1089 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1091 if (NOTE_P (insn) || BARRIER_P (insn)
1092 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1093 continue;
1094 if (INSN_DELETED_P (insn))
1095 continue;
1097 body = PATTERN (insn);
1098 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1100 /* This only takes room if read-only data goes into the text
1101 section. */
1102 if (JUMP_TABLES_IN_TEXT_SECTION
1103 || readonly_data_section == text_section)
1104 insn_lengths[uid] = (XVECLEN (body,
1105 GET_CODE (body) == ADDR_DIFF_VEC)
1106 * GET_MODE_SIZE (GET_MODE (body)));
1107 /* Alignment is handled by ADDR_VEC_ALIGN. */
1109 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1110 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1111 else if (GET_CODE (body) == SEQUENCE)
1113 int i;
1114 int const_delay_slots;
1115 #ifdef DELAY_SLOTS
1116 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1117 #else
1118 const_delay_slots = 0;
1119 #endif
1120 int (*inner_length_fun) (rtx)
1121 = const_delay_slots ? length_fun : insn_default_length;
1122 /* Inside a delay slot sequence, we do not do any branch shortening
1123 if the shortening could change the number of delay slots
1124 of the branch. */
1125 for (i = 0; i < XVECLEN (body, 0); i++)
1127 rtx inner_insn = XVECEXP (body, 0, i);
1128 int inner_uid = INSN_UID (inner_insn);
1129 int inner_length;
1131 if (GET_CODE (body) == ASM_INPUT
1132 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1133 inner_length = (asm_insn_count (PATTERN (inner_insn))
1134 * insn_default_length (inner_insn));
1135 else
1136 inner_length = inner_length_fun (inner_insn);
1138 insn_lengths[inner_uid] = inner_length;
1139 if (const_delay_slots)
1141 if ((varying_length[inner_uid]
1142 = insn_variable_length_p (inner_insn)) != 0)
1143 varying_length[uid] = 1;
1144 INSN_ADDRESSES (inner_uid) = (insn_current_address
1145 + insn_lengths[uid]);
1147 else
1148 varying_length[inner_uid] = 0;
1149 insn_lengths[uid] += inner_length;
1152 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1154 insn_lengths[uid] = length_fun (insn);
1155 varying_length[uid] = insn_variable_length_p (insn);
1158 /* If needed, do any adjustment. */
1159 #ifdef ADJUST_INSN_LENGTH
1160 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1161 if (insn_lengths[uid] < 0)
1162 fatal_insn ("negative insn length", insn);
1163 #endif
1166 /* Now loop over all the insns finding varying length insns. For each,
1167 get the current insn length. If it has changed, reflect the change.
1168 When nothing changes for a full pass, we are done. */
1170 while (something_changed)
1172 something_changed = 0;
1173 insn_current_align = MAX_CODE_ALIGN - 1;
1174 for (insn_current_address = 0, insn = first;
1175 insn != 0;
1176 insn = NEXT_INSN (insn))
1178 int new_length;
1179 #ifdef ADJUST_INSN_LENGTH
1180 int tmp_length;
1181 #endif
1182 int length_align;
1184 uid = INSN_UID (insn);
1186 if (LABEL_P (insn))
1188 int log = LABEL_TO_ALIGNMENT (insn);
1190 #ifdef CASE_VECTOR_SHORTEN_MODE
1191 /* If the mode of a following jump table was changed, we
1192 may need to update the alignment of this label. */
1193 rtx next;
1194 bool next_is_jumptable;
1196 next = next_nonnote_insn (insn);
1197 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1198 if ((JUMP_TABLES_IN_TEXT_SECTION
1199 || readonly_data_section == text_section)
1200 && next_is_jumptable)
1202 int newlog = ADDR_VEC_ALIGN (next);
1203 if (newlog != log)
1205 log = newlog;
1206 LABEL_TO_ALIGNMENT (insn) = log;
1207 something_changed = 1;
1210 #endif
1212 if (log > insn_current_align)
1214 int align = 1 << log;
1215 int new_address= (insn_current_address + align - 1) & -align;
1216 insn_lengths[uid] = new_address - insn_current_address;
1217 insn_current_align = log;
1218 insn_current_address = new_address;
1220 else
1221 insn_lengths[uid] = 0;
1222 INSN_ADDRESSES (uid) = insn_current_address;
1223 continue;
1226 length_align = INSN_LENGTH_ALIGNMENT (insn);
1227 if (length_align < insn_current_align)
1228 insn_current_align = length_align;
1230 insn_last_address = INSN_ADDRESSES (uid);
1231 INSN_ADDRESSES (uid) = insn_current_address;
1233 #ifdef CASE_VECTOR_SHORTEN_MODE
1234 if (optimize && JUMP_P (insn)
1235 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1237 rtx body = PATTERN (insn);
1238 int old_length = insn_lengths[uid];
1239 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1240 rtx min_lab = XEXP (XEXP (body, 2), 0);
1241 rtx max_lab = XEXP (XEXP (body, 3), 0);
1242 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1243 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1244 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1245 rtx prev;
1246 int rel_align = 0;
1247 addr_diff_vec_flags flags;
1248 enum machine_mode vec_mode;
1250 /* Avoid automatic aggregate initialization. */
1251 flags = ADDR_DIFF_VEC_FLAGS (body);
1253 /* Try to find a known alignment for rel_lab. */
1254 for (prev = rel_lab;
1255 prev
1256 && ! insn_lengths[INSN_UID (prev)]
1257 && ! (varying_length[INSN_UID (prev)] & 1);
1258 prev = PREV_INSN (prev))
1259 if (varying_length[INSN_UID (prev)] & 2)
1261 rel_align = LABEL_TO_ALIGNMENT (prev);
1262 break;
1265 /* See the comment on addr_diff_vec_flags in rtl.h for the
1266 meaning of the flags values. base: REL_LAB vec: INSN */
1267 /* Anything after INSN still has addresses from the last
1268 pass; adjust these so that they reflect our current
1269 estimate for this pass. */
1270 if (flags.base_after_vec)
1271 rel_addr += insn_current_address - insn_last_address;
1272 if (flags.min_after_vec)
1273 min_addr += insn_current_address - insn_last_address;
1274 if (flags.max_after_vec)
1275 max_addr += insn_current_address - insn_last_address;
1276 /* We want to know the worst case, i.e. lowest possible value
1277 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1278 its offset is positive, and we have to be wary of code shrink;
1279 otherwise, it is negative, and we have to be wary of code
1280 size increase. */
1281 if (flags.min_after_base)
1283 /* If INSN is between REL_LAB and MIN_LAB, the size
1284 changes we are about to make can change the alignment
1285 within the observed offset, therefore we have to break
1286 it up into two parts that are independent. */
1287 if (! flags.base_after_vec && flags.min_after_vec)
1289 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1290 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1292 else
1293 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1295 else
1297 if (flags.base_after_vec && ! flags.min_after_vec)
1299 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1300 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1302 else
1303 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1305 /* Likewise, determine the highest possible value
1306 for the offset of MAX_LAB. */
1307 if (flags.max_after_base)
1309 if (! flags.base_after_vec && flags.max_after_vec)
1311 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1312 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1314 else
1315 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1317 else
1319 if (flags.base_after_vec && ! flags.max_after_vec)
1321 max_addr += align_fuzz (max_lab, insn, 0, 0);
1322 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1324 else
1325 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1327 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1328 max_addr - rel_addr, body);
1329 if (!increasing
1330 || (GET_MODE_SIZE (vec_mode)
1331 >= GET_MODE_SIZE (GET_MODE (body))))
1332 PUT_MODE (body, vec_mode);
1333 if (JUMP_TABLES_IN_TEXT_SECTION
1334 || readonly_data_section == text_section)
1336 insn_lengths[uid]
1337 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1338 insn_current_address += insn_lengths[uid];
1339 if (insn_lengths[uid] != old_length)
1340 something_changed = 1;
1343 continue;
1345 #endif /* CASE_VECTOR_SHORTEN_MODE */
1347 if (! (varying_length[uid]))
1349 if (NONJUMP_INSN_P (insn)
1350 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1352 int i;
1354 body = PATTERN (insn);
1355 for (i = 0; i < XVECLEN (body, 0); i++)
1357 rtx inner_insn = XVECEXP (body, 0, i);
1358 int inner_uid = INSN_UID (inner_insn);
1360 INSN_ADDRESSES (inner_uid) = insn_current_address;
1362 insn_current_address += insn_lengths[inner_uid];
1365 else
1366 insn_current_address += insn_lengths[uid];
1368 continue;
1371 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1373 int i;
1375 body = PATTERN (insn);
1376 new_length = 0;
1377 for (i = 0; i < XVECLEN (body, 0); i++)
1379 rtx inner_insn = XVECEXP (body, 0, i);
1380 int inner_uid = INSN_UID (inner_insn);
1381 int inner_length;
1383 INSN_ADDRESSES (inner_uid) = insn_current_address;
1385 /* insn_current_length returns 0 for insns with a
1386 non-varying length. */
1387 if (! varying_length[inner_uid])
1388 inner_length = insn_lengths[inner_uid];
1389 else
1390 inner_length = insn_current_length (inner_insn);
1392 if (inner_length != insn_lengths[inner_uid])
1394 if (!increasing || inner_length > insn_lengths[inner_uid])
1396 insn_lengths[inner_uid] = inner_length;
1397 something_changed = 1;
1399 else
1400 inner_length = insn_lengths[inner_uid];
1402 insn_current_address += inner_length;
1403 new_length += inner_length;
1406 else
1408 new_length = insn_current_length (insn);
1409 insn_current_address += new_length;
1412 #ifdef ADJUST_INSN_LENGTH
1413 /* If needed, do any adjustment. */
1414 tmp_length = new_length;
1415 ADJUST_INSN_LENGTH (insn, new_length);
1416 insn_current_address += (new_length - tmp_length);
1417 #endif
1419 if (new_length != insn_lengths[uid]
1420 && (!increasing || new_length > insn_lengths[uid]))
1422 insn_lengths[uid] = new_length;
1423 something_changed = 1;
1425 else
1426 insn_current_address += insn_lengths[uid] - new_length;
1428 /* For a non-optimizing compile, do only a single pass. */
1429 if (!increasing)
1430 break;
1433 free (varying_length);
1436 /* Given the body of an INSN known to be generated by an ASM statement, return
1437 the number of machine instructions likely to be generated for this insn.
1438 This is used to compute its length. */
1440 static int
1441 asm_insn_count (rtx body)
1443 const char *templ;
1445 if (GET_CODE (body) == ASM_INPUT)
1446 templ = XSTR (body, 0);
1447 else
1448 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1450 return asm_str_count (templ);
1453 /* Return the number of machine instructions likely to be generated for the
1454 inline-asm template. */
1456 asm_str_count (const char *templ)
1458 int count = 1;
1460 if (!*templ)
1461 return 0;
1463 for (; *templ; templ++)
1464 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1465 || *templ == '\n')
1466 count++;
1468 return count;
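/* Example (added for exposition, assuming the default ';' separator
   defined near the top of this file): asm_str_count ("") is 0,
   asm_str_count ("nop") is 1, and
   asm_str_count ("mov r0, r1\n\tadd r2, r3; nop") is 3, since both the
   newline and the ';' start a new logical assembler instruction.  */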
1471 /* ??? This is probably the wrong place for these. */
1472 /* Structure recording the mapping from source file and directory
1473 names at compile time to those to be embedded in debug
1474 information. */
1475 typedef struct debug_prefix_map
1477 const char *old_prefix;
1478 const char *new_prefix;
1479 size_t old_len;
1480 size_t new_len;
1481 struct debug_prefix_map *next;
1482 } debug_prefix_map;
1484 /* Linked list of such structures. */
1485 debug_prefix_map *debug_prefix_maps;
1488 /* Record a debug file prefix mapping. ARG is the argument to
1489 -fdebug-prefix-map and must be of the form OLD=NEW. */
1491 void
1492 add_debug_prefix_map (const char *arg)
1494 debug_prefix_map *map;
1495 const char *p;
1497 p = strchr (arg, '=');
1498 if (!p)
1500 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1501 return;
1503 map = XNEW (debug_prefix_map);
1504 map->old_prefix = xstrndup (arg, p - arg);
1505 map->old_len = p - arg;
1506 p++;
1507 map->new_prefix = xstrdup (p);
1508 map->new_len = strlen (p);
1509 map->next = debug_prefix_maps;
1510 debug_prefix_maps = map;
1513 /* Perform user-specified mapping of debug filename prefixes. Return
1514 the new name corresponding to FILENAME. */
1516 const char *
1517 remap_debug_filename (const char *filename)
1519 debug_prefix_map *map;
1520 char *s;
1521 const char *name;
1522 size_t name_len;
1524 for (map = debug_prefix_maps; map; map = map->next)
1525 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1526 break;
1527 if (!map)
1528 return filename;
1529 name = filename + map->old_len;
1530 name_len = strlen (name) + 1;
1531 s = (char *) alloca (name_len + map->new_len);
1532 memcpy (s, map->new_prefix, map->new_len);
1533 memcpy (s + map->new_len, name, name_len);
1534 return ggc_strdup (s);
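/* Example (added for exposition): with -fdebug-prefix-map=/home/user/src=/src
   the map records old_prefix "/home/user/src" and new_prefix "/src", so
   remap_debug_filename ("/home/user/src/lib/foo.c") returns
   "/src/lib/foo.c"; a filename matching no recorded prefix is returned
   unchanged.  */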
1537 /* Return true if DWARF2 debug info can be emitted for DECL. */
1539 static bool
1540 dwarf2_debug_info_emitted_p (tree decl)
1542 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1543 return false;
1545 if (DECL_IGNORED_P (decl))
1546 return false;
1548 return true;
1551 /* Return scope resulting from combination of S1 and S2. */
1552 static tree
1553 choose_inner_scope (tree s1, tree s2)
1555 if (!s1)
1556 return s2;
1557 if (!s2)
1558 return s1;
1559 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1560 return s1;
1561 return s2;
1564 /* Emit lexical block notes needed to change scope from S1 to S2. */
1566 static void
1567 change_scope (rtx orig_insn, tree s1, tree s2)
1569 rtx insn = orig_insn;
1570 tree com = NULL_TREE;
1571 tree ts1 = s1, ts2 = s2;
1572 tree s;
1574 while (ts1 != ts2)
1576 gcc_assert (ts1 && ts2);
1577 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1578 ts1 = BLOCK_SUPERCONTEXT (ts1);
1579 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1580 ts2 = BLOCK_SUPERCONTEXT (ts2);
1581 else
1583 ts1 = BLOCK_SUPERCONTEXT (ts1);
1584 ts2 = BLOCK_SUPERCONTEXT (ts2);
1587 com = ts1;
1589 /* Close scopes. */
1590 s = s1;
1591 while (s != com)
1593 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1594 NOTE_BLOCK (note) = s;
1595 s = BLOCK_SUPERCONTEXT (s);
1598 /* Open scopes. */
1599 s = s2;
1600 while (s != com)
1602 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1603 NOTE_BLOCK (insn) = s;
1604 s = BLOCK_SUPERCONTEXT (s);
1608 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1609 on the scope tree and the newly reordered instructions. */
1611 static void
1612 reemit_insn_block_notes (void)
1614 tree cur_block = DECL_INITIAL (cfun->decl);
1615 rtx insn, note;
1617 insn = get_insns ();
1618 for (; insn; insn = NEXT_INSN (insn))
1620 tree this_block;
1622 /* Prevent lexical blocks from straddling section boundaries. */
1623 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1625 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1626 s = BLOCK_SUPERCONTEXT (s))
1628 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1629 NOTE_BLOCK (note) = s;
1630 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1631 NOTE_BLOCK (note) = s;
1635 if (!active_insn_p (insn))
1636 continue;
1638 /* Avoid putting scope notes between jump table and its label. */
1639 if (JUMP_TABLE_DATA_P (insn))
1640 continue;
1642 this_block = insn_scope (insn);
1643 /* For sequences compute scope resulting from merging all scopes
1644 of instructions nested inside. */
1645 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
1647 int i;
1648 rtx body = PATTERN (insn);
1650 this_block = NULL;
1651 for (i = 0; i < XVECLEN (body, 0); i++)
1652 this_block = choose_inner_scope (this_block,
1653 insn_scope (XVECEXP (body, 0, i)));
1655 if (! this_block)
1657 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1658 continue;
1659 else
1660 this_block = DECL_INITIAL (cfun->decl);
1663 if (this_block != cur_block)
1665 change_scope (insn, cur_block, this_block);
1666 cur_block = this_block;
1670 /* change_scope emits before the insn, not after. */
1671 note = emit_note (NOTE_INSN_DELETED);
1672 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1673 delete_insn (note);
1675 reorder_blocks ();
1678 /* Output assembler code for the start of a function,
1679 and initialize some of the variables in this file
1680 for the new function. The label for the function and associated
1681 assembler pseudo-ops have already been output in `assemble_start_function'.
1683 FIRST is the first insn of the rtl for the function being compiled.
1684 FILE is the file to write assembler code to.
1685 OPTIMIZE_P is nonzero if we should eliminate redundant
1686 test and compare insns. */
1688 void
1689 final_start_function (rtx first, FILE *file,
1690 int optimize_p ATTRIBUTE_UNUSED)
1692 block_depth = 0;
1694 this_is_asm_operands = 0;
1696 need_profile_function = false;
1698 last_filename = LOCATION_FILE (prologue_location);
1699 last_linenum = LOCATION_LINE (prologue_location);
1700 last_discriminator = 0;
1702 high_block_linenum = high_function_linenum = last_linenum;
1704 if (!DECL_IGNORED_P (current_function_decl))
1705 debug_hooks->begin_prologue (last_linenum, last_filename);
1707 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1708 dwarf2out_begin_prologue (0, NULL);
1710 #ifdef LEAF_REG_REMAP
1711 if (crtl->uses_only_leaf_regs)
1712 leaf_renumber_regs (first);
1713 #endif
1715 /* The Sun386i and perhaps other machines don't work right
1716 if the profiling code comes after the prologue. */
1717 if (targetm.profile_before_prologue () && crtl->profile)
1719 if (targetm.asm_out.function_prologue
1720 == default_function_pro_epilogue
1721 #ifdef HAVE_prologue
1722 && HAVE_prologue
1723 #endif
1726 rtx insn;
1727 for (insn = first; insn; insn = NEXT_INSN (insn))
1728 if (!NOTE_P (insn))
1730 insn = NULL_RTX;
1731 break;
1733 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1734 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1735 break;
1736 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1737 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1738 continue;
1739 else
1741 insn = NULL_RTX;
1742 break;
1745 if (insn)
1746 need_profile_function = true;
1747 else
1748 profile_function (file);
1750 else
1751 profile_function (file);
1754 /* If debugging, assign block numbers to all of the blocks in this
1755 function. */
1756 if (write_symbols)
1758 reemit_insn_block_notes ();
1759 number_blocks (current_function_decl);
1760 /* We never actually put out begin/end notes for the top-level
1761 block in the function. But, conceptually, that block is
1762 always needed. */
1763 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1766 if (warn_frame_larger_than
1767 && get_frame_size () > frame_larger_than_size)
1769 /* Issue a warning. (WARN_FRAME_LARGER_THAN_EXTRA_TEXT is
1770 provided by configuration. The way extra text is added
1771 here may prevent localization from working properly.
1772 It's totally broken.) */
1773 warning (OPT_Wframe_larger_than_,
1774 "the frame size of %wd bytes is larger than %wd bytes"
1775 WARN_FRAME_LARGER_THAN_EXTRA_TEXT,
1776 get_frame_size (), frame_larger_than_size);
1779 /* First output the function prologue: code to set up the stack frame. */
1780 targetm.asm_out.function_prologue (file, get_frame_size ());
1782 /* If the machine represents the prologue as RTL, the profiling code must
1783 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1784 #ifdef HAVE_prologue
1785 if (! HAVE_prologue)
1786 #endif
1787 profile_after_prologue (file);
1790 static void
1791 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1793 if (!targetm.profile_before_prologue () && crtl->profile)
1794 profile_function (file);
1797 static void
1798 profile_function (FILE *file ATTRIBUTE_UNUSED)
1800 #ifndef NO_PROFILE_COUNTERS
1801 # define NO_PROFILE_COUNTERS 0
1802 #endif
1803 #ifdef ASM_OUTPUT_REG_PUSH
1804 rtx sval = NULL, chain = NULL;
1806 if (cfun->returns_struct)
1807 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1808 true);
1809 if (cfun->static_chain_decl)
1810 chain = targetm.calls.static_chain (current_function_decl, true);
1811 #endif /* ASM_OUTPUT_REG_PUSH */
1813 if (! NO_PROFILE_COUNTERS)
1815 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1816 switch_to_section (data_section);
1817 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1818 targetm.asm_out.internal_label (file, "LP", FUNC_LABEL_ID (cfun));
1819 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1822 switch_to_section (current_function_section ());
1824 #ifdef ASM_OUTPUT_REG_PUSH
1825 if (sval && REG_P (sval))
1826 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1827 if (chain && REG_P (chain))
1828 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1829 #endif
1831 FUNCTION_PROFILER (file, FUNC_LABEL_ID (cfun));
1833 #ifdef ASM_OUTPUT_REG_PUSH
1834 if (chain && REG_P (chain))
1835 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1836 if (sval && REG_P (sval))
1837 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1838 #endif
1841 /* Output assembler code for the end of a function.
1842 For clarity, args are same as those of `final_start_function'
1843 even though not all of them are needed. */
1845 void
1846 final_end_function (void)
1848 app_disable ();
1850 if (!DECL_IGNORED_P (current_function_decl))
1851 debug_hooks->end_function (high_function_linenum);
1853 /* Finally, output the function epilogue:
1854 code to restore the stack frame and return to the caller. */
1855 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1857 /* And debug output. */
1858 if (!DECL_IGNORED_P (current_function_decl))
1859 debug_hooks->end_epilogue (last_linenum, last_filename);
1861 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1862 && dwarf2out_do_frame ())
1863 dwarf2out_end_epilogue (last_linenum, last_filename);
1867 /* Dumper helper for basic block information. FILE is the assembly
1868 output file, and INSN is the instruction being emitted. */
1870 static void
1871 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1872 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1874 basic_block bb;
1876 if (!flag_debug_asm)
1877 return;
1879 if (INSN_UID (insn) < bb_map_size
1880 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1882 edge e;
1883 edge_iterator ei;
1885 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1886 if (bb->frequency)
1887 fprintf (file, " freq:%d", bb->frequency);
1888 if (bb->count)
1889 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1890 bb->count);
1891 fprintf (file, " seq:%d", (*bb_seqn)++);
1892 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1893 FOR_EACH_EDGE (e, ei, bb->preds)
1895 dump_edge_info (file, e, TDF_DETAILS, 0);
1897 fprintf (file, "\n");
1899 if (INSN_UID (insn) < bb_map_size
1900 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1902 edge e;
1903 edge_iterator ei;
1905 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1906 FOR_EACH_EDGE (e, ei, bb->succs)
1908 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1910 fprintf (file, "\n");
1914 /* Output assembler code for some insns: all or part of a function.
1915 For description of args, see `final_start_function', above. */
1917 void
1918 final (rtx first, FILE *file, int optimize_p)
1920 rtx insn, next;
1921 int seen = 0;
1923 /* Used for -dA dump. */
1924 basic_block *start_to_bb = NULL;
1925 basic_block *end_to_bb = NULL;
1926 int bb_map_size = 0;
1927 int bb_seqn = 0;
1929 last_ignored_compare = 0;
1931 #ifdef HAVE_cc0
1932 for (insn = first; insn; insn = NEXT_INSN (insn))
1934 /* If CC tracking across branches is enabled, record the insn which
1935 jumps to each branch only reached from one place. */
1936 if (optimize_p && JUMP_P (insn))
1938 rtx lab = JUMP_LABEL (insn);
1939 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1941 LABEL_REFS (lab) = insn;
1945 #endif
1947 init_recog ();
1949 CC_STATUS_INIT;
1951 if (flag_debug_asm)
1953 basic_block bb;
1955 bb_map_size = get_max_uid () + 1;
1956 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1957 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1959 /* There is no cfg for a thunk. */
1960 if (!cfun->is_thunk)
1961 FOR_EACH_BB_REVERSE (bb)
1963 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1964 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1968 /* Output the insns. */
1969 for (insn = first; insn;)
1971 if (HAVE_ATTR_length)
1973 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1975 /* This can be triggered by bugs elsewhere in the compiler if
1976 new insns are created after init_insn_lengths is called. */
1977 gcc_assert (NOTE_P (insn));
1978 insn_current_address = -1;
1980 else
1981 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1984 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1985 bb_map_size, &bb_seqn);
1986 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1989 if (flag_debug_asm)
1991 free (start_to_bb);
1992 free (end_to_bb);
1995 /* Remove CFI notes, to avoid compare-debug failures. */
1996 for (insn = first; insn; insn = next)
1998 next = NEXT_INSN (insn);
1999 if (NOTE_P (insn)
2000 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2001 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2002 delete_insn (insn);
2006 const char *
2007 get_insn_template (int code, rtx insn)
2009 switch (insn_data[code].output_format)
2011 case INSN_OUTPUT_FORMAT_SINGLE:
2012 return insn_data[code].output.single;
2013 case INSN_OUTPUT_FORMAT_MULTI:
2014 return insn_data[code].output.multi[which_alternative];
2015 case INSN_OUTPUT_FORMAT_FUNCTION:
2016 gcc_assert (insn);
2017 return (*insn_data[code].output.function) (recog_data.operand, insn);
2019 default:
2020 gcc_unreachable ();
2024 /* Emit the appropriate declaration for an alternate-entry-point
2025 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2026 LABEL_KIND != LABEL_NORMAL.
2028 The case fall-through in this function is intentional. */
2029 static void
2030 output_alternate_entry_point (FILE *file, rtx insn)
2032 const char *name = LABEL_NAME (insn);
2034 switch (LABEL_KIND (insn))
2036 case LABEL_WEAK_ENTRY:
2037 #ifdef ASM_WEAKEN_LABEL
2038 ASM_WEAKEN_LABEL (file, name);
2039 #endif
2040 case LABEL_GLOBAL_ENTRY:
2041 targetm.asm_out.globalize_label (file, name);
2042 case LABEL_STATIC_ENTRY:
2043 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2044 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2045 #endif
2046 ASM_OUTPUT_LABEL (file, name);
2047 break;
2049 case LABEL_NORMAL:
2050 default:
2051 gcc_unreachable ();
2055 /* Given a CALL_INSN, find and return the nested CALL. */
2056 static rtx
2057 call_from_call_insn (rtx insn)
2059 rtx x;
2060 gcc_assert (CALL_P (insn));
2061 x = PATTERN (insn);
2063 while (GET_CODE (x) != CALL)
2065 switch (GET_CODE (x))
2067 default:
2068 gcc_unreachable ();
2069 case COND_EXEC:
2070 x = COND_EXEC_CODE (x);
2071 break;
2072 case PARALLEL:
2073 x = XVECEXP (x, 0, 0);
2074 break;
2075 case SET:
2076 x = XEXP (x, 1);
2077 break;
2080 return x;
2083 /* The final scan for one insn, INSN.
2084 Args are same as in `final', except that INSN
2085 is the insn being scanned.
2086 Value returned is the next insn to be scanned.
2088 NOPEEPHOLES is the flag to disallow peephole processing (currently
2089 used within delayed branch sequence output).
2091 SEEN is used to track the end of the prologue, for emitting
2092 debug information. We force the emission of a line note after
2093 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2094 at the beginning of the second basic block, whichever comes
2095 first. */
2098 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2099 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2101 #ifdef HAVE_cc0
2102 rtx set;
2103 #endif
2104 rtx next;
2106 insn_counter++;
2108 /* Ignore deleted insns. These can occur when we split insns (due to a
2109 template of "#") while not optimizing. */
2110 if (INSN_DELETED_P (insn))
2111 return NEXT_INSN (insn);
2113 switch (GET_CODE (insn))
2115 case NOTE:
2116 switch (NOTE_KIND (insn))
2118 case NOTE_INSN_DELETED:
2119 break;
2121 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2122 in_cold_section_p = !in_cold_section_p;
2124 if (dwarf2out_do_frame ())
2125 dwarf2out_switch_text_section ();
2126 else if (!DECL_IGNORED_P (current_function_decl))
2127 debug_hooks->switch_text_section ();
2129 switch_to_section (current_function_section ());
2130 targetm.asm_out.function_switched_text_sections (asm_out_file,
2131 current_function_decl,
2132 in_cold_section_p);
2133 /* Emit a label for the split cold section. Form label name by
2134 suffixing "cold" to the original function's name. */
2135 if (in_cold_section_p)
2137 tree cold_function_name
2138 = clone_function_name (current_function_decl, "cold");
2139 ASM_OUTPUT_LABEL (asm_out_file,
2140 IDENTIFIER_POINTER (cold_function_name));
2142 has_cold_section_p = true;
2143 break;
2145 case NOTE_INSN_BASIC_BLOCK:
2146 if (need_profile_function)
2148 profile_function (asm_out_file);
2149 need_profile_function = false;
2152 if (targetm.asm_out.unwind_emit)
2153 targetm.asm_out.unwind_emit (asm_out_file, insn);
2155 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
2157 *seen |= SEEN_EMITTED;
2158 force_source_line = true;
2160 else
2161 *seen |= SEEN_BB;
2163 break;
2165 case NOTE_INSN_EH_REGION_BEG:
2166 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2167 NOTE_EH_HANDLER (insn));
2168 break;
2170 case NOTE_INSN_EH_REGION_END:
2171 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2172 NOTE_EH_HANDLER (insn));
2173 break;
2175 case NOTE_INSN_PROLOGUE_END:
2176 targetm.asm_out.function_end_prologue (file);
2177 profile_after_prologue (file);
2179 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2181 *seen |= SEEN_EMITTED;
2182 force_source_line = true;
2184 else
2185 *seen |= SEEN_NOTE;
2187 break;
2189 case NOTE_INSN_EPILOGUE_BEG:
2190 if (!DECL_IGNORED_P (current_function_decl))
2191 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2192 targetm.asm_out.function_begin_epilogue (file);
2193 break;
2195 case NOTE_INSN_CFI:
2196 dwarf2out_emit_cfi (NOTE_CFI (insn));
2197 break;
2199 case NOTE_INSN_CFI_LABEL:
2200 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2201 NOTE_LABEL_NUMBER (insn));
2202 break;
2204 case NOTE_INSN_FUNCTION_BEG:
2205 if (need_profile_function)
2207 profile_function (asm_out_file);
2208 need_profile_function = false;
2211 app_disable ();
2212 if (!DECL_IGNORED_P (current_function_decl))
2213 debug_hooks->end_prologue (last_linenum, last_filename);
2215 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2217 *seen |= SEEN_EMITTED;
2218 force_source_line = true;
2220 else
2221 *seen |= SEEN_NOTE;
2223 break;
2225 case NOTE_INSN_BLOCK_BEG:
2226 if (debug_info_level == DINFO_LEVEL_NORMAL
2227 || debug_info_level == DINFO_LEVEL_VERBOSE
2228 || write_symbols == DWARF2_DEBUG
2229 || write_symbols == VMS_AND_DWARF2_DEBUG
2230 || write_symbols == VMS_DEBUG)
2232 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2234 app_disable ();
2235 ++block_depth;
2236 high_block_linenum = last_linenum;
2238 /* Output debugging info about the symbol-block beginning. */
2239 if (!DECL_IGNORED_P (current_function_decl))
2240 debug_hooks->begin_block (last_linenum, n);
2242 /* Mark this block as output. */
2243 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2245 if (write_symbols == DBX_DEBUG
2246 || write_symbols == SDB_DEBUG)
2248 location_t *locus_ptr
2249 = block_nonartificial_location (NOTE_BLOCK (insn));
2251 if (locus_ptr != NULL)
2253 override_filename = LOCATION_FILE (*locus_ptr);
2254 override_linenum = LOCATION_LINE (*locus_ptr);
2255 override_discriminator =
2256 get_discriminator_from_locus (*locus_ptr);
2259 break;
2261 case NOTE_INSN_BLOCK_END:
2262 if (debug_info_level == DINFO_LEVEL_NORMAL
2263 || debug_info_level == DINFO_LEVEL_VERBOSE
2264 || write_symbols == DWARF2_DEBUG
2265 || write_symbols == VMS_AND_DWARF2_DEBUG
2266 || write_symbols == VMS_DEBUG)
2268 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2270 app_disable ();
2272 /* End of a symbol-block. */
2273 --block_depth;
2274 gcc_assert (block_depth >= 0);
2276 if (!DECL_IGNORED_P (current_function_decl))
2277 debug_hooks->end_block (high_block_linenum, n);
2279 if (write_symbols == DBX_DEBUG
2280 || write_symbols == SDB_DEBUG)
2282 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2283 location_t *locus_ptr
2284 = block_nonartificial_location (outer_block);
2286 if (locus_ptr != NULL)
2288 override_filename = LOCATION_FILE (*locus_ptr);
2289 override_linenum = LOCATION_LINE (*locus_ptr);
2290 override_discriminator =
2291 get_discriminator_from_locus (*locus_ptr);
2293 else
2295 override_filename = NULL;
2296 override_linenum = 0;
2297 override_discriminator = 0;
2300 break;
2302 case NOTE_INSN_DELETED_LABEL:
2303 /* Emit the label. We may have deleted the CODE_LABEL because
2304 the label could be proved to be unreachable, though still
2305 referenced (in the form of having its address taken). */
2306 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2307 break;
2309 case NOTE_INSN_DELETED_DEBUG_LABEL:
2310 /* Similarly, but we need to use a different namespace for it. */
2311 if (CODE_LABEL_NUMBER (insn) != -1)
2312 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2313 break;
2315 case NOTE_INSN_VAR_LOCATION:
2316 case NOTE_INSN_CALL_ARG_LOCATION:
2317 if (!DECL_IGNORED_P (current_function_decl))
2318 debug_hooks->var_location (insn);
2319 break;
2321 default:
2322 gcc_unreachable ();
2323 break;
2325 break;
2327 case BARRIER:
2328 break;
2330 case CODE_LABEL:
2331 /* The target port might emit labels in the output function for
2332 some insn, e.g. sh.c output_branchy_insn. */
2333 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2335 int align = LABEL_TO_ALIGNMENT (insn);
2336 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2337 int max_skip = LABEL_TO_MAX_SKIP (insn);
2338 #endif
2340 if (align && NEXT_INSN (insn))
2342 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2343 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2344 #else
2345 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2346 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2347 #else
2348 ASM_OUTPUT_ALIGN (file, align);
2349 #endif
2350 #endif
2353 CC_STATUS_INIT;
2355 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2356 debug_hooks->label (insn);
2358 app_disable ();
2360 next = next_nonnote_insn (insn);
2361 /* If this label is followed by a jump-table, make sure we put
2362 the label in the read-only section. Also possibly write the
2363 label and jump table together. */
2364 if (next != 0 && JUMP_TABLE_DATA_P (next))
2366 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2367 /* In this case, the case vector is being moved by the
2368 target, so don't output the label at all. Leave that
2369 to the back end macros. */
2370 #else
2371 if (! JUMP_TABLES_IN_TEXT_SECTION)
2373 int log_align;
2375 switch_to_section (targetm.asm_out.function_rodata_section
2376 (current_function_decl));
2378 #ifdef ADDR_VEC_ALIGN
2379 log_align = ADDR_VEC_ALIGN (next);
2380 #else
2381 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2382 #endif
2383 ASM_OUTPUT_ALIGN (file, log_align);
2385 else
2386 switch_to_section (current_function_section ());
2388 #ifdef ASM_OUTPUT_CASE_LABEL
2389 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2390 next);
2391 #else
2392 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2393 #endif
2394 #endif
2395 break;
2397 if (LABEL_ALT_ENTRY_P (insn))
2398 output_alternate_entry_point (file, insn);
2399 else
2400 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2401 break;
2403 default:
2405 rtx body = PATTERN (insn);
2406 int insn_code_number;
2407 const char *templ;
2408 bool is_stmt;
2410 /* Reset this early so it is correct for ASM statements. */
2411 current_insn_predicate = NULL_RTX;
2413 /* An INSN, JUMP_INSN or CALL_INSN.
2414 First check for special kinds that recog doesn't recognize. */
2416 if (GET_CODE (body) == USE /* These are just declarations. */
2417 || GET_CODE (body) == CLOBBER)
2418 break;
2420 #ifdef HAVE_cc0
2422 /* If there is a REG_CC_SETTER note on this insn, it means that
2423 the setting of the condition code was done in the delay slot
2424 of the insn that branched here. So recover the cc status
2425 from the insn that set it. */
2427 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2428 if (note)
2430 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2431 cc_prev_status = cc_status;
2434 #endif
2436 /* Detect insns that are really jump-tables
2437 and output them as such. */
2439 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2441 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2442 int vlen, idx;
2443 #endif
2445 if (! JUMP_TABLES_IN_TEXT_SECTION)
2446 switch_to_section (targetm.asm_out.function_rodata_section
2447 (current_function_decl));
2448 else
2449 switch_to_section (current_function_section ());
2451 app_disable ();
2453 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2454 if (GET_CODE (body) == ADDR_VEC)
2456 #ifdef ASM_OUTPUT_ADDR_VEC
2457 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2458 #else
2459 gcc_unreachable ();
2460 #endif
2462 else
2464 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2465 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2466 #else
2467 gcc_unreachable ();
2468 #endif
2470 #else
2471 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2472 for (idx = 0; idx < vlen; idx++)
2474 if (GET_CODE (body) == ADDR_VEC)
2476 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2477 ASM_OUTPUT_ADDR_VEC_ELT
2478 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2479 #else
2480 gcc_unreachable ();
2481 #endif
2483 else
2485 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2486 ASM_OUTPUT_ADDR_DIFF_ELT
2487 (file,
2488 body,
2489 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2490 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2491 #else
2492 gcc_unreachable ();
2493 #endif
2496 #ifdef ASM_OUTPUT_CASE_END
2497 ASM_OUTPUT_CASE_END (file,
2498 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2499 insn);
2500 #endif
2501 #endif
2503 switch_to_section (current_function_section ());
2505 break;
2507 /* Output this line note if it is the first or the last line
2508 note in a row. */
2509 if (!DECL_IGNORED_P (current_function_decl)
2510 && notice_source_line (insn, &is_stmt))
2511 (*debug_hooks->source_line) (last_linenum, last_filename,
2512 last_discriminator, is_stmt);
2514 if (GET_CODE (body) == ASM_INPUT)
2516 const char *string = XSTR (body, 0);
2518 /* There's no telling what that did to the condition codes. */
2519 CC_STATUS_INIT;
2521 if (string[0])
2523 expanded_location loc;
2525 app_enable ();
2526 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2527 if (*loc.file && loc.line)
2528 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2529 ASM_COMMENT_START, loc.line, loc.file);
2530 fprintf (asm_out_file, "\t%s\n", string);
2531 #if HAVE_AS_LINE_ZERO
2532 if (*loc.file && loc.line)
2533 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2534 #endif
2536 break;
2539 /* Detect `asm' construct with operands. */
2540 if (asm_noperands (body) >= 0)
2542 unsigned int noperands = asm_noperands (body);
2543 rtx *ops = XALLOCAVEC (rtx, noperands);
2544 const char *string;
2545 location_t loc;
2546 expanded_location expanded;
2548 /* There's no telling what that did to the condition codes. */
2549 CC_STATUS_INIT;
2551 /* Get out the operand values. */
2552 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2553 /* Inhibit dying on what would otherwise be compiler bugs. */
2554 insn_noperands = noperands;
2555 this_is_asm_operands = insn;
2556 expanded = expand_location (loc);
2558 #ifdef FINAL_PRESCAN_INSN
2559 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2560 #endif
2562 /* Output the insn using them. */
2563 if (string[0])
2565 app_enable ();
2566 if (expanded.file && expanded.line)
2567 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2568 ASM_COMMENT_START, expanded.line, expanded.file);
2569 output_asm_insn (string, ops);
2570 #if HAVE_AS_LINE_ZERO
2571 if (expanded.file && expanded.line)
2572 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2573 #endif
2576 if (targetm.asm_out.final_postscan_insn)
2577 targetm.asm_out.final_postscan_insn (file, insn, ops,
2578 insn_noperands);
2580 this_is_asm_operands = 0;
2581 break;
2584 app_disable ();
2586 if (GET_CODE (body) == SEQUENCE)
2588 /* A delayed-branch sequence */
2589 int i;
2591 final_sequence = body;
2593 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2594 force the restoration of a comparison that was previously
2595 thought unnecessary. If that happens, cancel this sequence
2596 and cause that insn to be restored. */
2598 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2599 if (next != XVECEXP (body, 0, 1))
2601 final_sequence = 0;
2602 return next;
2605 for (i = 1; i < XVECLEN (body, 0); i++)
2607 rtx insn = XVECEXP (body, 0, i);
2608 rtx next = NEXT_INSN (insn);
2609 /* We loop in case any instruction in a delay slot gets
2610 split. */
2612 insn = final_scan_insn (insn, file, 0, 1, seen);
2613 while (insn != next);
2615 #ifdef DBR_OUTPUT_SEQEND
2616 DBR_OUTPUT_SEQEND (file);
2617 #endif
2618 final_sequence = 0;
2620 /* If the insn requiring the delay slot was a CALL_INSN, the
2621 insns in the delay slot are actually executed before the
2622 called function. Hence we don't preserve any CC-setting
2623 actions in these insns and the CC must be marked as being
2624 clobbered by the function. */
2625 if (CALL_P (XVECEXP (body, 0, 0)))
2627 CC_STATUS_INIT;
2629 break;
2632 /* We have a real machine instruction as rtl. */
2634 body = PATTERN (insn);
2636 #ifdef HAVE_cc0
2637 set = single_set (insn);
2639 /* Check for redundant test and compare instructions
2640 (when the condition codes are already set up as desired).
2641 This is done only when optimizing; if not optimizing,
2642 it should be possible for the user to alter a variable
2643 with the debugger in between statements
2644 and the next statement should reexamine the variable
2645 to compute the condition codes. */
2647 if (optimize_p)
2649 if (set
2650 && GET_CODE (SET_DEST (set)) == CC0
2651 && insn != last_ignored_compare)
2653 rtx src1, src2;
2654 if (GET_CODE (SET_SRC (set)) == SUBREG)
2655 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2657 src1 = SET_SRC (set);
2658 src2 = NULL_RTX;
2659 if (GET_CODE (SET_SRC (set)) == COMPARE)
2661 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2662 XEXP (SET_SRC (set), 0)
2663 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2664 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2665 XEXP (SET_SRC (set), 1)
2666 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2667 if (XEXP (SET_SRC (set), 1)
2668 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2669 src2 = XEXP (SET_SRC (set), 0);
2671 if ((cc_status.value1 != 0
2672 && rtx_equal_p (src1, cc_status.value1))
2673 || (cc_status.value2 != 0
2674 && rtx_equal_p (src1, cc_status.value2))
2675 || (src2 != 0 && cc_status.value1 != 0
2676 && rtx_equal_p (src2, cc_status.value1))
2677 || (src2 != 0 && cc_status.value2 != 0
2678 && rtx_equal_p (src2, cc_status.value2)))
2680 /* Don't delete insn if it has an addressing side-effect. */
2681 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2682 /* or if anything in it is volatile. */
2683 && ! volatile_refs_p (PATTERN (insn)))
2685 /* We don't really delete the insn; just ignore it. */
2686 last_ignored_compare = insn;
2687 break;
2693 /* If this is a conditional branch, maybe modify it
2694 if the cc's are in a nonstandard state
2695 so that it accomplishes the same thing that it would
2696 do straightforwardly if the cc's were set up normally. */
2698 if (cc_status.flags != 0
2699 && JUMP_P (insn)
2700 && GET_CODE (body) == SET
2701 && SET_DEST (body) == pc_rtx
2702 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2703 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2704 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2706 /* This function may alter the contents of its argument
2707 and clear some of the cc_status.flags bits.
2708 It may also return 1 meaning condition now always true
2709 or -1 meaning condition now always false
2710 or 2 meaning condition nontrivial but altered. */
2711 int result = alter_cond (XEXP (SET_SRC (body), 0));
2712 /* If condition now has fixed value, replace the IF_THEN_ELSE
2713 with its then-operand or its else-operand. */
2714 if (result == 1)
2715 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2716 if (result == -1)
2717 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2719 /* The jump is now either unconditional or a no-op.
2720 If it has become a no-op, don't try to output it.
2721 (It would not be recognized.) */
2722 if (SET_SRC (body) == pc_rtx)
2724 delete_insn (insn);
2725 break;
2727 else if (ANY_RETURN_P (SET_SRC (body)))
2728 /* Replace (set (pc) (return)) with (return). */
2729 PATTERN (insn) = body = SET_SRC (body);
2731 /* Rerecognize the instruction if it has changed. */
2732 if (result != 0)
2733 INSN_CODE (insn) = -1;
2736 /* If this is a conditional trap, maybe modify it if the cc's
2737 are in a nonstandard state so that it accomplishes the same
2738 thing that it would do straightforwardly if the cc's were
2739 set up normally. */
2740 if (cc_status.flags != 0
2741 && NONJUMP_INSN_P (insn)
2742 && GET_CODE (body) == TRAP_IF
2743 && COMPARISON_P (TRAP_CONDITION (body))
2744 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2746 /* This function may alter the contents of its argument
2747 and clear some of the cc_status.flags bits.
2748 It may also return 1 meaning condition now always true
2749 or -1 meaning condition now always false
2750 or 2 meaning condition nontrivial but altered. */
2751 int result = alter_cond (TRAP_CONDITION (body));
2753 /* If TRAP_CONDITION has become always false, delete the
2754 instruction. */
2755 if (result == -1)
2757 delete_insn (insn);
2758 break;
2761 /* If TRAP_CONDITION has become always true, replace
2762 TRAP_CONDITION with const_true_rtx. */
2763 if (result == 1)
2764 TRAP_CONDITION (body) = const_true_rtx;
2766 /* Rerecognize the instruction if it has changed. */
2767 if (result != 0)
2768 INSN_CODE (insn) = -1;
2771 /* Make same adjustments to instructions that examine the
2772 condition codes without jumping and instructions that
2773 handle conditional moves (if this machine has either one). */
2775 if (cc_status.flags != 0
2776 && set != 0)
2778 rtx cond_rtx, then_rtx, else_rtx;
2780 if (!JUMP_P (insn)
2781 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2783 cond_rtx = XEXP (SET_SRC (set), 0);
2784 then_rtx = XEXP (SET_SRC (set), 1);
2785 else_rtx = XEXP (SET_SRC (set), 2);
2787 else
2789 cond_rtx = SET_SRC (set);
2790 then_rtx = const_true_rtx;
2791 else_rtx = const0_rtx;
2794 if (COMPARISON_P (cond_rtx)
2795 && XEXP (cond_rtx, 0) == cc0_rtx)
2797 int result;
2798 result = alter_cond (cond_rtx);
2799 if (result == 1)
2800 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2801 else if (result == -1)
2802 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2803 else if (result == 2)
2804 INSN_CODE (insn) = -1;
2805 if (SET_DEST (set) == SET_SRC (set))
2806 delete_insn (insn);
2810 #endif
2812 #ifdef HAVE_peephole
2813 /* Do machine-specific peephole optimizations if desired. */
2815 if (optimize_p && !flag_no_peephole && !nopeepholes)
2817 rtx next = peephole (insn);
2818 /* When peepholing, if there were notes within the peephole,
2819 emit them before the peephole. */
2820 if (next != 0 && next != NEXT_INSN (insn))
2822 rtx note, prev = PREV_INSN (insn);
2824 for (note = NEXT_INSN (insn); note != next;
2825 note = NEXT_INSN (note))
2826 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2828 /* Put the notes in the proper position for a later
2829 rescan. For example, the SH target can do this
2830 when generating a far jump in a delayed branch
2831 sequence. */
2832 note = NEXT_INSN (insn);
2833 PREV_INSN (note) = prev;
2834 NEXT_INSN (prev) = note;
2835 NEXT_INSN (PREV_INSN (next)) = insn;
2836 PREV_INSN (insn) = PREV_INSN (next);
2837 NEXT_INSN (insn) = next;
2838 PREV_INSN (next) = insn;
2841 /* PEEPHOLE might have changed this. */
2842 body = PATTERN (insn);
2844 #endif
2846 /* Try to recognize the instruction.
2847 If successful, verify that the operands satisfy the
2848 constraints for the instruction. Crash if they don't,
2849 since `reload' should have changed them so that they do. */
2851 insn_code_number = recog_memoized (insn);
2852 cleanup_subreg_operands (insn);
2854 /* Dump the insn in the assembly for debugging (-dAP).
2855 If the final dump is requested as slim RTL, dump slim
2856 RTL to the assembly file also. */
2857 if (flag_dump_rtl_in_asm)
2859 print_rtx_head = ASM_COMMENT_START;
2860 if (! (dump_flags & TDF_SLIM))
2861 print_rtl_single (asm_out_file, insn);
2862 else
2863 dump_insn_slim (asm_out_file, insn);
2864 print_rtx_head = "";
2867 if (! constrain_operands_cached (1))
2868 fatal_insn_not_found (insn);
2870 /* Some target machines need to prescan each insn before
2871 it is output. */
2873 #ifdef FINAL_PRESCAN_INSN
2874 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2875 #endif
2877 if (targetm.have_conditional_execution ()
2878 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2879 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2881 #ifdef HAVE_cc0
2882 cc_prev_status = cc_status;
2884 /* Update `cc_status' for this instruction.
2885 The instruction's output routine may change it further.
2886 If the output routine for a jump insn needs to depend
2887 on the cc status, it should look at cc_prev_status. */
2889 NOTICE_UPDATE_CC (body, insn);
2890 #endif
2892 current_output_insn = debug_insn = insn;
2894 /* Find the proper template for this insn. */
2895 templ = get_insn_template (insn_code_number, insn);
2897 /* If the C code returns 0, it means that it is a jump insn
2898 which follows a deleted test insn, and that test insn
2899 needs to be reinserted. */
2900 if (templ == 0)
2902 rtx prev;
2904 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2906 /* We have already processed the notes between the setter and
2907 the user. Make sure we don't process them again; this is
2908 particularly important if one of the notes is a block
2909 scope note or an EH note. */
2910 for (prev = insn;
2911 prev != last_ignored_compare;
2912 prev = PREV_INSN (prev))
2914 if (NOTE_P (prev))
2915 delete_insn (prev); /* Use delete_note. */
2918 return prev;
2921 /* If the template is the string "#", it means that this insn must
2922 be split. */
2923 if (templ[0] == '#' && templ[1] == '\0')
2925 rtx new_rtx = try_split (body, insn, 0);
2927 /* If we didn't split the insn, go away. */
2928 if (new_rtx == insn && PATTERN (new_rtx) == body)
2929 fatal_insn ("could not split insn", insn);
2931 /* If we have a length attribute, this instruction should have
2932 been split in shorten_branches, to ensure that we would have
2933 valid length info for the splitees. */
2934 gcc_assert (!HAVE_ATTR_length);
2936 return new_rtx;
2939 /* ??? This will put the directives in the wrong place if
2940 get_insn_template outputs assembly directly. However, calling it
2941 before get_insn_template breaks if the insn is split. */
2942 if (targetm.asm_out.unwind_emit_before_insn
2943 && targetm.asm_out.unwind_emit)
2944 targetm.asm_out.unwind_emit (asm_out_file, insn);
2946 if (CALL_P (insn))
2948 rtx x = call_from_call_insn (insn);
2949 x = XEXP (x, 0);
2950 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2952 tree t;
2953 x = XEXP (x, 0);
2954 t = SYMBOL_REF_DECL (x);
2955 if (t)
2956 assemble_external (t);
2958 if (!DECL_IGNORED_P (current_function_decl))
2959 debug_hooks->var_location (insn);
2962 /* Output assembler code from the template. */
2963 output_asm_insn (templ, recog_data.operand);
2965 /* Some target machines need to postscan each insn after
2966 it is output. */
2967 if (targetm.asm_out.final_postscan_insn)
2968 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2969 recog_data.n_operands);
2971 if (!targetm.asm_out.unwind_emit_before_insn
2972 && targetm.asm_out.unwind_emit)
2973 targetm.asm_out.unwind_emit (asm_out_file, insn);
2975 current_output_insn = debug_insn = 0;
2978 return NEXT_INSN (insn);
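/* Editorial sketch, not GCC code: the SEEN tracking described in the
   header comment of final_scan_insn is a small bit-mask state machine.
   A source line note is forced only on the second occurrence of a trigger
   event (the prologue-end/function-begin notes, or the basic-block notes)
   and only once per function.  The TOY_* names below are illustrative.  */
#define TOY_SEEN_NOTE    1
#define TOY_SEEN_BB      2
#define TOY_SEEN_EMITTED 4

/* EVENT is TOY_SEEN_NOTE or TOY_SEEN_BB.  Returns nonzero exactly when a
   forced line note should be emitted, i.e. when this kind of event was
   already seen once and nothing has been forced yet.  */
static int
toy_force_line_note_p (int *seen, int event)
{
  if ((*seen & (TOY_SEEN_EMITTED | event)) == event)
    {
      *seen |= TOY_SEEN_EMITTED;
      return 1;
    }
  *seen |= event;
  return 0;
}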
2981 /* Return discriminator of the statement that produced this insn. */
2983 insn_discriminator (const_rtx insn)
2985 location_t loc = INSN_LOCATION (insn);
2986 if (!loc)
2987 return 0;
2988 return get_discriminator_from_locus (loc);
2992 /* Return whether a source line note needs to be emitted before INSN.
2993 Sets IS_STMT to TRUE if the line should be marked as a possible
2994 breakpoint location. */
2996 static bool
2997 notice_source_line (rtx insn, bool *is_stmt)
2999 const char *filename;
3000 int linenum;
3001 int discriminator;
3003 if (override_filename)
3005 filename = override_filename;
3006 linenum = override_linenum;
3007 discriminator = override_discriminator;
3009 else
3011 filename = insn_file (insn);
3012 linenum = insn_line (insn);
3013 discriminator = insn_discriminator (insn);
3016 if (filename == NULL)
3017 return false;
3019 if (force_source_line
3020 || filename != last_filename
3021 || last_linenum != linenum)
3023 force_source_line = false;
3024 last_filename = filename;
3025 last_linenum = linenum;
3026 last_discriminator = discriminator;
3027 *is_stmt = true;
3028 high_block_linenum = MAX (last_linenum, high_block_linenum);
3029 high_function_linenum = MAX (last_linenum, high_function_linenum);
3030 return true;
3033 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3035 /* If the discriminator changed, but the line number did not,
3036 output the line table entry with is_stmt false so the
3037 debugger does not treat this as a breakpoint location. */
3038 last_discriminator = discriminator;
3039 *is_stmt = false;
3040 return true;
3043 return false;
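/* Editorial sketch, not GCC code: the duplicate-suppression idea above in
   miniature.  A line entry is emitted only when the (file, line) pair
   changes or an emission was explicitly forced; a change in discriminator
   alone is reported but flagged as not being a statement boundary.  Unlike
   the function above, which can compare interned filename pointers, this
   sketch uses strcmp; the toy_* names are illustrative only.  */
#include <string.h>

struct toy_line_state
{
  const char *file;
  int line;
  int discrim;
  int force;
};

/* Returns 1 if a line entry should be emitted; *is_stmt tells whether it
   may serve as a breakpoint location.  */
static int
toy_notice_line (struct toy_line_state *st, const char *file, int line,
                 int discrim, int *is_stmt)
{
  if (file == NULL)
    return 0;
  if (st->force || st->file == NULL || strcmp (st->file, file) != 0
      || st->line != line)
    {
      st->force = 0;
      st->file = file;
      st->line = line;
      st->discrim = discrim;
      *is_stmt = 1;
      return 1;
    }
  if (st->discrim != discrim)
    {
      st->discrim = discrim;
      *is_stmt = 0;
      return 1;
    }
  return 0;
}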
3046 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3047 directly to the desired hard register. */
3049 void
3050 cleanup_subreg_operands (rtx insn)
3052 int i;
3053 bool changed = false;
3054 extract_insn_cached (insn);
3055 for (i = 0; i < recog_data.n_operands; i++)
3057 /* The following test cannot use recog_data.operand when testing
3058 for a SUBREG: the underlying object might have been changed
3059 already if we are inside a match_operator expression that
3060 matches the else clause. Instead we test the underlying
3061 expression directly. */
3062 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3064 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3065 changed = true;
3067 else if (GET_CODE (recog_data.operand[i]) == PLUS
3068 || GET_CODE (recog_data.operand[i]) == MULT
3069 || MEM_P (recog_data.operand[i]))
3070 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3073 for (i = 0; i < recog_data.n_dups; i++)
3075 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3077 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3078 changed = true;
3080 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3081 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3082 || MEM_P (*recog_data.dup_loc[i]))
3083 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3085 if (changed)
3086 df_insn_rescan (insn);
3089 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3090 the thing it is a subreg of. Do it anyway if FINAL_P. */
3093 alter_subreg (rtx *xp, bool final_p)
3095 rtx x = *xp;
3096 rtx y = SUBREG_REG (x);
3098 /* simplify_subreg does not remove subreg from volatile references.
3099 We are required to. */
3100 if (MEM_P (y))
3102 int offset = SUBREG_BYTE (x);
3104 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3105 contains 0 instead of the proper offset. See simplify_subreg. */
3106 if (offset == 0
3107 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3109 int difference = GET_MODE_SIZE (GET_MODE (y))
3110 - GET_MODE_SIZE (GET_MODE (x));
3111 if (WORDS_BIG_ENDIAN)
3112 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3113 if (BYTES_BIG_ENDIAN)
3114 offset += difference % UNITS_PER_WORD;
3117 if (final_p)
3118 *xp = adjust_address (y, GET_MODE (x), offset);
3119 else
3120 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3122 else
3124 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3125 SUBREG_BYTE (x));
3127 if (new_rtx != 0)
3128 *xp = new_rtx;
3129 else if (final_p && REG_P (y))
3131 /* Simplify_subreg can't handle some REG cases, but we have to. */
3132 unsigned int regno;
3133 HOST_WIDE_INT offset;
3135 regno = subreg_regno (x);
3136 if (subreg_lowpart_p (x))
3137 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3138 else
3139 offset = SUBREG_BYTE (x);
3140 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3144 return *xp;
3147 /* Do alter_subreg on all the SUBREGs contained in X. */
3149 static rtx
3150 walk_alter_subreg (rtx *xp, bool *changed)
3152 rtx x = *xp;
3153 switch (GET_CODE (x))
3155 case PLUS:
3156 case MULT:
3157 case AND:
3158 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3159 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3160 break;
3162 case MEM:
3163 case ZERO_EXTEND:
3164 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3165 break;
3167 case SUBREG:
3168 *changed = true;
3169 return alter_subreg (xp, true);
3171 default:
3172 break;
3175 return *xp;
3178 #ifdef HAVE_cc0
3180 /* Given BODY, the body of a jump instruction, alter the jump condition
3181 as required by the bits that are set in cc_status.flags.
3182 Not all of the bits there can be handled at this level in all cases.
3184 The value is normally 0.
3185 1 means that the condition has become always true.
3186 -1 means that the condition has become always false.
3187 2 means that COND has been altered. */
3189 static int
3190 alter_cond (rtx cond)
3192 int value = 0;
3194 if (cc_status.flags & CC_REVERSED)
3196 value = 2;
3197 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3200 if (cc_status.flags & CC_INVERTED)
3202 value = 2;
3203 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3206 if (cc_status.flags & CC_NOT_POSITIVE)
3207 switch (GET_CODE (cond))
3209 case LE:
3210 case LEU:
3211 case GEU:
3212 /* Jump becomes unconditional. */
3213 return 1;
3215 case GT:
3216 case GTU:
3217 case LTU:
3218 /* Jump becomes no-op. */
3219 return -1;
3221 case GE:
3222 PUT_CODE (cond, EQ);
3223 value = 2;
3224 break;
3226 case LT:
3227 PUT_CODE (cond, NE);
3228 value = 2;
3229 break;
3231 default:
3232 break;
3235 if (cc_status.flags & CC_NOT_NEGATIVE)
3236 switch (GET_CODE (cond))
3238 case GE:
3239 case GEU:
3240 /* Jump becomes unconditional. */
3241 return 1;
3243 case LT:
3244 case LTU:
3245 /* Jump becomes no-op. */
3246 return -1;
3248 case LE:
3249 case LEU:
3250 PUT_CODE (cond, EQ);
3251 value = 2;
3252 break;
3254 case GT:
3255 case GTU:
3256 PUT_CODE (cond, NE);
3257 value = 2;
3258 break;
3260 default:
3261 break;
3264 if (cc_status.flags & CC_NO_OVERFLOW)
3265 switch (GET_CODE (cond))
3267 case GEU:
3268 /* Jump becomes unconditional. */
3269 return 1;
3271 case LEU:
3272 PUT_CODE (cond, EQ);
3273 value = 2;
3274 break;
3276 case GTU:
3277 PUT_CODE (cond, NE);
3278 value = 2;
3279 break;
3281 case LTU:
3282 /* Jump becomes no-op. */
3283 return -1;
3285 default:
3286 break;
3289 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3290 switch (GET_CODE (cond))
3292 default:
3293 gcc_unreachable ();
3295 case NE:
3296 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3297 value = 2;
3298 break;
3300 case EQ:
3301 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3302 value = 2;
3303 break;
3306 if (cc_status.flags & CC_NOT_SIGNED)
3307 /* The flags are valid if signed condition operators are converted
3308 to unsigned. */
3309 switch (GET_CODE (cond))
3311 case LE:
3312 PUT_CODE (cond, LEU);
3313 value = 2;
3314 break;
3316 case LT:
3317 PUT_CODE (cond, LTU);
3318 value = 2;
3319 break;
3321 case GT:
3322 PUT_CODE (cond, GTU);
3323 value = 2;
3324 break;
3326 case GE:
3327 PUT_CODE (cond, GEU);
3328 value = 2;
3329 break;
3331 default:
3332 break;
3335 return value;
3337 #endif
3339 /* Report inconsistency between the assembler template and the operands.
3340 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3342 void
3343 output_operand_lossage (const char *cmsgid, ...)
3345 char *fmt_string;
3346 char *new_message;
3347 const char *pfx_str;
3348 va_list ap;
3350 va_start (ap, cmsgid);
3352 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3353 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3354 vasprintf (&new_message, fmt_string, ap);
3356 if (this_is_asm_operands)
3357 error_for_asm (this_is_asm_operands, "%s", new_message);
3358 else
3359 internal_error ("%s", new_message);
3361 free (fmt_string);
3362 free (new_message);
3363 va_end (ap);
3366 /* Output of assembler code from a template, and its subroutines. */
3368 /* Annotate the assembly with a comment describing the pattern and
3369 alternative used. */
3371 static void
3372 output_asm_name (void)
3374 if (debug_insn)
3376 int num = INSN_CODE (debug_insn);
3377 fprintf (asm_out_file, "\t%s %d\t%s",
3378 ASM_COMMENT_START, INSN_UID (debug_insn),
3379 insn_data[num].name);
3380 if (insn_data[num].n_alternatives > 1)
3381 fprintf (asm_out_file, "/%d", which_alternative + 1);
3383 if (HAVE_ATTR_length)
3384 fprintf (asm_out_file, "\t[length = %d]",
3385 get_attr_length (debug_insn));
3387 /* Clear this so only the first assembler insn
3388 of any rtl insn will get the special comment for -dp. */
3389 debug_insn = 0;
3393 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3394 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3395 corresponds to the address of the object and 0 if to the object. */
3397 static tree
3398 get_mem_expr_from_op (rtx op, int *paddressp)
3400 tree expr;
3401 int inner_addressp;
3403 *paddressp = 0;
3405 if (REG_P (op))
3406 return REG_EXPR (op);
3407 else if (!MEM_P (op))
3408 return 0;
3410 if (MEM_EXPR (op) != 0)
3411 return MEM_EXPR (op);
3413 /* Otherwise we have an address, so indicate it and look at the address. */
3414 *paddressp = 1;
3415 op = XEXP (op, 0);
3417 /* First check if we have a decl for the address, then look at the right side
3418 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3419 But don't allow the address to itself be indirect. */
3420 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3421 return expr;
3422 else if (GET_CODE (op) == PLUS
3423 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3424 return expr;
3426 while (UNARY_P (op)
3427 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3428 op = XEXP (op, 0);
3430 expr = get_mem_expr_from_op (op, &inner_addressp);
3431 return inner_addressp ? 0 : expr;
3434 /* Output operand names for assembler instructions. OPERANDS is the
3435 operand vector, OPORDER is the order to write the operands, and NOPS
3436 is the number of operands to write. */
3438 static void
3439 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3441 int wrote = 0;
3442 int i;
3444 for (i = 0; i < nops; i++)
3446 int addressp;
3447 rtx op = operands[oporder[i]];
3448 tree expr = get_mem_expr_from_op (op, &addressp);
3450 fprintf (asm_out_file, "%c%s",
3451 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3452 wrote = 1;
3453 if (expr)
3455 fprintf (asm_out_file, "%s",
3456 addressp ? "*" : "");
3457 print_mem_expr (asm_out_file, expr);
3458 wrote = 1;
3460 else if (REG_P (op) && ORIGINAL_REGNO (op)
3461 && ORIGINAL_REGNO (op) != REGNO (op))
3462 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3466 #ifdef ASSEMBLER_DIALECT
3467 /* Helper function to parse assembler dialects in the asm string.
3468 This is called from output_asm_insn and asm_fprintf. */
3469 static const char *
3470 do_assembler_dialects (const char *p, int *dialect)
3472 char c = *(p - 1);
3474 switch (c)
3476 case '{':
3478 int i;
3480 if (*dialect)
3481 output_operand_lossage ("nested assembly dialect alternatives");
3482 else
3483 *dialect = 1;
3485 /* If we want the first dialect, do nothing. Otherwise, skip
3486 DIALECT_NUMBER strings ending with '|'. */
3487 for (i = 0; i < dialect_number; i++)
3489 while (*p && *p != '}' && *p++ != '|')
3491 if (*p == '}')
3492 break;
3495 if (*p == '\0')
3496 output_operand_lossage ("unterminated assembly dialect alternative");
3498 break;
3500 case '|':
3501 if (*dialect)
3503 /* Skip to close brace. */
3506 if (*p == '\0')
3508 output_operand_lossage ("unterminated assembly dialect alternative");
3509 break;
3512 while (*p++ != '}');
3513 *dialect = 0;
3515 else
3516 putc (c, asm_out_file);
3517 break;
3519 case '}':
3520 if (! *dialect)
3521 putc (c, asm_out_file);
3522 *dialect = 0;
3523 break;
3524 default:
3525 gcc_unreachable ();
3528 return p;
3530 #endif
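/* Editorial sketch, not GCC code: the `{...|...}' dialect construct parsed
   above, reduced to a stand-alone helper.  Given a template containing
   "{att|intel}"-style groups, copy to OUT only the alternative selected by
   WHICH and pass every other character through unchanged; for example,
   with WHICH == 1 the template "fld{s|l}" comes out as "fldl".  The toy_*
   name is illustrative only.  */
#include <stdio.h>

static void
toy_emit_dialect (FILE *out, const char *templ, int which)
{
  const char *p;

  for (p = templ; *p; p++)
    {
      if (*p == '{')
        {
          int alt = 0;

          for (p++; *p && *p != '}'; p++)
            {
              if (*p == '|')
                alt++;
              else if (alt == which)
                putc (*p, out);
            }
          if (*p == '\0')
            break;              /* Unterminated group; stop quietly.  */
        }
      else
        putc (*p, out);
    }
}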
3532 /* Output text from TEMPLATE to the assembler output file,
3533 obeying %-directions to substitute operands taken from
3534 the vector OPERANDS.
3536 %N (for N a digit) means print operand N in usual manner.
3537 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3538 and print the label name with no punctuation.
3539 %cN means require operand N to be a constant
3540 and print the constant expression with no punctuation.
3541 %aN means expect operand N to be a memory address
3542 (not a memory reference!) and print a reference
3543 to that address.
3544 %nN means expect operand N to be a constant
3545 and print a constant expression for minus the value
3546 of the operand, with no other punctuation. */
3548 void
3549 output_asm_insn (const char *templ, rtx *operands)
3551 const char *p;
3552 int c;
3553 #ifdef ASSEMBLER_DIALECT
3554 int dialect = 0;
3555 #endif
3556 int oporder[MAX_RECOG_OPERANDS];
3557 char opoutput[MAX_RECOG_OPERANDS];
3558 int ops = 0;
3560 /* An insn may return a null string template
3561 in a case where no assembler code is needed. */
3562 if (*templ == 0)
3563 return;
3565 memset (opoutput, 0, sizeof opoutput);
3566 p = templ;
3567 putc ('\t', asm_out_file);
3569 #ifdef ASM_OUTPUT_OPCODE
3570 ASM_OUTPUT_OPCODE (asm_out_file, p);
3571 #endif
3573 while ((c = *p++))
3574 switch (c)
3576 case '\n':
3577 if (flag_verbose_asm)
3578 output_asm_operand_names (operands, oporder, ops);
3579 if (flag_print_asm_name)
3580 output_asm_name ();
3582 ops = 0;
3583 memset (opoutput, 0, sizeof opoutput);
3585 putc (c, asm_out_file);
3586 #ifdef ASM_OUTPUT_OPCODE
3587 while ((c = *p) == '\t')
3589 putc (c, asm_out_file);
3590 p++;
3592 ASM_OUTPUT_OPCODE (asm_out_file, p);
3593 #endif
3594 break;
3596 #ifdef ASSEMBLER_DIALECT
3597 case '{':
3598 case '}':
3599 case '|':
3600 p = do_assembler_dialects (p, &dialect);
3601 break;
3602 #endif
3604 case '%':
3605 /* %% outputs a single %. */
3606 if (*p == '%')
3608 p++;
3609 putc (c, asm_out_file);
3611 /* %= outputs a number which is unique to each insn in the entire
3612 compilation. This is useful for making local labels that are
3613 referred to more than once in a given insn. */
3614 else if (*p == '=')
3616 p++;
3617 fprintf (asm_out_file, "%d", insn_counter);
3619 /* % followed by a letter and some digits
3620 outputs an operand in a special way depending on the letter.
3621 Letters `acln' are implemented directly.
3622 Other letters are passed to `output_operand' so that
3623 the TARGET_PRINT_OPERAND hook can define them. */
3624 else if (ISALPHA (*p))
3626 int letter = *p++;
3627 unsigned long opnum;
3628 char *endptr;
3630 opnum = strtoul (p, &endptr, 10);
3632 if (endptr == p)
3633 output_operand_lossage ("operand number missing "
3634 "after %%-letter");
3635 else if (this_is_asm_operands && opnum >= insn_noperands)
3636 output_operand_lossage ("operand number out of range");
3637 else if (letter == 'l')
3638 output_asm_label (operands[opnum]);
3639 else if (letter == 'a')
3640 output_address (operands[opnum]);
3641 else if (letter == 'c')
3643 if (CONSTANT_ADDRESS_P (operands[opnum]))
3644 output_addr_const (asm_out_file, operands[opnum]);
3645 else
3646 output_operand (operands[opnum], 'c');
3648 else if (letter == 'n')
3650 if (CONST_INT_P (operands[opnum]))
3651 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3652 - INTVAL (operands[opnum]));
3653 else
3655 putc ('-', asm_out_file);
3656 output_addr_const (asm_out_file, operands[opnum]);
3659 else
3660 output_operand (operands[opnum], letter);
3662 if (!opoutput[opnum])
3663 oporder[ops++] = opnum;
3664 opoutput[opnum] = 1;
3666 p = endptr;
3667 c = *p;
3669 /* % followed by a digit outputs an operand the default way. */
3670 else if (ISDIGIT (*p))
3672 unsigned long opnum;
3673 char *endptr;
3675 opnum = strtoul (p, &endptr, 10);
3676 if (this_is_asm_operands && opnum >= insn_noperands)
3677 output_operand_lossage ("operand number out of range");
3678 else
3679 output_operand (operands[opnum], 0);
3681 if (!opoutput[opnum])
3682 oporder[ops++] = opnum;
3683 opoutput[opnum] = 1;
3685 p = endptr;
3686 c = *p;
3688 /* % followed by punctuation: output something for that
3689 punctuation character alone, with no operand. The
3690 TARGET_PRINT_OPERAND hook decides what is actually done. */
3691 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3692 output_operand (NULL_RTX, *p++);
3693 else
3694 output_operand_lossage ("invalid %%-code");
3695 break;
3697 default:
3698 putc (c, asm_out_file);
3701 /* Write out the variable names for operands, if we know them. */
3702 if (flag_verbose_asm)
3703 output_asm_operand_names (operands, oporder, ops);
3704 if (flag_print_asm_name)
3705 output_asm_name ();
3707 putc ('\n', asm_out_file);
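/* Editorial sketch, not GCC code: the heart of the %-substitution loop
   above, reduced to the plain %N and %% cases with string operands.  All
   target-specific letters, punctuation codes and dialect handling are
   omitted, and the toy_* names are illustrative only.  For example,
   toy_output_asm (stdout, "addl %1, %0", ops, 2) with ops[0] = "%eax" and
   ops[1] = "$4" prints "addl $4, %eax".  */
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>

static void
toy_output_asm (FILE *out, const char *templ, const char *const *operands,
                int noperands)
{
  const char *p;

  fputc ('\t', out);
  for (p = templ; *p; p++)
    {
      if (*p != '%')
        {
          fputc (*p, out);
          continue;
        }
      p++;
      if (*p == '\0')
        break;
      if (*p == '%')
        fputc ('%', out);
      else if (isdigit ((unsigned char) *p))
        {
          char *end;
          long n = strtol (p, &end, 10);

          if (n < noperands)
            fputs (operands[n], out);
          p = end - 1;          /* The loop's p++ steps past the digits.  */
        }
      /* A real implementation would diagnose any other %-code here.  */
    }
  fputc ('\n', out);
}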
3710 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3712 void
3713 output_asm_label (rtx x)
3715 char buf[256];
3717 if (GET_CODE (x) == LABEL_REF)
3718 x = XEXP (x, 0);
3719 if (LABEL_P (x)
3720 || (NOTE_P (x)
3721 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3722 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3723 else
3724 output_operand_lossage ("'%%l' operand isn't a label");
3726 assemble_name (asm_out_file, buf);
3729 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3730 output_operand. Marks SYMBOL_REFs as referenced through use of
3731 assemble_external. */
3733 static int
3734 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3736 rtx x = *xp;
3738 /* If we have a used symbol, we may have to emit assembly
3739 annotations corresponding to whether the symbol is external, weak
3740 or has non-default visibility. */
3741 if (GET_CODE (x) == SYMBOL_REF)
3743 tree t;
3745 t = SYMBOL_REF_DECL (x);
3746 if (t)
3747 assemble_external (t);
3749 return -1;
3752 return 0;
3755 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3757 void
3758 mark_symbol_refs_as_used (rtx x)
3760 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3763 /* Print operand X using machine-dependent assembler syntax.
3764 CODE is a non-digit that preceded the operand-number in the % spec,
3765 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3766 between the % and the digits.
3767 When CODE is a non-letter, X is 0.
3769 The meanings of the letters are machine-dependent and controlled
3770 by TARGET_PRINT_OPERAND. */
3772 void
3773 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3775 if (x && GET_CODE (x) == SUBREG)
3776 x = alter_subreg (&x, true);
3778 /* X must not be a pseudo reg. */
3779 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3781 targetm.asm_out.print_operand (asm_out_file, x, code);
3783 if (x == NULL_RTX)
3784 return;
3786 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3789 /* Print a memory reference operand for address X using
3790 machine-dependent assembler syntax. */
3792 void
3793 output_address (rtx x)
3795 bool changed = false;
3796 walk_alter_subreg (&x, &changed);
3797 targetm.asm_out.print_operand_address (asm_out_file, x);
3800 /* Print an integer constant expression in assembler syntax.
3801 Addition and subtraction are the only arithmetic
3802 that may appear in these expressions. */
3804 void
3805 output_addr_const (FILE *file, rtx x)
3807 char buf[256];
3809 restart:
3810 switch (GET_CODE (x))
3812 case PC:
3813 putc ('.', file);
3814 break;
3816 case SYMBOL_REF:
3817 if (SYMBOL_REF_DECL (x))
3818 assemble_external (SYMBOL_REF_DECL (x));
3819 #ifdef ASM_OUTPUT_SYMBOL_REF
3820 ASM_OUTPUT_SYMBOL_REF (file, x);
3821 #else
3822 assemble_name (file, XSTR (x, 0));
3823 #endif
3824 break;
3826 case LABEL_REF:
3827 x = XEXP (x, 0);
3828 /* Fall through. */
3829 case CODE_LABEL:
3830 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3831 #ifdef ASM_OUTPUT_LABEL_REF
3832 ASM_OUTPUT_LABEL_REF (file, buf);
3833 #else
3834 assemble_name (file, buf);
3835 #endif
3836 break;
3838 case CONST_INT:
3839 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3840 break;
3842 case CONST:
3843 /* This used to output parentheses around the expression,
3844 but that does not work on the 386 (either ATT or BSD assembler). */
3845 output_addr_const (file, XEXP (x, 0));
3846 break;
3848 case CONST_DOUBLE:
3849 if (GET_MODE (x) == VOIDmode)
3851 /* We can use %d if the number is one word and positive. */
3852 if (CONST_DOUBLE_HIGH (x))
3853 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3854 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3855 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3856 else if (CONST_DOUBLE_LOW (x) < 0)
3857 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3858 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3859 else
3860 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3862 else
3863 /* We can't handle floating point constants;
3864 PRINT_OPERAND must handle them. */
3865 output_operand_lossage ("floating constant misused");
3866 break;
3868 case CONST_FIXED:
3869 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3870 break;
3872 case PLUS:
3873 /* Some assemblers need integer constants to appear last (eg masm). */
3874 if (CONST_INT_P (XEXP (x, 0)))
3876 output_addr_const (file, XEXP (x, 1));
3877 if (INTVAL (XEXP (x, 0)) >= 0)
3878 fprintf (file, "+");
3879 output_addr_const (file, XEXP (x, 0));
3881 else
3883 output_addr_const (file, XEXP (x, 0));
3884 if (!CONST_INT_P (XEXP (x, 1))
3885 || INTVAL (XEXP (x, 1)) >= 0)
3886 fprintf (file, "+");
3887 output_addr_const (file, XEXP (x, 1));
3889 break;
3891 case MINUS:
3892 /* Avoid outputting things like x-x or x+5-x,
3893 since some assemblers can't handle that. */
3894 x = simplify_subtraction (x);
3895 if (GET_CODE (x) != MINUS)
3896 goto restart;
3898 output_addr_const (file, XEXP (x, 0));
3899 fprintf (file, "-");
3900 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3901 || GET_CODE (XEXP (x, 1)) == PC
3902 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3903 output_addr_const (file, XEXP (x, 1));
3904 else
3906 fputs (targetm.asm_out.open_paren, file);
3907 output_addr_const (file, XEXP (x, 1));
3908 fputs (targetm.asm_out.close_paren, file);
3910 break;
3912 case ZERO_EXTEND:
3913 case SIGN_EXTEND:
3914 case SUBREG:
3915 case TRUNCATE:
3916 output_addr_const (file, XEXP (x, 0));
3917 break;
3919 default:
3920 if (targetm.asm_out.output_addr_const_extra (file, x))
3921 break;
3923 output_operand_lossage ("invalid expression as operand");
3927 /* Output a quoted string. */
3929 void
3930 output_quoted_string (FILE *asm_file, const char *string)
3932 #ifdef OUTPUT_QUOTED_STRING
3933 OUTPUT_QUOTED_STRING (asm_file, string);
3934 #else
3935 char c;
3937 putc ('\"', asm_file);
3938 while ((c = *string++) != 0)
3940 if (ISPRINT (c))
3942 if (c == '\"' || c == '\\')
3943 putc ('\\', asm_file);
3944 putc (c, asm_file);
3946 else
3947 fprintf (asm_file, "\\%03o", (unsigned char) c);
3949 putc ('\"', asm_file);
3950 #endif
3953 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3955 void
3956 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3958 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3959 if (value == 0)
3960 putc ('0', f);
3961 else
3963 char *p = buf + sizeof (buf);
3965 *--p = "0123456789abcdef"[value % 16];
3966 while ((value /= 16) != 0);
3967 *--p = 'x';
3968 *--p = '0';
3969 fwrite (p, 1, buf + sizeof (buf) - p, f);
3973 /* Internal function that prints an unsigned long in decimal in reverse.
3974 The output string IS NOT null-terminated. */
3976 static int
3977 sprint_ul_rev (char *s, unsigned long value)
3979 int i = 0;
3982 s[i] = "0123456789"[value % 10];
3983 value /= 10;
3984 i++;
3985 /* alternate version, without modulo */
3986 /* oldval = value; */
3987 /* value /= 10; */
3988 /* s[i] = "0123456789" [oldval - 10*value]; */
3989 /* i++ */
3991 while (value != 0);
3992 return i;
3995 /* Write an unsigned long as decimal to a file, fast. */
3997 void
3998 fprint_ul (FILE *f, unsigned long value)
4000 /* python says: len(str(2**64)) == 20 */
4001 char s[20];
4002 int i;
4004 i = sprint_ul_rev (s, value);
4006 /* It's probably too small to bother with string reversal and fputs. */
4009 i--;
4010 putc (s[i], f);
4012 while (i != 0);
4015 /* Write an unsigned long as decimal to a string, fast.
4016 s must be wide enough to not overflow, at least 21 chars.
4017 Returns the length of the string (without terminating '\0'). */
4020 sprint_ul (char *s, unsigned long value)
4022 int len;
4023 char tmp_c;
4024 int i;
4025 int j;
4027 len = sprint_ul_rev (s, value);
4028 s[len] = '\0';
4030 /* Reverse the string. */
4031 i = 0;
4032 j = len - 1;
4033 while (i < j)
4035 tmp_c = s[i];
4036 s[i] = s[j];
4037 s[j] = tmp_c;
4038 i++; j--;
4041 return len;
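/* Editorial usage sketch for the function above: the 21-character minimum
   covers the 20 digits of the largest 64-bit value plus the terminating
   NUL, and the documented return value is the string length.  */
static int
toy_sprint_ul_example (void)
{
  char buf[21];

  return sprint_ul (buf, (unsigned long) -1);   /* 20 on 64-bit hosts.  */
}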
4044 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4045 %R prints the value of REGISTER_PREFIX.
4046 %L prints the value of LOCAL_LABEL_PREFIX.
4047 %U prints the value of USER_LABEL_PREFIX.
4048 %I prints the value of IMMEDIATE_PREFIX.
4049 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4050 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4052 We handle alternate assembler dialects here, just like output_asm_insn. */
4054 void
4055 asm_fprintf (FILE *file, const char *p, ...)
4057 char buf[10];
4058 char *q, c;
4059 #ifdef ASSEMBLER_DIALECT
4060 int dialect = 0;
4061 #endif
4062 va_list argptr;
4064 va_start (argptr, p);
4066 buf[0] = '%';
4068 while ((c = *p++))
4069 switch (c)
4071 #ifdef ASSEMBLER_DIALECT
4072 case '{':
4073 case '}':
4074 case '|':
4075 p = do_assembler_dialects (p, &dialect);
4076 break;
4077 #endif
4079 case '%':
4080 c = *p++;
4081 q = &buf[1];
4082 while (strchr ("-+ #0", c))
4084 *q++ = c;
4085 c = *p++;
4087 while (ISDIGIT (c) || c == '.')
4089 *q++ = c;
4090 c = *p++;
4092 switch (c)
4094 case '%':
4095 putc ('%', file);
4096 break;
4098 case 'd': case 'i': case 'u':
4099 case 'x': case 'X': case 'o':
4100 case 'c':
4101 *q++ = c;
4102 *q = 0;
4103 fprintf (file, buf, va_arg (argptr, int));
4104 break;
4106 case 'w':
4107 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4108 'o' cases, but we do not check for those cases. It
4109 means that the value is a HOST_WIDE_INT, which may be
4110 either `long' or `long long'. */
4111 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4112 q += strlen (HOST_WIDE_INT_PRINT);
4113 *q++ = *p++;
4114 *q = 0;
4115 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4116 break;
4118 case 'l':
4119 *q++ = c;
4120 #ifdef HAVE_LONG_LONG
4121 if (*p == 'l')
4123 *q++ = *p++;
4124 *q++ = *p++;
4125 *q = 0;
4126 fprintf (file, buf, va_arg (argptr, long long));
4128 else
4129 #endif
4131 *q++ = *p++;
4132 *q = 0;
4133 fprintf (file, buf, va_arg (argptr, long));
4136 break;
4138 case 's':
4139 *q++ = c;
4140 *q = 0;
4141 fprintf (file, buf, va_arg (argptr, char *));
4142 break;
4144 case 'O':
4145 #ifdef ASM_OUTPUT_OPCODE
4146 ASM_OUTPUT_OPCODE (asm_out_file, p);
4147 #endif
4148 break;
4150 case 'R':
4151 #ifdef REGISTER_PREFIX
4152 fprintf (file, "%s", REGISTER_PREFIX);
4153 #endif
4154 break;
4156 case 'I':
4157 #ifdef IMMEDIATE_PREFIX
4158 fprintf (file, "%s", IMMEDIATE_PREFIX);
4159 #endif
4160 break;
4162 case 'L':
4163 #ifdef LOCAL_LABEL_PREFIX
4164 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4165 #endif
4166 break;
4168 case 'U':
4169 fputs (user_label_prefix, file);
4170 break;
4172 #ifdef ASM_FPRINTF_EXTENSIONS
4173 /* Uppercase letters are reserved for general use by asm_fprintf
4174 and so are not available to target specific code. In order to
4175 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4176 they are defined here. As they get turned into real extensions
4177 to asm_fprintf they should be removed from this list. */
4178 case 'A': case 'B': case 'C': case 'D': case 'E':
4179 case 'F': case 'G': case 'H': case 'J': case 'K':
4180 case 'M': case 'N': case 'P': case 'Q': case 'S':
4181 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4182 break;
4184 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4185 #endif
4186 default:
4187 gcc_unreachable ();
4189 break;
4191 default:
4192 putc (c, file);
4194 va_end (argptr);
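/* Editorial sketch, not GCC code: the prefix expansion performed by
   asm_fprintf, reduced to the %R and %U cases with caller-supplied
   prefix strings.  The prefixes here are placeholders rather than any
   target's real REGISTER_PREFIX / USER_LABEL_PREFIX values, and the
   toy_* name is illustrative only.  */
#include <stdio.h>

static void
toy_asm_fprintf (FILE *out, const char *fmt, const char *reg_prefix,
                 const char *label_prefix)
{
  const char *p;

  for (p = fmt; *p; p++)
    {
      if (*p != '%')
        {
          putc (*p, out);
          continue;
        }
      p++;
      switch (*p)
        {
        case 'R': fputs (reg_prefix, out); break;    /* register prefix  */
        case 'U': fputs (label_prefix, out); break;  /* user label prefix  */
        case '%': putc ('%', out); break;
        case '\0': return;                           /* trailing '%'  */
        default:  putc ('%', out); putc (*p, out); break;
        }
    }
}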
4197 /* Return nonzero if this function has no function calls. */
4200 leaf_function_p (void)
4202 rtx insn;
4204 if (crtl->profile || profile_arc_flag)
4205 return 0;
4207 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4209 if (CALL_P (insn)
4210 && ! SIBLING_CALL_P (insn))
4211 return 0;
4212 if (NONJUMP_INSN_P (insn)
4213 && GET_CODE (PATTERN (insn)) == SEQUENCE
4214 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4215 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4216 return 0;
4219 return 1;
4222 /* Return 1 if branch is a forward branch.
4223 Uses insn_shuid array, so it works only in the final pass. May be used by
4224 output templates to add customary branch prediction hints. */
4227 final_forward_branch_p (rtx insn)
4229 int insn_id, label_id;
4231 gcc_assert (uid_shuid);
4232 insn_id = INSN_SHUID (insn);
4233 label_id = INSN_SHUID (JUMP_LABEL (insn));
4234 /* We've hit some insns that do not have id information available. */
4235 gcc_assert (insn_id && label_id);
4236 return insn_id < label_id;
4239 /* On some machines, a function with no call insns
4240 can run faster if it doesn't create its own register window.
4241 When output, the leaf function should use only the "output"
4242 registers. Ordinarily, the function would be compiled to use
4243 the "input" registers to find its arguments; it is a candidate
4244 for leaf treatment if it uses only the "input" registers.
4245 Leaf function treatment means renumbering so the function
4246 uses the "output" registers instead. */
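/* A sketch of how a target might satisfy this interface; the definitions
   below are illustrative, not taken from any particular back end:

       #define LEAF_REGISTERS { ... }             initialized array indexed
                                                  by hard regno; nonzero
                                                  means the register may be
                                                  used in a leaf function
       #define LEAF_REG_REMAP(REGNO) leaf_reg_remap[(REGNO)]

   where leaf_reg_remap maps each permitted "input" register to the
   corresponding "output" register (or to itself) and maps registers that
   cannot be remapped to -1, matching the gcc_assert in
   leaf_renumber_regs_insn below.  */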
4248 #ifdef LEAF_REGISTERS
4250 /* Return 1 if this function uses only the registers that can be
4251 safely renumbered. */
4254 only_leaf_regs_used (void)
4256 int i;
4257 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4260 if ((df_regs_ever_live_p (i) || global_regs[i])
4261 && ! permitted_reg_in_leaf_functions[i])
4262 return 0;
4264 if (crtl->uses_pic_offset_table
4265 && pic_offset_table_rtx != 0
4266 && REG_P (pic_offset_table_rtx)
4267 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4268 return 0;
4270 return 1;
4273 /* Scan all instructions and renumber all registers into those
4274 available in leaf functions. */
4276 static void
4277 leaf_renumber_regs (rtx first)
4279 rtx insn;
4281 /* Renumber only the actual patterns.
4282 The reg-notes can contain frame pointer refs,
4283 and renumbering them could cause a crash, and should not be needed. */
4284 for (insn = first; insn; insn = NEXT_INSN (insn))
4285 if (INSN_P (insn))
4286 leaf_renumber_regs_insn (PATTERN (insn));
4289 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4290 available in leaf functions. */
4292 void
4293 leaf_renumber_regs_insn (rtx in_rtx)
4295 int i, j;
4296 const char *format_ptr;
4298 if (in_rtx == 0)
4299 return;
4301 /* Renumber all input-registers into output-registers.
4302 The `used' flag on a REG marks registers that have already been
4303 renumbered, so each register is remapped only once. */
4305 if (REG_P (in_rtx))
4307 int newreg;
4309 /* Don't renumber the same reg twice. */
4310 if (in_rtx->used)
4311 return;
4313 newreg = REGNO (in_rtx);
4314 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4315 to reach here as part of a REG_NOTE. */
4316 if (newreg >= FIRST_PSEUDO_REGISTER)
4318 in_rtx->used = 1;
4319 return;
4321 newreg = LEAF_REG_REMAP (newreg);
4322 gcc_assert (newreg >= 0);
4323 df_set_regs_ever_live (REGNO (in_rtx), false);
4324 df_set_regs_ever_live (newreg, true);
4325 SET_REGNO (in_rtx, newreg);
4326 in_rtx->used = 1;
4329 if (INSN_P (in_rtx))
4331 /* Inside a SEQUENCE, we find insns.
4332 Renumber just the patterns of these insns,
4333 just as we do for the top-level insns. */
4334 leaf_renumber_regs_insn (PATTERN (in_rtx));
4335 return;
4338 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4340 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4341 switch (*format_ptr++)
4343 case 'e':
4344 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4345 break;
4347 case 'E':
4348 if (NULL != XVEC (in_rtx, i))
4350 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4351 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4353 break;
4355 case 'S':
4356 case 's':
4357 case '0':
4358 case 'i':
4359 case 'w':
4360 case 'n':
4361 case 'u':
4362 break;
4364 default:
4365 gcc_unreachable ();
4368 #endif
4370 /* List the call graph profiled edges whose count is greater than
4371 PARAM_GNU_CGRAPH_SECTION_EDGE_THRESHOLD in the
4372 ".gnu.callgraph.text" section. */
4373 static void
4374 dump_cgraph_profiles (void)
4376 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4377 struct cgraph_edge *e;
4378 struct cgraph_node *callee;
4380 for (e = node->callees; e != NULL; e = e->next_callee)
4382 if (e->count <= PARAM_VALUE (PARAM_GNU_CGRAPH_SECTION_EDGE_THRESHOLD))
4383 continue;
4384 callee = e->callee;
4385 fprintf (asm_out_file, "\t.string \"%s\"\n",
4386 IDENTIFIER_POINTER (decl_assembler_name (callee->symbol.decl)));
4387 fprintf (asm_out_file, "\t.string \"" HOST_WIDEST_INT_PRINT_DEC "\"\n",
4388 e->count);
4392 /* Iterate through the basic blocks in DECL and get the max count.
4393 If COLD is true, find the max count of the cold part of the split. */
4394 static gcov_type
4395 get_max_count (tree decl, bool cold)
4397 basic_block bb;
4398 gcov_type max_count = cold ? 0 :(cgraph_get_node (decl))->count;
4400 FOR_EACH_BB (bb)
4402 if (cold && BB_PARTITION (bb) != BB_COLD_PARTITION)
4403 continue;
4404 if (bb->count > max_count)
4405 max_count = bb->count;
4407 return max_count;
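/* A worked example with made-up numbers: if the function's entry count is
   5000 and its blocks have counts 5000, 8000 and 2000, then
   get_max_count (decl, false) returns 8000, while get_max_count (decl,
   true) returns the largest count among blocks placed in
   BB_COLD_PARTITION (0 if the function was not split).  */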
4410 /* Turn the RTL into assembly. */
4411 static unsigned int
4412 rest_of_handle_final (void)
4414 rtx x;
4415 const char *fnname;
4416 char *profile_fnname;
4417 unsigned int flags;
4419 /* Get the function's name, as described by its RTL. This may be
4420 different from the DECL_NAME name used in the source file. */
4422 x = DECL_RTL (current_function_decl);
4423 gcc_assert (MEM_P (x));
4424 x = XEXP (x, 0);
4425 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4426 fnname = XSTR (x, 0);
4428 has_cold_section_p = false;
4430 assemble_start_function (current_function_decl, fnname);
4431 final_start_function (get_insns (), asm_out_file, optimize);
4432 final (get_insns (), asm_out_file, optimize);
4433 final_end_function ();
4435 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4436 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4437 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4438 output_function_exception_table (fnname);
4440 assemble_end_function (current_function_decl, fnname);
4442 user_defined_section_attribute = false;
4444 /* Free up reg info memory. */
4445 free_reg_info ();
4447 if (! quiet_flag)
4448 fflush (asm_out_file);
4450 /* Write DBX symbols if requested. */
4452 /* Note that for those inline functions where we don't initially
4453 know for certain that we will be generating an out-of-line copy,
4454 the first invocation of this routine (rest_of_compilation) will
4455 skip over this code by doing a `goto exit_rest_of_compilation;'.
4456 Later on, wrapup_global_declarations will (indirectly) call
4457 rest_of_compilation again for those inline functions that need
4458 to have out-of-line copies generated. During that call, we
4459 *will* be routed past here. */
4461 timevar_push (TV_SYMOUT);
4462 if (!DECL_IGNORED_P (current_function_decl))
4463 debug_hooks->function_decl (current_function_decl);
4464 timevar_pop (TV_SYMOUT);
4466 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4467 DECL_INITIAL (current_function_decl) = error_mark_node;
4469 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4470 && targetm.have_ctors_dtors)
4471 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4472 decl_init_priority_lookup
4473 (current_function_decl));
4474 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4475 && targetm.have_ctors_dtors)
4476 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4477 decl_fini_priority_lookup
4478 (current_function_decl));
4480 /* With -fcallgraph-profiles-sections and -freorder-functions=,
4481 add ".gnu.callgraph.text" section for storing profiling information. */
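/* For illustration only (hypothetical names and counts): for a function
   "foo" with node count 5000, a maximum block count of 8000, and a single
   sufficiently hot call to "bar" executed 1200 times, the section
   .gnu.callgraph.text.foo would contain roughly:

       .string "Function foo"
       .string "Weight 5000 8000"
       .string "bar"
       .string "1200"

   with an additional "ColdWeight N" string when part of the function was
   split into a cold section.  */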
4482 if ((flag_reorder_functions > 1)
4483 && (flag_profile_use || flag_auto_profile)
4484 && cgraph_get_node (current_function_decl) != NULL
4485 && ((cgraph_get_node (current_function_decl))->callees != NULL
4486 || (cgraph_get_node (current_function_decl))->count > 0))
4488 flags = SECTION_DEBUG | SECTION_EXCLUDE;
4489 asprintf (&profile_fnname, ".gnu.callgraph.text.%s", fnname);
4490 switch_to_section (get_section (profile_fnname, flags, NULL));
4491 fprintf (asm_out_file, "\t.string \"Function %s\"\n", fnname);
4492 fprintf (asm_out_file, "\t.string \"Weight "
4493 HOST_WIDEST_INT_PRINT_DEC
4495 HOST_WIDEST_INT_PRINT_DEC
4496 "\"\n",
4497 (cgraph_get_node (current_function_decl))->count,
4498 get_max_count (current_function_decl, false));
4499 /* If this function is split into a cold section, record that weight
4500 here. */
4501 if (has_cold_section_p)
4502 fprintf (asm_out_file, "\t.string \"ColdWeight "
4503 HOST_WIDEST_INT_PRINT_DEC
4504 "\"\n",
4505 get_max_count (current_function_decl, true));
4506 dump_cgraph_profiles ();
4507 free (profile_fnname);
4509 return 0;
4512 struct rtl_opt_pass pass_final =
4515 RTL_PASS,
4516 "final", /* name */
4517 OPTGROUP_NONE, /* optinfo_flags */
4518 NULL, /* gate */
4519 rest_of_handle_final, /* execute */
4520 NULL, /* sub */
4521 NULL, /* next */
4522 0, /* static_pass_number */
4523 TV_FINAL, /* tv_id */
4524 0, /* properties_required */
4525 0, /* properties_provided */
4526 0, /* properties_destroyed */
4527 0, /* todo_flags_start */
4528 TODO_ggc_collect /* todo_flags_finish */
4533 static unsigned int
4534 rest_of_handle_shorten_branches (void)
4536 /* Shorten branches. */
4537 shorten_branches (get_insns ());
4538 return 0;
4541 struct rtl_opt_pass pass_shorten_branches =
4544 RTL_PASS,
4545 "shorten", /* name */
4546 OPTGROUP_NONE, /* optinfo_flags */
4547 NULL, /* gate */
4548 rest_of_handle_shorten_branches, /* execute */
4549 NULL, /* sub */
4550 NULL, /* next */
4551 0, /* static_pass_number */
4552 TV_SHORTEN_BRANCH, /* tv_id */
4553 0, /* properties_required */
4554 0, /* properties_provided */
4555 0, /* properties_destroyed */
4556 0, /* todo_flags_start */
4557 0 /* todo_flags_finish */
4562 static unsigned int
4563 rest_of_clean_state (void)
4565 rtx insn, next;
4566 FILE *final_output = NULL;
4567 int save_unnumbered = flag_dump_unnumbered;
4568 int save_noaddr = flag_dump_noaddr;
4570 if (flag_dump_final_insns)
4572 final_output = fopen (flag_dump_final_insns, "a");
4573 if (!final_output)
4575 error ("could not open final insn dump file %qs: %m",
4576 flag_dump_final_insns);
4577 flag_dump_final_insns = NULL;
4579 else
4581 flag_dump_noaddr = flag_dump_unnumbered = 1;
4582 if (flag_compare_debug_opt || flag_compare_debug)
4583 dump_flags |= TDF_NOUID;
4584 dump_function_header (final_output, current_function_decl,
4585 dump_flags);
4586 final_insns_dump_p = true;
4588 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4589 if (LABEL_P (insn))
4590 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4591 else
4593 if (NOTE_P (insn))
4594 set_block_for_insn (insn, NULL);
4595 INSN_UID (insn) = 0;
4600 /* It is very important to decompose the RTL instruction chain here:
4601 debug information keeps pointing into CODE_LABEL insns inside the function
4602 body. If these remain pointing to the other insns, we end up preserving the
4603 whole RTL chain and the attached detailed debug info in memory. */
4604 for (insn = get_insns (); insn; insn = next)
4606 next = NEXT_INSN (insn);
4607 NEXT_INSN (insn) = NULL;
4608 PREV_INSN (insn) = NULL;
4610 if (final_output
4611 && (!NOTE_P (insn) ||
4612 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4613 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4614 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4615 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4616 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4617 print_rtl_single (final_output, insn);
4620 if (final_output)
4622 flag_dump_noaddr = save_noaddr;
4623 flag_dump_unnumbered = save_unnumbered;
4624 final_insns_dump_p = false;
4626 if (fclose (final_output))
4628 error ("could not close final insn dump file %qs: %m",
4629 flag_dump_final_insns);
4630 flag_dump_final_insns = NULL;
4634 /* In case the function was not output,
4635 don't leave any temporary anonymous types
4636 queued up for sdb output. */
4637 #ifdef SDB_DEBUGGING_INFO
4638 if (write_symbols == SDB_DEBUG)
4639 sdbout_types (NULL_TREE);
4640 #endif
4642 flag_rerun_cse_after_global_opts = 0;
4643 reload_completed = 0;
4644 epilogue_completed = 0;
4645 #ifdef STACK_REGS
4646 regstack_completed = 0;
4647 #endif
4649 /* Clear out the insn_length contents now that they are no
4650 longer valid. */
4651 init_insn_lengths ();
4653 /* Show no temporary slots allocated. */
4654 init_temp_slots ();
4656 free_bb_for_insn ();
4658 delete_tree_ssa ();
4660 /* We can reduce the stack alignment at call sites only when we are sure that
4661 the function body just produced will actually be used in the final
4662 executable. */
4663 if (decl_binds_to_current_def_p (current_function_decl))
4665 unsigned int pref = crtl->preferred_stack_boundary;
4666 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4667 pref = crtl->stack_alignment_needed;
4668 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4669 = pref;
4672 /* Make sure volatile mem refs aren't considered valid operands for
4673 arithmetic insns. We must call this here if this is a nested inline
4674 function, since the above code leaves us in the init_recog state,
4675 and the function context push/pop code does not save/restore volatile_ok.
4677 ??? Maybe it isn't necessary for expand_start_function to call this
4678 anymore if we do it here? */
4680 init_recog_no_volatile ();
4682 /* We're done with this function. Free up memory if we can. */
4683 free_after_parsing (cfun);
4684 free_after_compilation (cfun);
4685 return 0;
4688 struct rtl_opt_pass pass_clean_state =
4691 RTL_PASS,
4692 "*clean_state", /* name */
4693 OPTGROUP_NONE, /* optinfo_flags */
4694 NULL, /* gate */
4695 rest_of_clean_state, /* execute */
4696 NULL, /* sub */
4697 NULL, /* next */
4698 0, /* static_pass_number */
4699 TV_FINAL, /* tv_id */
4700 0, /* properties_required */
4701 0, /* properties_provided */
4702 PROP_rtl, /* properties_destroyed */
4703 0, /* todo_flags_start */
4704 0 /* todo_flags_finish */