gcc/final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
 41    The code for the function prologue and epilogue is generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "tm_p.h"
56 #include "insn-config.h"
57 #include "regs.h"
58 #include "emit-rtl.h"
59 #include "recog.h"
60 #include "cgraph.h"
61 #include "tree-pretty-print.h" /* for dump_function_header */
62 #include "varasm.h"
63 #include "insn-attr.h"
64 #include "conditions.h"
65 #include "flags.h"
66 #include "output.h"
67 #include "except.h"
68 #include "rtl-error.h"
69 #include "toplev.h" /* exact_log2, floor_log2 */
70 #include "reload.h"
71 #include "intl.h"
72 #include "cfgrtl.h"
73 #include "debug.h"
74 #include "tree-pass.h"
75 #include "tree-ssa.h"
76 #include "cfgloop.h"
77 #include "params.h"
78 #include "asan.h"
79 #include "rtl-iter.h"
80 #include "print-rtl.h"
82 #ifdef XCOFF_DEBUGGING_INFO
83 #include "xcoffout.h" /* Needed for external data declarations. */
84 #endif
86 #include "dwarf2out.h"
88 #ifdef DBX_DEBUGGING_INFO
89 #include "dbxout.h"
90 #endif
92 #include "sdbout.h"
94 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
95 So define a null default for it to save conditionalization later. */
96 #ifndef CC_STATUS_INIT
97 #define CC_STATUS_INIT
98 #endif
100 /* Is the given character a logical line separator for the assembler? */
101 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
102 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
103 #endif
105 #ifndef JUMP_TABLES_IN_TEXT_SECTION
106 #define JUMP_TABLES_IN_TEXT_SECTION 0
107 #endif
109 /* Bitflags used by final_scan_insn. */
110 #define SEEN_NOTE 1
111 #define SEEN_EMITTED 2
113 /* Last insn processed by final_scan_insn. */
114 static rtx_insn *debug_insn;
115 rtx_insn *current_output_insn;
117 /* Line number of last NOTE. */
118 static int last_linenum;
120 /* Last discriminator written to assembly. */
121 static int last_discriminator;
123 /* Discriminator of current block. */
124 static int discriminator;
126 /* Highest line number in current block. */
127 static int high_block_linenum;
129 /* Likewise for function. */
130 static int high_function_linenum;
132 /* Filename of last NOTE. */
133 static const char *last_filename;
135 /* Override filename and line number. */
136 static const char *override_filename;
137 static int override_linenum;
139 /* Whether to force emission of a line note before the next insn. */
140 static bool force_source_line = false;
142 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
144 /* Nonzero while outputting an `asm' with operands.
145 This means that inconsistencies are the user's fault, so don't die.
146 The precise value is the insn being output, to pass to error_for_asm. */
147 const rtx_insn *this_is_asm_operands;
149 /* Number of operands of this insn, for an `asm' with operands. */
150 static unsigned int insn_noperands;
152 /* Compare optimization flag. */
154 static rtx last_ignored_compare = 0;
156 /* Assign a unique number to each insn that is output.
157 This can be used to generate unique local labels. */
159 static int insn_counter = 0;
161 /* This variable contains machine-dependent flags (defined in tm.h)
162 set and examined by output routines
163 that describe how to interpret the condition codes properly. */
165 CC_STATUS cc_status;
167 /* During output of an insn, this contains a copy of cc_status
168 from before the insn. */
170 CC_STATUS cc_prev_status;
172 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
174 static int block_depth;
176 /* Nonzero if have enabled APP processing of our assembler output. */
178 static int app_on;
180 /* If we are outputting an insn sequence, this contains the sequence rtx.
181 Zero otherwise. */
183 rtx_sequence *final_sequence;
185 #ifdef ASSEMBLER_DIALECT
187 /* Number of the assembler dialect to use, starting at 0. */
188 static int dialect_number;
189 #endif
191 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
192 rtx current_insn_predicate;
194 /* True if printing into -fdump-final-insns= dump. */
195 bool final_insns_dump_p;
197 /* True if profile_function should be called, but hasn't been called yet. */
198 static bool need_profile_function;
200 static int asm_insn_count (rtx);
201 static void profile_function (FILE *);
202 static void profile_after_prologue (FILE *);
203 static bool notice_source_line (rtx_insn *, bool *);
204 static rtx walk_alter_subreg (rtx *, bool *);
205 static void output_asm_name (void);
206 static void output_alternate_entry_point (FILE *, rtx_insn *);
207 static tree get_mem_expr_from_op (rtx, int *);
208 static void output_asm_operand_names (rtx *, int *, int);
209 #ifdef LEAF_REGISTERS
210 static void leaf_renumber_regs (rtx_insn *);
211 #endif
212 #if HAVE_cc0
213 static int alter_cond (rtx);
214 #endif
215 #ifndef ADDR_VEC_ALIGN
216 static int final_addr_vec_align (rtx);
217 #endif
218 static int align_fuzz (rtx, rtx, int, unsigned);
219 static void collect_fn_hard_reg_usage (void);
220 static tree get_call_fndecl (rtx_insn *);
222 /* Initialize data in final at the beginning of a compilation. */
224 void
225 init_final (const char *filename ATTRIBUTE_UNUSED)
227 app_on = 0;
228 final_sequence = 0;
230 #ifdef ASSEMBLER_DIALECT
231 dialect_number = ASSEMBLER_DIALECT;
232 #endif
235 /* Default target function prologue and epilogue assembler output.
237 If not overridden for epilogue code, then the function body itself
238 contains return instructions wherever needed. */
239 void
240 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
245 void
246 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
247 tree decl ATTRIBUTE_UNUSED,
248 bool new_is_cold ATTRIBUTE_UNUSED)
252 /* Default target hook that outputs nothing to a stream. */
253 void
254 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
258 /* Enable APP processing of subsequent output.
259 Used before the output from an `asm' statement. */
261 void
262 app_enable (void)
264 if (! app_on)
266 fputs (ASM_APP_ON, asm_out_file);
267 app_on = 1;
271 /* Disable APP processing of subsequent output.
272 Called from varasm.c before most kinds of output. */
274 void
275 app_disable (void)
277 if (app_on)
279 fputs (ASM_APP_OFF, asm_out_file);
280 app_on = 0;
284 /* Return the number of slots filled in the current
285 delayed branch sequence (we don't count the insn needing the
286 delay slot). Zero if not in a delayed branch sequence. */
289 dbr_sequence_length (void)
291 if (final_sequence != 0)
292 return XVECLEN (final_sequence, 0) - 1;
293 else
294 return 0;
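/* Editorial note (illustration added by the editor, not in the original
   source): if final_sequence currently holds a branch followed by two
   filled delay slots, XVECLEN (final_sequence, 0) is 3 and
   dbr_sequence_length returns 2.  */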
297 /* The next two pages contain routines used to compute the length of an insn
298 and to shorten branches. */
300 /* Arrays for insn lengths, and addresses. The latter is referenced by
301 `insn_current_length'. */
303 static int *insn_lengths;
305 vec<int> insn_addresses_;
307 /* Max uid for which the above arrays are valid. */
308 static int insn_lengths_max_uid;
310 /* Address of insn being processed. Used by `insn_current_length'. */
311 int insn_current_address;
313 /* Address of insn being processed in previous iteration. */
314 int insn_last_address;
 316 /* Known invariant alignment of insn being processed.  */
317 int insn_current_align;
319 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
320 gives the next following alignment insn that increases the known
321 alignment, or NULL_RTX if there is no such insn.
322 For any alignment obtained this way, we can again index uid_align with
323 its uid to obtain the next following align that in turn increases the
324 alignment, till we reach NULL_RTX; the sequence obtained this way
325 for each insn we'll call the alignment chain of this insn in the following
326 comments. */
328 struct label_alignment
330 short alignment;
331 short max_skip;
334 static rtx *uid_align;
335 static int *uid_shuid;
336 static struct label_alignment *label_align;
338 /* Indicate that branch shortening hasn't yet been done. */
340 void
341 init_insn_lengths (void)
343 if (uid_shuid)
345 free (uid_shuid);
346 uid_shuid = 0;
348 if (insn_lengths)
350 free (insn_lengths);
351 insn_lengths = 0;
352 insn_lengths_max_uid = 0;
354 if (HAVE_ATTR_length)
355 INSN_ADDRESSES_FREE ();
356 if (uid_align)
358 free (uid_align);
359 uid_align = 0;
363 /* Obtain the current length of an insn. If branch shortening has been done,
364 get its actual length. Otherwise, use FALLBACK_FN to calculate the
365 length. */
366 static int
367 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
369 rtx body;
370 int i;
371 int length = 0;
373 if (!HAVE_ATTR_length)
374 return 0;
376 if (insn_lengths_max_uid > INSN_UID (insn))
377 return insn_lengths[INSN_UID (insn)];
378 else
379 switch (GET_CODE (insn))
381 case NOTE:
382 case BARRIER:
383 case CODE_LABEL:
384 case DEBUG_INSN:
385 return 0;
387 case CALL_INSN:
388 case JUMP_INSN:
389 length = fallback_fn (insn);
390 break;
392 case INSN:
393 body = PATTERN (insn);
394 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
395 return 0;
397 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
398 length = asm_insn_count (body) * fallback_fn (insn);
399 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
400 for (i = 0; i < seq->len (); i++)
401 length += get_attr_length_1 (seq->insn (i), fallback_fn);
402 else
403 length = fallback_fn (insn);
404 break;
406 default:
407 break;
410 #ifdef ADJUST_INSN_LENGTH
411 ADJUST_INSN_LENGTH (insn, length);
412 #endif
413 return length;
416 /* Obtain the current length of an insn. If branch shortening has been done,
417 get its actual length. Otherwise, get its maximum length. */
419 get_attr_length (rtx_insn *insn)
421 return get_attr_length_1 (insn, insn_default_length);
424 /* Obtain the current length of an insn. If branch shortening has been done,
425 get its actual length. Otherwise, get its minimum length. */
427 get_attr_min_length (rtx_insn *insn)
429 return get_attr_length_1 (insn, insn_min_length);
432 /* Code to handle alignment inside shorten_branches. */
 434 /* Here is an explanation of how the algorithm in align_fuzz can give
435 proper results:
437 Call a sequence of instructions beginning with alignment point X
438 and continuing until the next alignment point `block X'. When `X'
439 is used in an expression, it means the alignment value of the
440 alignment point.
442 Call the distance between the start of the first insn of block X, and
443 the end of the last insn of block X `IX', for the `inner size of X'.
444 This is clearly the sum of the instruction lengths.
446 Likewise with the next alignment-delimited block following X, which we
447 shall call block Y.
449 Call the distance between the start of the first insn of block X, and
450 the start of the first insn of block Y `OX', for the `outer size of X'.
452 The estimated padding is then OX - IX.
454 OX can be safely estimated as
456 if (X >= Y)
457 OX = round_up(IX, Y)
458 else
459 OX = round_up(IX, X) + Y - X
461 Clearly est(IX) >= real(IX), because that only depends on the
462 instruction lengths, and those being overestimated is a given.
464 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
465 we needn't worry about that when thinking about OX.
467 When X >= Y, the alignment provided by Y adds no uncertainty factor
468 for branch ranges starting before X, so we can just round what we have.
469 But when X < Y, we don't know anything about the, so to speak,
470 `middle bits', so we have to assume the worst when aligning up from an
471 address mod X to one mod Y, which is Y - X. */
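/* Editorial illustration (a worked example added by the editor, not part
   of the original comment; the numbers are made up): suppose block X has
   inner size IX = 10 bytes, X provides 4-byte alignment and the following
   alignment point Y requests 8-byte alignment.  Since X < Y,
       OX = round_up (10, 4) + 8 - 4 = 16,
   so the estimated padding OX - IX is 6 bytes, which covers the worst
   case no matter which address modulo 8 block X actually starts on.  */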
473 #ifndef LABEL_ALIGN
474 #define LABEL_ALIGN(LABEL) align_labels_log
475 #endif
477 #ifndef LOOP_ALIGN
478 #define LOOP_ALIGN(LABEL) align_loops_log
479 #endif
481 #ifndef LABEL_ALIGN_AFTER_BARRIER
482 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
483 #endif
485 #ifndef JUMP_ALIGN
486 #define JUMP_ALIGN(LABEL) align_jumps_log
487 #endif
490 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
492 return 0;
496 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
498 return align_loops_max_skip;
502 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
504 return align_labels_max_skip;
508 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
510 return align_jumps_max_skip;
513 #ifndef ADDR_VEC_ALIGN
514 static int
515 final_addr_vec_align (rtx addr_vec)
517 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
519 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
520 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
521 return exact_log2 (align);
525 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
526 #endif
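/* Editorial example (added by the editor for illustration): for an
   ADDR_VEC whose PATTERN has HImode (2-byte) entries, GET_MODE_SIZE is 2,
   so the default ADDR_VEC_ALIGN above yields exact_log2 (2) == 1, i.e.
   the dispatch table is aligned to a 2-byte boundary (capped at
   BIGGEST_ALIGNMENT / BITS_PER_UNIT on targets where that is smaller).  */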
528 #ifndef INSN_LENGTH_ALIGNMENT
529 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
530 #endif
532 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
534 static int min_labelno, max_labelno;
536 #define LABEL_TO_ALIGNMENT(LABEL) \
537 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
539 #define LABEL_TO_MAX_SKIP(LABEL) \
540 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
 542 /* For the benefit of port-specific code, do this also as a function.  */
545 label_to_alignment (rtx label)
547 if (CODE_LABEL_NUMBER (label) <= max_labelno)
548 return LABEL_TO_ALIGNMENT (label);
549 return 0;
553 label_to_max_skip (rtx label)
555 if (CODE_LABEL_NUMBER (label) <= max_labelno)
556 return LABEL_TO_MAX_SKIP (label);
557 return 0;
560 /* The differences in addresses
561 between a branch and its target might grow or shrink depending on
562 the alignment the start insn of the range (the branch for a forward
563 branch or the label for a backward branch) starts out on; if these
564 differences are used naively, they can even oscillate infinitely.
565 We therefore want to compute a 'worst case' address difference that
 566    is independent of the alignment the start insn of the range ends
567 up on, and that is at least as large as the actual difference.
568 The function align_fuzz calculates the amount we have to add to the
569 naively computed difference, by traversing the part of the alignment
570 chain of the start insn of the range that is in front of the end insn
571 of the range, and considering for each alignment the maximum amount
572 that it might contribute to a size increase.
574 For casesi tables, we also want to know worst case minimum amounts of
575 address difference, in case a machine description wants to introduce
576 some common offset that is added to all offsets in a table.
577 For this purpose, align_fuzz with a growth argument of 0 computes the
578 appropriate adjustment. */
580 /* Compute the maximum delta by which the difference of the addresses of
581 START and END might grow / shrink due to a different address for start
582 which changes the size of alignment insns between START and END.
583 KNOWN_ALIGN_LOG is the alignment known for START.
584 GROWTH should be ~0 if the objective is to compute potential code size
585 increase, and 0 if the objective is to compute potential shrink.
586 The return value is undefined for any other value of GROWTH. */
588 static int
589 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
591 int uid = INSN_UID (start);
592 rtx align_label;
593 int known_align = 1 << known_align_log;
594 int end_shuid = INSN_SHUID (end);
595 int fuzz = 0;
597 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
599 int align_addr, new_align;
601 uid = INSN_UID (align_label);
602 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
603 if (uid_shuid[uid] > end_shuid)
604 break;
605 known_align_log = LABEL_TO_ALIGNMENT (align_label);
606 new_align = 1 << known_align_log;
607 if (new_align < known_align)
608 continue;
609 fuzz += (-align_addr ^ growth) & (new_align - known_align);
610 known_align = new_align;
612 return fuzz;
615 /* Compute a worst-case reference address of a branch so that it
616 can be safely used in the presence of aligned labels. Since the
617 size of the branch itself is unknown, the size of the branch is
618 not included in the range. I.e. for a forward branch, the reference
619 address is the end address of the branch as known from the previous
620 branch shortening pass, minus a value to account for possible size
621 increase due to alignment. For a backward branch, it is the start
622 address of the branch as known from the current pass, plus a value
623 to account for possible size increase due to alignment.
624 NB.: Therefore, the maximum offset allowed for backward branches needs
625 to exclude the branch size. */
628 insn_current_reference_address (rtx_insn *branch)
630 rtx dest;
631 int seq_uid;
633 if (! INSN_ADDRESSES_SET_P ())
634 return 0;
636 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
637 seq_uid = INSN_UID (seq);
638 if (!JUMP_P (branch))
639 /* This can happen for example on the PA; the objective is to know the
640 offset to address something in front of the start of the function.
641 Thus, we can treat it like a backward branch.
642 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
643 any alignment we'd encounter, so we skip the call to align_fuzz. */
644 return insn_current_address;
645 dest = JUMP_LABEL (branch);
647 /* BRANCH has no proper alignment chain set, so use SEQ.
648 BRANCH also has no INSN_SHUID. */
649 if (INSN_SHUID (seq) < INSN_SHUID (dest))
651 /* Forward branch. */
652 return (insn_last_address + insn_lengths[seq_uid]
653 - align_fuzz (seq, dest, length_unit_log, ~0));
655 else
657 /* Backward branch. */
658 return (insn_current_address
659 + align_fuzz (dest, seq, length_unit_log, ~0));
663 /* Compute branch alignments based on frequency information in the
664 CFG. */
666 unsigned int
667 compute_alignments (void)
669 int log, max_skip, max_log;
670 basic_block bb;
671 int freq_max = 0;
672 int freq_threshold = 0;
674 if (label_align)
676 free (label_align);
677 label_align = 0;
680 max_labelno = max_label_num ();
681 min_labelno = get_first_label_num ();
682 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
684 /* If not optimizing or optimizing for size, don't assign any alignments. */
685 if (! optimize || optimize_function_for_size_p (cfun))
686 return 0;
688 if (dump_file)
690 dump_reg_info (dump_file);
691 dump_flow_info (dump_file, TDF_DETAILS);
692 flow_loops_dump (dump_file, NULL, 1);
694 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
695 FOR_EACH_BB_FN (bb, cfun)
696 if (bb->frequency > freq_max)
697 freq_max = bb->frequency;
698 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
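/* Editorial example (added by the editor; the parameter default is an
   assumption): with freq_max == 10000 and --param align-threshold at an
   assumed default of 100, freq_threshold becomes 100, so the alignment
   decisions below only consider blocks whose incoming edge frequencies
   exceed 100.  */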
700 if (dump_file)
701 fprintf (dump_file, "freq_max: %i\n",freq_max);
702 FOR_EACH_BB_FN (bb, cfun)
704 rtx_insn *label = BB_HEAD (bb);
705 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
706 edge e;
707 edge_iterator ei;
709 if (!LABEL_P (label)
710 || optimize_bb_for_size_p (bb))
712 if (dump_file)
713 fprintf (dump_file,
714 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
715 bb->index, bb->frequency, bb->loop_father->num,
716 bb_loop_depth (bb));
717 continue;
719 max_log = LABEL_ALIGN (label);
720 max_skip = targetm.asm_out.label_align_max_skip (label);
722 FOR_EACH_EDGE (e, ei, bb->preds)
724 if (e->flags & EDGE_FALLTHRU)
725 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
726 else
727 branch_frequency += EDGE_FREQUENCY (e);
729 if (dump_file)
731 fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
732 " %2i fall %4i branch %4i",
733 bb->index, bb->frequency, bb->loop_father->num,
734 bb_loop_depth (bb),
735 fallthru_frequency, branch_frequency);
736 if (!bb->loop_father->inner && bb->loop_father->num)
737 fprintf (dump_file, " inner_loop");
738 if (bb->loop_father->header == bb)
739 fprintf (dump_file, " loop_header");
740 fprintf (dump_file, "\n");
 743       /* There are two purposes to aligning a block with no incoming fallthru edge:
744 1) to avoid fetch stalls when branch destination is near cache boundary
745 2) to improve cache efficiency in case the previous block is not executed
746 (so it does not need to be in the cache).
 748          To catch the first case, we align frequently executed blocks.
 749          To catch the second, we align blocks that are executed more frequently
 750          than the predecessor when the predecessor is likely not to be executed
 751          when the function is called.  */
753 if (!has_fallthru
754 && (branch_frequency > freq_threshold
755 || (bb->frequency > bb->prev_bb->frequency * 10
756 && (bb->prev_bb->frequency
757 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
759 log = JUMP_ALIGN (label);
760 if (dump_file)
761 fprintf (dump_file, " jump alignment added.\n");
762 if (max_log < log)
764 max_log = log;
765 max_skip = targetm.asm_out.jump_align_max_skip (label);
 768       /* In case the block is frequent and reached mostly by non-fallthru edges,
 769          align it.  It is most likely the first block of a loop.  */
770 if (has_fallthru
771 && !(single_succ_p (bb)
772 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
773 && optimize_bb_for_speed_p (bb)
774 && branch_frequency + fallthru_frequency > freq_threshold
775 && (branch_frequency
776 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
778 log = LOOP_ALIGN (label);
779 if (dump_file)
780 fprintf (dump_file, " internal loop alignment added.\n");
781 if (max_log < log)
783 max_log = log;
784 max_skip = targetm.asm_out.loop_align_max_skip (label);
787 LABEL_TO_ALIGNMENT (label) = max_log;
788 LABEL_TO_MAX_SKIP (label) = max_skip;
791 loop_optimizer_finalize ();
792 free_dominance_info (CDI_DOMINATORS);
793 return 0;
796 /* Grow the LABEL_ALIGN array after new labels are created. */
798 static void
799 grow_label_align (void)
801 int old = max_labelno;
802 int n_labels;
803 int n_old_labels;
805 max_labelno = max_label_num ();
807 n_labels = max_labelno - min_labelno + 1;
808 n_old_labels = old - min_labelno + 1;
810 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
 812   /* The range of labels grows monotonically in the function.  Failing here
 813      means that the initialization of the array got lost.  */
814 gcc_assert (n_old_labels <= n_labels);
816 memset (label_align + n_old_labels, 0,
817 (n_labels - n_old_labels) * sizeof (struct label_alignment));
820 /* Update the already computed alignment information. LABEL_PAIRS is a vector
821 made up of pairs of labels for which the alignment information of the first
822 element will be copied from that of the second element. */
824 void
825 update_alignments (vec<rtx> &label_pairs)
827 unsigned int i = 0;
828 rtx iter, label = NULL_RTX;
830 if (max_labelno != max_label_num ())
831 grow_label_align ();
833 FOR_EACH_VEC_ELT (label_pairs, i, iter)
834 if (i & 1)
836 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
837 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
839 else
840 label = iter;
843 namespace {
845 const pass_data pass_data_compute_alignments =
847 RTL_PASS, /* type */
848 "alignments", /* name */
849 OPTGROUP_NONE, /* optinfo_flags */
850 TV_NONE, /* tv_id */
851 0, /* properties_required */
852 0, /* properties_provided */
853 0, /* properties_destroyed */
854 0, /* todo_flags_start */
855 0, /* todo_flags_finish */
858 class pass_compute_alignments : public rtl_opt_pass
860 public:
861 pass_compute_alignments (gcc::context *ctxt)
862 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
865 /* opt_pass methods: */
866 virtual unsigned int execute (function *) { return compute_alignments (); }
868 }; // class pass_compute_alignments
870 } // anon namespace
872 rtl_opt_pass *
873 make_pass_compute_alignments (gcc::context *ctxt)
875 return new pass_compute_alignments (ctxt);
879 /* Make a pass over all insns and compute their actual lengths by shortening
880 any branches of variable length if possible. */
882 /* shorten_branches might be called multiple times: for example, the SH
883 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
884 In order to do this, it needs proper length information, which it obtains
885 by calling shorten_branches. This cannot be collapsed with
886 shorten_branches itself into a single pass unless we also want to integrate
887 reorg.c, since the branch splitting exposes new instructions with delay
888 slots. */
890 void
891 shorten_branches (rtx_insn *first)
893 rtx_insn *insn;
894 int max_uid;
895 int i;
896 int max_log;
897 int max_skip;
898 #define MAX_CODE_ALIGN 16
899 rtx_insn *seq;
900 int something_changed = 1;
901 char *varying_length;
902 rtx body;
903 int uid;
904 rtx align_tab[MAX_CODE_ALIGN];
906 /* Compute maximum UID and allocate label_align / uid_shuid. */
907 max_uid = get_max_uid ();
909 /* Free uid_shuid before reallocating it. */
910 free (uid_shuid);
912 uid_shuid = XNEWVEC (int, max_uid);
914 if (max_labelno != max_label_num ())
915 grow_label_align ();
917 /* Initialize label_align and set up uid_shuid to be strictly
918 monotonically rising with insn order. */
919 /* We use max_log here to keep track of the maximum alignment we want to
920 impose on the next CODE_LABEL (or the current one if we are processing
921 the CODE_LABEL itself). */
923 max_log = 0;
924 max_skip = 0;
926 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
928 int log;
930 INSN_SHUID (insn) = i++;
931 if (INSN_P (insn))
932 continue;
934 if (LABEL_P (insn))
936 rtx_insn *next;
937 bool next_is_jumptable;
939 /* Merge in alignments computed by compute_alignments. */
940 log = LABEL_TO_ALIGNMENT (insn);
941 if (max_log < log)
943 max_log = log;
944 max_skip = LABEL_TO_MAX_SKIP (insn);
947 next = next_nonnote_insn (insn);
948 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
949 if (!next_is_jumptable)
951 log = LABEL_ALIGN (insn);
952 if (max_log < log)
954 max_log = log;
955 max_skip = targetm.asm_out.label_align_max_skip (insn);
958 /* ADDR_VECs only take room if read-only data goes into the text
959 section. */
960 if ((JUMP_TABLES_IN_TEXT_SECTION
961 || readonly_data_section == text_section)
962 && next_is_jumptable)
964 log = ADDR_VEC_ALIGN (next);
965 if (max_log < log)
967 max_log = log;
968 max_skip = targetm.asm_out.label_align_max_skip (insn);
971 LABEL_TO_ALIGNMENT (insn) = max_log;
972 LABEL_TO_MAX_SKIP (insn) = max_skip;
973 max_log = 0;
974 max_skip = 0;
976 else if (BARRIER_P (insn))
978 rtx_insn *label;
980 for (label = insn; label && ! INSN_P (label);
981 label = NEXT_INSN (label))
982 if (LABEL_P (label))
984 log = LABEL_ALIGN_AFTER_BARRIER (insn);
985 if (max_log < log)
987 max_log = log;
988 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
990 break;
994 if (!HAVE_ATTR_length)
995 return;
997 /* Allocate the rest of the arrays. */
998 insn_lengths = XNEWVEC (int, max_uid);
999 insn_lengths_max_uid = max_uid;
1000 /* Syntax errors can lead to labels being outside of the main insn stream.
1001 Initialize insn_addresses, so that we get reproducible results. */
1002 INSN_ADDRESSES_ALLOC (max_uid);
1004 varying_length = XCNEWVEC (char, max_uid);
1006 /* Initialize uid_align. We scan instructions
1007 from end to start, and keep in align_tab[n] the last seen insn
1008 that does an alignment of at least n+1, i.e. the successor
1009 in the alignment chain for an insn that does / has a known
1010 alignment of n. */
1011 uid_align = XCNEWVEC (rtx, max_uid);
1013 for (i = MAX_CODE_ALIGN; --i >= 0;)
1014 align_tab[i] = NULL_RTX;
1015 seq = get_last_insn ();
1016 for (; seq; seq = PREV_INSN (seq))
1018 int uid = INSN_UID (seq);
1019 int log;
1020 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1021 uid_align[uid] = align_tab[0];
1022 if (log)
1024 /* Found an alignment label. */
1025 uid_align[uid] = align_tab[log];
1026 for (i = log - 1; i >= 0; i--)
1027 align_tab[i] = seq;
1031 /* When optimizing, we start assuming minimum length, and keep increasing
1032 lengths as we find the need for this, till nothing changes.
1033 When not optimizing, we start assuming maximum lengths, and
1034 do a single pass to update the lengths. */
1035 bool increasing = optimize != 0;
1037 #ifdef CASE_VECTOR_SHORTEN_MODE
1038 if (optimize)
1040 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1041 label fields. */
1043 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1044 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1045 int rel;
1047 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1049 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1050 int len, i, min, max, insn_shuid;
1051 int min_align;
1052 addr_diff_vec_flags flags;
1054 if (! JUMP_TABLE_DATA_P (insn)
1055 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1056 continue;
1057 pat = PATTERN (insn);
1058 len = XVECLEN (pat, 1);
1059 gcc_assert (len > 0);
1060 min_align = MAX_CODE_ALIGN;
1061 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1063 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1064 int shuid = INSN_SHUID (lab);
1065 if (shuid < min)
1067 min = shuid;
1068 min_lab = lab;
1070 if (shuid > max)
1072 max = shuid;
1073 max_lab = lab;
1075 if (min_align > LABEL_TO_ALIGNMENT (lab))
1076 min_align = LABEL_TO_ALIGNMENT (lab);
1078 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1079 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1080 insn_shuid = INSN_SHUID (insn);
1081 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1082 memset (&flags, 0, sizeof (flags));
1083 flags.min_align = min_align;
1084 flags.base_after_vec = rel > insn_shuid;
1085 flags.min_after_vec = min > insn_shuid;
1086 flags.max_after_vec = max > insn_shuid;
1087 flags.min_after_base = min > rel;
1088 flags.max_after_base = max > rel;
1089 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1091 if (increasing)
1092 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1095 #endif /* CASE_VECTOR_SHORTEN_MODE */
1097 /* Compute initial lengths, addresses, and varying flags for each insn. */
1098 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1100 for (insn_current_address = 0, insn = first;
1101 insn != 0;
1102 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1104 uid = INSN_UID (insn);
1106 insn_lengths[uid] = 0;
1108 if (LABEL_P (insn))
1110 int log = LABEL_TO_ALIGNMENT (insn);
1111 if (log)
1113 int align = 1 << log;
1114 int new_address = (insn_current_address + align - 1) & -align;
1115 insn_lengths[uid] = new_address - insn_current_address;
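/* Editorial example (added by the editor; the numbers are made up): with
   insn_current_address == 13 and log == 3, align is 8 and new_address is
   (13 + 7) & -8 == 16, so the label is recorded with a 3-byte "length"
   that accounts for the alignment padding.  */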
1119 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1121 if (NOTE_P (insn) || BARRIER_P (insn)
1122 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1123 continue;
1124 if (insn->deleted ())
1125 continue;
1127 body = PATTERN (insn);
1128 if (JUMP_TABLE_DATA_P (insn))
1130 /* This only takes room if read-only data goes into the text
1131 section. */
1132 if (JUMP_TABLES_IN_TEXT_SECTION
1133 || readonly_data_section == text_section)
1134 insn_lengths[uid] = (XVECLEN (body,
1135 GET_CODE (body) == ADDR_DIFF_VEC)
1136 * GET_MODE_SIZE (GET_MODE (body)));
1137 /* Alignment is handled by ADDR_VEC_ALIGN. */
1139 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1140 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1141 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1143 int i;
1144 int const_delay_slots;
1145 if (DELAY_SLOTS)
1146 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1147 else
1148 const_delay_slots = 0;
1150 int (*inner_length_fun) (rtx_insn *)
1151 = const_delay_slots ? length_fun : insn_default_length;
1152 /* Inside a delay slot sequence, we do not do any branch shortening
1153 if the shortening could change the number of delay slots
1154 of the branch. */
1155 for (i = 0; i < body_seq->len (); i++)
1157 rtx_insn *inner_insn = body_seq->insn (i);
1158 int inner_uid = INSN_UID (inner_insn);
1159 int inner_length;
1161 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1162 || asm_noperands (PATTERN (inner_insn)) >= 0)
1163 inner_length = (asm_insn_count (PATTERN (inner_insn))
1164 * insn_default_length (inner_insn));
1165 else
1166 inner_length = inner_length_fun (inner_insn);
1168 insn_lengths[inner_uid] = inner_length;
1169 if (const_delay_slots)
1171 if ((varying_length[inner_uid]
1172 = insn_variable_length_p (inner_insn)) != 0)
1173 varying_length[uid] = 1;
1174 INSN_ADDRESSES (inner_uid) = (insn_current_address
1175 + insn_lengths[uid]);
1177 else
1178 varying_length[inner_uid] = 0;
1179 insn_lengths[uid] += inner_length;
1182 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1184 insn_lengths[uid] = length_fun (insn);
1185 varying_length[uid] = insn_variable_length_p (insn);
1188 /* If needed, do any adjustment. */
1189 #ifdef ADJUST_INSN_LENGTH
1190 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1191 if (insn_lengths[uid] < 0)
1192 fatal_insn ("negative insn length", insn);
1193 #endif
1196 /* Now loop over all the insns finding varying length insns. For each,
1197 get the current insn length. If it has changed, reflect the change.
1198 When nothing changes for a full pass, we are done. */
1200 while (something_changed)
1202 something_changed = 0;
1203 insn_current_align = MAX_CODE_ALIGN - 1;
1204 for (insn_current_address = 0, insn = first;
1205 insn != 0;
1206 insn = NEXT_INSN (insn))
1208 int new_length;
1209 #ifdef ADJUST_INSN_LENGTH
1210 int tmp_length;
1211 #endif
1212 int length_align;
1214 uid = INSN_UID (insn);
1216 if (LABEL_P (insn))
1218 int log = LABEL_TO_ALIGNMENT (insn);
1220 #ifdef CASE_VECTOR_SHORTEN_MODE
1221 /* If the mode of a following jump table was changed, we
1222 may need to update the alignment of this label. */
1223 rtx_insn *next;
1224 bool next_is_jumptable;
1226 next = next_nonnote_insn (insn);
1227 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1228 if ((JUMP_TABLES_IN_TEXT_SECTION
1229 || readonly_data_section == text_section)
1230 && next_is_jumptable)
1232 int newlog = ADDR_VEC_ALIGN (next);
1233 if (newlog != log)
1235 log = newlog;
1236 LABEL_TO_ALIGNMENT (insn) = log;
1237 something_changed = 1;
1240 #endif
1242 if (log > insn_current_align)
1244 int align = 1 << log;
1245 int new_address= (insn_current_address + align - 1) & -align;
1246 insn_lengths[uid] = new_address - insn_current_address;
1247 insn_current_align = log;
1248 insn_current_address = new_address;
1250 else
1251 insn_lengths[uid] = 0;
1252 INSN_ADDRESSES (uid) = insn_current_address;
1253 continue;
1256 length_align = INSN_LENGTH_ALIGNMENT (insn);
1257 if (length_align < insn_current_align)
1258 insn_current_align = length_align;
1260 insn_last_address = INSN_ADDRESSES (uid);
1261 INSN_ADDRESSES (uid) = insn_current_address;
1263 #ifdef CASE_VECTOR_SHORTEN_MODE
1264 if (optimize
1265 && JUMP_TABLE_DATA_P (insn)
1266 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1268 rtx body = PATTERN (insn);
1269 int old_length = insn_lengths[uid];
1270 rtx_insn *rel_lab =
1271 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1272 rtx min_lab = XEXP (XEXP (body, 2), 0);
1273 rtx max_lab = XEXP (XEXP (body, 3), 0);
1274 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1275 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1276 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1277 rtx_insn *prev;
1278 int rel_align = 0;
1279 addr_diff_vec_flags flags;
1280 machine_mode vec_mode;
1282 /* Avoid automatic aggregate initialization. */
1283 flags = ADDR_DIFF_VEC_FLAGS (body);
1285 /* Try to find a known alignment for rel_lab. */
1286 for (prev = rel_lab;
1287 prev
1288 && ! insn_lengths[INSN_UID (prev)]
1289 && ! (varying_length[INSN_UID (prev)] & 1);
1290 prev = PREV_INSN (prev))
1291 if (varying_length[INSN_UID (prev)] & 2)
1293 rel_align = LABEL_TO_ALIGNMENT (prev);
1294 break;
1297 /* See the comment on addr_diff_vec_flags in rtl.h for the
1298 meaning of the flags values. base: REL_LAB vec: INSN */
 1299          /* Anything after INSN still has addresses from the last
1300 pass; adjust these so that they reflect our current
1301 estimate for this pass. */
1302 if (flags.base_after_vec)
1303 rel_addr += insn_current_address - insn_last_address;
1304 if (flags.min_after_vec)
1305 min_addr += insn_current_address - insn_last_address;
1306 if (flags.max_after_vec)
1307 max_addr += insn_current_address - insn_last_address;
1308 /* We want to know the worst case, i.e. lowest possible value
1309 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1310 its offset is positive, and we have to be wary of code shrink;
 1311             otherwise, it is negative, and we have to be wary of code
1312 size increase. */
1313 if (flags.min_after_base)
1315 /* If INSN is between REL_LAB and MIN_LAB, the size
1316 changes we are about to make can change the alignment
1317 within the observed offset, therefore we have to break
1318 it up into two parts that are independent. */
1319 if (! flags.base_after_vec && flags.min_after_vec)
1321 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1322 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1324 else
1325 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1327 else
1329 if (flags.base_after_vec && ! flags.min_after_vec)
1331 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1332 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1334 else
1335 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
 1337          /* Likewise, determine the highest possible value
1338 for the offset of MAX_LAB. */
1339 if (flags.max_after_base)
1341 if (! flags.base_after_vec && flags.max_after_vec)
1343 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1344 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1346 else
1347 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1349 else
1351 if (flags.base_after_vec && ! flags.max_after_vec)
1353 max_addr += align_fuzz (max_lab, insn, 0, 0);
1354 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1356 else
1357 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1359 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1360 max_addr - rel_addr, body);
1361 if (!increasing
1362 || (GET_MODE_SIZE (vec_mode)
1363 >= GET_MODE_SIZE (GET_MODE (body))))
1364 PUT_MODE (body, vec_mode);
1365 if (JUMP_TABLES_IN_TEXT_SECTION
1366 || readonly_data_section == text_section)
1368 insn_lengths[uid]
1369 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1370 insn_current_address += insn_lengths[uid];
1371 if (insn_lengths[uid] != old_length)
1372 something_changed = 1;
1375 continue;
1377 #endif /* CASE_VECTOR_SHORTEN_MODE */
1379 if (! (varying_length[uid]))
1381 if (NONJUMP_INSN_P (insn)
1382 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1384 int i;
1386 body = PATTERN (insn);
1387 for (i = 0; i < XVECLEN (body, 0); i++)
1389 rtx inner_insn = XVECEXP (body, 0, i);
1390 int inner_uid = INSN_UID (inner_insn);
1392 INSN_ADDRESSES (inner_uid) = insn_current_address;
1394 insn_current_address += insn_lengths[inner_uid];
1397 else
1398 insn_current_address += insn_lengths[uid];
1400 continue;
1403 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1405 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1406 int i;
1408 body = PATTERN (insn);
1409 new_length = 0;
1410 for (i = 0; i < seqn->len (); i++)
1412 rtx_insn *inner_insn = seqn->insn (i);
1413 int inner_uid = INSN_UID (inner_insn);
1414 int inner_length;
1416 INSN_ADDRESSES (inner_uid) = insn_current_address;
1418 /* insn_current_length returns 0 for insns with a
1419 non-varying length. */
1420 if (! varying_length[inner_uid])
1421 inner_length = insn_lengths[inner_uid];
1422 else
1423 inner_length = insn_current_length (inner_insn);
1425 if (inner_length != insn_lengths[inner_uid])
1427 if (!increasing || inner_length > insn_lengths[inner_uid])
1429 insn_lengths[inner_uid] = inner_length;
1430 something_changed = 1;
1432 else
1433 inner_length = insn_lengths[inner_uid];
1435 insn_current_address += inner_length;
1436 new_length += inner_length;
1439 else
1441 new_length = insn_current_length (insn);
1442 insn_current_address += new_length;
1445 #ifdef ADJUST_INSN_LENGTH
1446 /* If needed, do any adjustment. */
1447 tmp_length = new_length;
1448 ADJUST_INSN_LENGTH (insn, new_length);
1449 insn_current_address += (new_length - tmp_length);
1450 #endif
1452 if (new_length != insn_lengths[uid]
1453 && (!increasing || new_length > insn_lengths[uid]))
1455 insn_lengths[uid] = new_length;
1456 something_changed = 1;
1458 else
1459 insn_current_address += insn_lengths[uid] - new_length;
1461 /* For a non-optimizing compile, do only a single pass. */
1462 if (!increasing)
1463 break;
1466 free (varying_length);
1469 /* Given the body of an INSN known to be generated by an ASM statement, return
1470 the number of machine instructions likely to be generated for this insn.
1471 This is used to compute its length. */
1473 static int
1474 asm_insn_count (rtx body)
1476 const char *templ;
1478 if (GET_CODE (body) == ASM_INPUT)
1479 templ = XSTR (body, 0);
1480 else
1481 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1483 return asm_str_count (templ);
1486 /* Return the number of machine instructions likely to be generated for the
1487 inline-asm template. */
1489 asm_str_count (const char *templ)
1491 int count = 1;
1493 if (!*templ)
1494 return 0;
1496 for (; *templ; templ++)
1497 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1498 || *templ == '\n')
1499 count++;
1501 return count;
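/* Editorial example (added by the editor, not in the original file; the
   register names are purely illustrative): with the default
   IS_ASM_LOGICAL_LINE_SEPARATOR, which treats ';' as a separator,

       asm_str_count ("mov r0, r1\n\tadd r2, r3; sub r4, r5")  returns 3
       asm_str_count ("")                                      returns 0

   since counting starts at 1 and each newline or separator adds one.  */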
1504 /* ??? This is probably the wrong place for these. */
1505 /* Structure recording the mapping from source file and directory
1506 names at compile time to those to be embedded in debug
1507 information. */
1508 struct debug_prefix_map
1510 const char *old_prefix;
1511 const char *new_prefix;
1512 size_t old_len;
1513 size_t new_len;
1514 struct debug_prefix_map *next;
1517 /* Linked list of such structures. */
1518 static debug_prefix_map *debug_prefix_maps;
1521 /* Record a debug file prefix mapping. ARG is the argument to
1522 -fdebug-prefix-map and must be of the form OLD=NEW. */
1524 void
1525 add_debug_prefix_map (const char *arg)
1527 debug_prefix_map *map;
1528 const char *p;
1530 p = strchr (arg, '=');
1531 if (!p)
1533 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1534 return;
1536 map = XNEW (debug_prefix_map);
1537 map->old_prefix = xstrndup (arg, p - arg);
1538 map->old_len = p - arg;
1539 p++;
1540 map->new_prefix = xstrdup (p);
1541 map->new_len = strlen (p);
1542 map->next = debug_prefix_maps;
1543 debug_prefix_maps = map;
1546 /* Perform user-specified mapping of debug filename prefixes. Return
1547 the new name corresponding to FILENAME. */
1549 const char *
1550 remap_debug_filename (const char *filename)
1552 debug_prefix_map *map;
1553 char *s;
1554 const char *name;
1555 size_t name_len;
1557 for (map = debug_prefix_maps; map; map = map->next)
1558 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1559 break;
1560 if (!map)
1561 return filename;
1562 name = filename + map->old_len;
1563 name_len = strlen (name) + 1;
1564 s = (char *) alloca (name_len + map->new_len);
1565 memcpy (s, map->new_prefix, map->new_len);
1566 memcpy (s + map->new_len, name, name_len);
1567 return ggc_strdup (s);
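/* Editorial example (added by the editor; the paths are hypothetical):
   after -fdebug-prefix-map=/home/user/src=/usr/src, a call such as

       remap_debug_filename ("/home/user/src/gcc/final.c")

   returns "/usr/src/gcc/final.c", while filenames that do not start with
   the old prefix are returned unchanged.  */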
1570 /* Return true if DWARF2 debug info can be emitted for DECL. */
1572 static bool
1573 dwarf2_debug_info_emitted_p (tree decl)
1575 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1576 return false;
1578 if (DECL_IGNORED_P (decl))
1579 return false;
1581 return true;
1584 /* Return scope resulting from combination of S1 and S2. */
1585 static tree
1586 choose_inner_scope (tree s1, tree s2)
1588 if (!s1)
1589 return s2;
1590 if (!s2)
1591 return s1;
1592 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1593 return s1;
1594 return s2;
1597 /* Emit lexical block notes needed to change scope from S1 to S2. */
1599 static void
1600 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1602 rtx_insn *insn = orig_insn;
1603 tree com = NULL_TREE;
1604 tree ts1 = s1, ts2 = s2;
1605 tree s;
1607 while (ts1 != ts2)
1609 gcc_assert (ts1 && ts2);
1610 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1611 ts1 = BLOCK_SUPERCONTEXT (ts1);
1612 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1613 ts2 = BLOCK_SUPERCONTEXT (ts2);
1614 else
1616 ts1 = BLOCK_SUPERCONTEXT (ts1);
1617 ts2 = BLOCK_SUPERCONTEXT (ts2);
1620 com = ts1;
1622 /* Close scopes. */
1623 s = s1;
1624 while (s != com)
1626 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1627 NOTE_BLOCK (note) = s;
1628 s = BLOCK_SUPERCONTEXT (s);
1631 /* Open scopes. */
1632 s = s2;
1633 while (s != com)
1635 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1636 NOTE_BLOCK (insn) = s;
1637 s = BLOCK_SUPERCONTEXT (s);
1641 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1642 on the scope tree and the newly reordered instructions. */
1644 static void
1645 reemit_insn_block_notes (void)
1647 tree cur_block = DECL_INITIAL (cfun->decl);
1648 rtx_insn *insn;
1649 rtx_note *note;
1651 insn = get_insns ();
1652 for (; insn; insn = NEXT_INSN (insn))
1654 tree this_block;
1656 /* Prevent lexical blocks from straddling section boundaries. */
1657 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1659 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1660 s = BLOCK_SUPERCONTEXT (s))
1662 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1663 NOTE_BLOCK (note) = s;
1664 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1665 NOTE_BLOCK (note) = s;
1669 if (!active_insn_p (insn))
1670 continue;
1672 /* Avoid putting scope notes between jump table and its label. */
1673 if (JUMP_TABLE_DATA_P (insn))
1674 continue;
1676 this_block = insn_scope (insn);
1677 /* For sequences compute scope resulting from merging all scopes
1678 of instructions nested inside. */
1679 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1681 int i;
1683 this_block = NULL;
1684 for (i = 0; i < body->len (); i++)
1685 this_block = choose_inner_scope (this_block,
1686 insn_scope (body->insn (i)));
1688 if (! this_block)
1690 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1691 continue;
1692 else
1693 this_block = DECL_INITIAL (cfun->decl);
1696 if (this_block != cur_block)
1698 change_scope (insn, cur_block, this_block);
1699 cur_block = this_block;
1703 /* change_scope emits before the insn, not after. */
1704 note = emit_note (NOTE_INSN_DELETED);
1705 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1706 delete_insn (note);
1708 reorder_blocks ();
1711 static const char *some_local_dynamic_name;
1713 /* Locate some local-dynamic symbol still in use by this function
1714 so that we can print its name in local-dynamic base patterns.
1715 Return null if there are no local-dynamic references. */
1717 const char *
1718 get_some_local_dynamic_name ()
1720 subrtx_iterator::array_type array;
1721 rtx_insn *insn;
1723 if (some_local_dynamic_name)
1724 return some_local_dynamic_name;
1726 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1727 if (NONDEBUG_INSN_P (insn))
1728 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1730 const_rtx x = *iter;
1731 if (GET_CODE (x) == SYMBOL_REF)
1733 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1734 return some_local_dynamic_name = XSTR (x, 0);
1735 if (CONSTANT_POOL_ADDRESS_P (x))
1736 iter.substitute (get_pool_constant (x));
1740 return 0;
1743 /* Output assembler code for the start of a function,
1744 and initialize some of the variables in this file
1745 for the new function. The label for the function and associated
1746 assembler pseudo-ops have already been output in `assemble_start_function'.
1748 FIRST is the first insn of the rtl for the function being compiled.
1749 FILE is the file to write assembler code to.
1750 OPTIMIZE_P is nonzero if we should eliminate redundant
1751 test and compare insns. */
1753 void
1754 final_start_function (rtx_insn *first, FILE *file,
1755 int optimize_p ATTRIBUTE_UNUSED)
1757 block_depth = 0;
1759 this_is_asm_operands = 0;
1761 need_profile_function = false;
1763 last_filename = LOCATION_FILE (prologue_location);
1764 last_linenum = LOCATION_LINE (prologue_location);
1765 last_discriminator = discriminator = 0;
1767 high_block_linenum = high_function_linenum = last_linenum;
1769 if (flag_sanitize & SANITIZE_ADDRESS)
1770 asan_function_start ();
1772 if (!DECL_IGNORED_P (current_function_decl))
1773 debug_hooks->begin_prologue (last_linenum, last_filename);
1775 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1776 dwarf2out_begin_prologue (0, NULL);
1778 #ifdef LEAF_REG_REMAP
1779 if (crtl->uses_only_leaf_regs)
1780 leaf_renumber_regs (first);
1781 #endif
1783 /* The Sun386i and perhaps other machines don't work right
1784 if the profiling code comes after the prologue. */
1785 if (targetm.profile_before_prologue () && crtl->profile)
1787 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1788 && targetm.have_prologue ())
1790 rtx_insn *insn;
1791 for (insn = first; insn; insn = NEXT_INSN (insn))
1792 if (!NOTE_P (insn))
1794 insn = NULL;
1795 break;
1797 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1798 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1799 break;
1800 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1801 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1802 continue;
1803 else
1805 insn = NULL;
1806 break;
1809 if (insn)
1810 need_profile_function = true;
1811 else
1812 profile_function (file);
1814 else
1815 profile_function (file);
1818 /* If debugging, assign block numbers to all of the blocks in this
1819 function. */
1820 if (write_symbols)
1822 reemit_insn_block_notes ();
1823 number_blocks (current_function_decl);
1824 /* We never actually put out begin/end notes for the top-level
1825 block in the function. But, conceptually, that block is
1826 always needed. */
1827 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1830 if (warn_frame_larger_than
1831 && get_frame_size () > frame_larger_than_size)
1833 /* Issue a warning */
1834 warning (OPT_Wframe_larger_than_,
1835 "the frame size of %wd bytes is larger than %wd bytes",
1836 get_frame_size (), frame_larger_than_size);
1839 /* First output the function prologue: code to set up the stack frame. */
1840 targetm.asm_out.function_prologue (file, get_frame_size ());
1842 /* If the machine represents the prologue as RTL, the profiling code must
1843 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1844 if (! targetm.have_prologue ())
1845 profile_after_prologue (file);
1848 static void
1849 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1851 if (!targetm.profile_before_prologue () && crtl->profile)
1852 profile_function (file);
1855 static void
1856 profile_function (FILE *file ATTRIBUTE_UNUSED)
1858 #ifndef NO_PROFILE_COUNTERS
1859 # define NO_PROFILE_COUNTERS 0
1860 #endif
1861 #ifdef ASM_OUTPUT_REG_PUSH
1862 rtx sval = NULL, chain = NULL;
1864 if (cfun->returns_struct)
1865 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1866 true);
1867 if (cfun->static_chain_decl)
1868 chain = targetm.calls.static_chain (current_function_decl, true);
1869 #endif /* ASM_OUTPUT_REG_PUSH */
1871 if (! NO_PROFILE_COUNTERS)
1873 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1874 switch_to_section (data_section);
1875 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1876 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1877 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1880 switch_to_section (current_function_section ());
1882 #ifdef ASM_OUTPUT_REG_PUSH
1883 if (sval && REG_P (sval))
1884 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1885 if (chain && REG_P (chain))
1886 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1887 #endif
1889 FUNCTION_PROFILER (file, current_function_funcdef_no);
1891 #ifdef ASM_OUTPUT_REG_PUSH
1892 if (chain && REG_P (chain))
1893 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1894 if (sval && REG_P (sval))
1895 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1896 #endif
1899 /* Output assembler code for the end of a function.
1900 For clarity, args are same as those of `final_start_function'
1901 even though not all of them are needed. */
1903 void
1904 final_end_function (void)
1906 app_disable ();
1908 if (!DECL_IGNORED_P (current_function_decl))
1909 debug_hooks->end_function (high_function_linenum);
1911 /* Finally, output the function epilogue:
1912 code to restore the stack frame and return to the caller. */
1913 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1915 /* And debug output. */
1916 if (!DECL_IGNORED_P (current_function_decl))
1917 debug_hooks->end_epilogue (last_linenum, last_filename);
1919 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1920 && dwarf2out_do_frame ())
1921 dwarf2out_end_epilogue (last_linenum, last_filename);
1923 some_local_dynamic_name = 0;
1927 /* Dumper helper for basic block information. FILE is the assembly
1928 output file, and INSN is the instruction being emitted. */
1930 static void
1931 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1932 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1934 basic_block bb;
1936 if (!flag_debug_asm)
1937 return;
1939 if (INSN_UID (insn) < bb_map_size
1940 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1942 edge e;
1943 edge_iterator ei;
1945 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1946 if (bb->frequency)
1947 fprintf (file, " freq:%d", bb->frequency);
1948 if (bb->count)
1949 fprintf (file, " count:%" PRId64,
1950 bb->count);
1951 fprintf (file, " seq:%d", (*bb_seqn)++);
1952 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1953 FOR_EACH_EDGE (e, ei, bb->preds)
1955 dump_edge_info (file, e, TDF_DETAILS, 0);
1957 fprintf (file, "\n");
1959 if (INSN_UID (insn) < bb_map_size
1960 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1962 edge e;
1963 edge_iterator ei;
1965 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1966 FOR_EACH_EDGE (e, ei, bb->succs)
1968 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1970 fprintf (file, "\n");
1974 /* Output assembler code for some insns: all or part of a function.
1975 For description of args, see `final_start_function', above. */
1977 void
1978 final (rtx_insn *first, FILE *file, int optimize_p)
1980 rtx_insn *insn, *next;
1981 int seen = 0;
1983 /* Used for -dA dump. */
1984 basic_block *start_to_bb = NULL;
1985 basic_block *end_to_bb = NULL;
1986 int bb_map_size = 0;
1987 int bb_seqn = 0;
1989 last_ignored_compare = 0;
1991 if (HAVE_cc0)
1992 for (insn = first; insn; insn = NEXT_INSN (insn))
1994 /* If CC tracking across branches is enabled, record the insn which
1995 jumps to each branch only reached from one place. */
1996 if (optimize_p && JUMP_P (insn))
1998 rtx lab = JUMP_LABEL (insn);
1999 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2001 LABEL_REFS (lab) = insn;
2006 init_recog ();
2008 CC_STATUS_INIT;
2010 if (flag_debug_asm)
2012 basic_block bb;
2014 bb_map_size = get_max_uid () + 1;
2015 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2016 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2018 /* There is no cfg for a thunk. */
2019 if (!cfun->is_thunk)
2020 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2022 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2023 end_to_bb[INSN_UID (BB_END (bb))] = bb;
2027 /* Output the insns. */
2028 for (insn = first; insn;)
2030 if (HAVE_ATTR_length)
2032 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2034 /* This can be triggered by bugs elsewhere in the compiler if
2035 new insns are created after init_insn_lengths is called. */
2036 gcc_assert (NOTE_P (insn));
2037 insn_current_address = -1;
2039 else
2040 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2043 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2044 bb_map_size, &bb_seqn);
2045 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2048 if (flag_debug_asm)
2050 free (start_to_bb);
2051 free (end_to_bb);
2054 /* Remove CFI notes, to avoid compare-debug failures. */
2055 for (insn = first; insn; insn = next)
2057 next = NEXT_INSN (insn);
2058 if (NOTE_P (insn)
2059 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2060 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2061 delete_insn (insn);
2065 const char *
2066 get_insn_template (int code, rtx insn)
2068 switch (insn_data[code].output_format)
2070 case INSN_OUTPUT_FORMAT_SINGLE:
2071 return insn_data[code].output.single;
2072 case INSN_OUTPUT_FORMAT_MULTI:
2073 return insn_data[code].output.multi[which_alternative];
2074 case INSN_OUTPUT_FORMAT_FUNCTION:
2075 gcc_assert (insn);
2076 return (*insn_data[code].output.function) (recog_data.operand,
2077 as_a <rtx_insn *> (insn));
2079 default:
2080 gcc_unreachable ();
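/* For illustration (hypothetical machine-description fragments): a
   define_insn whose output template is the plain string "nop" uses
   INSN_OUTPUT_FORMAT_SINGLE; a template beginning with "@", one line
   per constraint alternative, uses INSN_OUTPUT_FORMAT_MULTI and is
   indexed by which_alternative; and a template written as C code
   ("* ..." or "{ return ...; }") uses INSN_OUTPUT_FORMAT_FUNCTION and
   is handed the recognized operands together with the insn itself.  */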
2084 /* Emit the appropriate declaration for an alternate-entry-point
2085 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2086 LABEL_KIND != LABEL_NORMAL.
2088 The case fall-through in this function is intentional. */
2089 static void
2090 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2092 const char *name = LABEL_NAME (insn);
2094 switch (LABEL_KIND (insn))
2096 case LABEL_WEAK_ENTRY:
2097 #ifdef ASM_WEAKEN_LABEL
2098 ASM_WEAKEN_LABEL (file, name);
2099 #endif
2100 case LABEL_GLOBAL_ENTRY:
2101 targetm.asm_out.globalize_label (file, name);
2102 case LABEL_STATIC_ENTRY:
2103 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2104 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2105 #endif
2106 ASM_OUTPUT_LABEL (file, name);
2107 break;
2109 case LABEL_NORMAL:
2110 default:
2111 gcc_unreachable ();
2115 /* Given a CALL_INSN, find and return the nested CALL. */
2116 static rtx
2117 call_from_call_insn (rtx_call_insn *insn)
2119 rtx x;
2120 gcc_assert (CALL_P (insn));
2121 x = PATTERN (insn);
2123 while (GET_CODE (x) != CALL)
2125 switch (GET_CODE (x))
2127 default:
2128 gcc_unreachable ();
2129 case COND_EXEC:
2130 x = COND_EXEC_CODE (x);
2131 break;
2132 case PARALLEL:
2133 x = XVECEXP (x, 0, 0);
2134 break;
2135 case SET:
2136 x = XEXP (x, 1);
2137 break;
2140 return x;
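/* For illustration, the wrappers peeled off above (schematic shapes,
   not from any particular target):

	(call (mem ...) ...)			  plain call
	(set (reg ...) (call (mem ...) ...))	  call returning a value
	(parallel [(call ...) (clobber ...)])	  call plus clobbers
	(cond_exec (ne ...) (call ...))		  predicated call

   The loop keeps taking COND_EXEC_CODE, XVECEXP (x, 0, 0) or
   XEXP (x, 1) until the CALL rtx itself is reached.  */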
2143 /* The final scan for one insn, INSN.
2144 Args are same as in `final', except that INSN
2145 is the insn being scanned.
2146 Value returned is the next insn to be scanned.
2148 NOPEEPHOLES is the flag to disallow peephole processing (currently
2149 used within delayed branch sequence output).
2151 SEEN is used to track the end of the prologue, for emitting
2152 debug information. We force the emission of a line note after
2153 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2155 rtx_insn *
2156 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2157 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2159 #if HAVE_cc0
2160 rtx set;
2161 #endif
2162 rtx_insn *next;
2164 insn_counter++;
2166 /* Ignore deleted insns. These can occur when we split insns (due to a
2167 template of "#") while not optimizing. */
2168 if (insn->deleted ())
2169 return NEXT_INSN (insn);
2171 switch (GET_CODE (insn))
2173 case NOTE:
2174 switch (NOTE_KIND (insn))
2176 case NOTE_INSN_DELETED:
2177 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2178 break;
2180 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2181 in_cold_section_p = !in_cold_section_p;
2183 if (dwarf2out_do_frame ())
2184 dwarf2out_switch_text_section ();
2185 else if (!DECL_IGNORED_P (current_function_decl))
2186 debug_hooks->switch_text_section ();
2188 switch_to_section (current_function_section ());
2189 targetm.asm_out.function_switched_text_sections (asm_out_file,
2190 current_function_decl,
2191 in_cold_section_p);
2192 /* Emit a label for the split cold section. Form label name by
2193 suffixing "cold" to the original function's name. */
2194 if (in_cold_section_p)
2196 cold_function_name
2197 = clone_function_name (current_function_decl, "cold");
2198 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2199 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2200 IDENTIFIER_POINTER
2201 (cold_function_name),
2202 current_function_decl);
2203 #else
2204 ASM_OUTPUT_LABEL (asm_out_file,
2205 IDENTIFIER_POINTER (cold_function_name));
2206 #endif
2208 break;
2210 case NOTE_INSN_BASIC_BLOCK:
2211 if (need_profile_function)
2213 profile_function (asm_out_file);
2214 need_profile_function = false;
2217 if (targetm.asm_out.unwind_emit)
2218 targetm.asm_out.unwind_emit (asm_out_file, insn);
2220 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2222 break;
2224 case NOTE_INSN_EH_REGION_BEG:
2225 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2226 NOTE_EH_HANDLER (insn));
2227 break;
2229 case NOTE_INSN_EH_REGION_END:
2230 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2231 NOTE_EH_HANDLER (insn));
2232 break;
2234 case NOTE_INSN_PROLOGUE_END:
2235 targetm.asm_out.function_end_prologue (file);
2236 profile_after_prologue (file);
2238 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2240 *seen |= SEEN_EMITTED;
2241 force_source_line = true;
2243 else
2244 *seen |= SEEN_NOTE;
2246 break;
2248 case NOTE_INSN_EPILOGUE_BEG:
2249 if (!DECL_IGNORED_P (current_function_decl))
2250 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2251 targetm.asm_out.function_begin_epilogue (file);
2252 break;
2254 case NOTE_INSN_CFI:
2255 dwarf2out_emit_cfi (NOTE_CFI (insn));
2256 break;
2258 case NOTE_INSN_CFI_LABEL:
2259 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2260 NOTE_LABEL_NUMBER (insn));
2261 break;
2263 case NOTE_INSN_FUNCTION_BEG:
2264 if (need_profile_function)
2266 profile_function (asm_out_file);
2267 need_profile_function = false;
2270 app_disable ();
2271 if (!DECL_IGNORED_P (current_function_decl))
2272 debug_hooks->end_prologue (last_linenum, last_filename);
2274 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2276 *seen |= SEEN_EMITTED;
2277 force_source_line = true;
2279 else
2280 *seen |= SEEN_NOTE;
2282 break;
2284 case NOTE_INSN_BLOCK_BEG:
2285 if (debug_info_level == DINFO_LEVEL_NORMAL
2286 || debug_info_level == DINFO_LEVEL_VERBOSE
2287 || write_symbols == DWARF2_DEBUG
2288 || write_symbols == VMS_AND_DWARF2_DEBUG
2289 || write_symbols == VMS_DEBUG)
2291 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2293 app_disable ();
2294 ++block_depth;
2295 high_block_linenum = last_linenum;
2297 /* Output debugging info about the symbol-block beginning. */
2298 if (!DECL_IGNORED_P (current_function_decl))
2299 debug_hooks->begin_block (last_linenum, n);
2301 /* Mark this block as output. */
2302 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2304 if (write_symbols == DBX_DEBUG
2305 || write_symbols == SDB_DEBUG)
2307 location_t *locus_ptr
2308 = block_nonartificial_location (NOTE_BLOCK (insn));
2310 if (locus_ptr != NULL)
2312 override_filename = LOCATION_FILE (*locus_ptr);
2313 override_linenum = LOCATION_LINE (*locus_ptr);
2316 break;
2318 case NOTE_INSN_BLOCK_END:
2319 if (debug_info_level == DINFO_LEVEL_NORMAL
2320 || debug_info_level == DINFO_LEVEL_VERBOSE
2321 || write_symbols == DWARF2_DEBUG
2322 || write_symbols == VMS_AND_DWARF2_DEBUG
2323 || write_symbols == VMS_DEBUG)
2325 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2327 app_disable ();
2329 /* End of a symbol-block. */
2330 --block_depth;
2331 gcc_assert (block_depth >= 0);
2333 if (!DECL_IGNORED_P (current_function_decl))
2334 debug_hooks->end_block (high_block_linenum, n);
2336 if (write_symbols == DBX_DEBUG
2337 || write_symbols == SDB_DEBUG)
2339 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2340 location_t *locus_ptr
2341 = block_nonartificial_location (outer_block);
2343 if (locus_ptr != NULL)
2345 override_filename = LOCATION_FILE (*locus_ptr);
2346 override_linenum = LOCATION_LINE (*locus_ptr);
2348 else
2350 override_filename = NULL;
2351 override_linenum = 0;
2354 break;
2356 case NOTE_INSN_DELETED_LABEL:
2357 /* Emit the label. We may have deleted the CODE_LABEL because
2358 the label could be proved to be unreachable, though still
2359 referenced (in the form of having its address taken). */
2360 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2361 break;
2363 case NOTE_INSN_DELETED_DEBUG_LABEL:
2364 /* Similarly, but we need to use a different namespace for it. */
2365 if (CODE_LABEL_NUMBER (insn) != -1)
2366 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2367 break;
2369 case NOTE_INSN_VAR_LOCATION:
2370 case NOTE_INSN_CALL_ARG_LOCATION:
2371 if (!DECL_IGNORED_P (current_function_decl))
2372 debug_hooks->var_location (insn);
2373 break;
2375 default:
2376 gcc_unreachable ();
2377 break;
2379 break;
2381 case BARRIER:
2382 break;
2384 case CODE_LABEL:
2385 /* The target port might emit labels in the output function for
2386 some insn, e.g. sh.c output_branchy_insn. */
2387 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2389 int align = LABEL_TO_ALIGNMENT (insn);
2390 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2391 int max_skip = LABEL_TO_MAX_SKIP (insn);
2392 #endif
2394 if (align && NEXT_INSN (insn))
2396 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2397 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2398 #else
2399 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2400 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2401 #else
2402 ASM_OUTPUT_ALIGN (file, align);
2403 #endif
2404 #endif
2407 CC_STATUS_INIT;
2409 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2410 debug_hooks->label (as_a <rtx_code_label *> (insn));
2412 app_disable ();
2414 next = next_nonnote_insn (insn);
2415 /* If this label is followed by a jump-table, make sure we put
2416 the label in the read-only section. Also possibly write the
2417 label and jump table together. */
2418 if (next != 0 && JUMP_TABLE_DATA_P (next))
2420 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2421 /* In this case, the case vector is being moved by the
2422 target, so don't output the label at all. Leave that
2423 to the back end macros. */
2424 #else
2425 if (! JUMP_TABLES_IN_TEXT_SECTION)
2427 int log_align;
2429 switch_to_section (targetm.asm_out.function_rodata_section
2430 (current_function_decl));
2432 #ifdef ADDR_VEC_ALIGN
2433 log_align = ADDR_VEC_ALIGN (next);
2434 #else
2435 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2436 #endif
2437 ASM_OUTPUT_ALIGN (file, log_align);
2439 else
2440 switch_to_section (current_function_section ());
2442 #ifdef ASM_OUTPUT_CASE_LABEL
2443 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2444 next);
2445 #else
2446 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2447 #endif
2448 #endif
2449 break;
2451 if (LABEL_ALT_ENTRY_P (insn))
2452 output_alternate_entry_point (file, insn);
2453 else
2454 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2455 break;
2457 default:
2459 rtx body = PATTERN (insn);
2460 int insn_code_number;
2461 const char *templ;
2462 bool is_stmt;
2464 /* Reset this early so it is correct for ASM statements. */
2465 current_insn_predicate = NULL_RTX;
2467 /* An INSN, JUMP_INSN or CALL_INSN.
2468 First check for special kinds that recog doesn't recognize. */
2470 if (GET_CODE (body) == USE /* These are just declarations. */
2471 || GET_CODE (body) == CLOBBER)
2472 break;
2474 #if HAVE_cc0
2476 /* If there is a REG_CC_SETTER note on this insn, it means that
2477 the setting of the condition code was done in the delay slot
2478 of the insn that branched here. So recover the cc status
2479 from the insn that set it. */
2481 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2482 if (note)
2484 rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2485 NOTICE_UPDATE_CC (PATTERN (other), other);
2486 cc_prev_status = cc_status;
2489 #endif
2491 /* Detect insns that are really jump-tables
2492 and output them as such. */
2494 if (JUMP_TABLE_DATA_P (insn))
2496 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2497 int vlen, idx;
2498 #endif
2500 if (! JUMP_TABLES_IN_TEXT_SECTION)
2501 switch_to_section (targetm.asm_out.function_rodata_section
2502 (current_function_decl));
2503 else
2504 switch_to_section (current_function_section ());
2506 app_disable ();
2508 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2509 if (GET_CODE (body) == ADDR_VEC)
2511 #ifdef ASM_OUTPUT_ADDR_VEC
2512 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2513 #else
2514 gcc_unreachable ();
2515 #endif
2517 else
2519 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2520 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2521 #else
2522 gcc_unreachable ();
2523 #endif
2525 #else
2526 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2527 for (idx = 0; idx < vlen; idx++)
2529 if (GET_CODE (body) == ADDR_VEC)
2531 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2532 ASM_OUTPUT_ADDR_VEC_ELT
2533 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2534 #else
2535 gcc_unreachable ();
2536 #endif
2538 else
2540 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2541 ASM_OUTPUT_ADDR_DIFF_ELT
2542 (file,
2543 body,
2544 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2545 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2546 #else
2547 gcc_unreachable ();
2548 #endif
2551 #ifdef ASM_OUTPUT_CASE_END
2552 ASM_OUTPUT_CASE_END (file,
2553 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2554 insn);
2555 #endif
2556 #endif
2558 switch_to_section (current_function_section ());
2560 break;
2562 /* Output this line note if it is the first or the last line
2563 note in a row. */
2564 if (!DECL_IGNORED_P (current_function_decl)
2565 && notice_source_line (insn, &is_stmt))
2566 (*debug_hooks->source_line) (last_linenum, last_filename,
2567 last_discriminator, is_stmt);
2569 if (GET_CODE (body) == PARALLEL
2570 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2571 body = XVECEXP (body, 0, 0);
2573 if (GET_CODE (body) == ASM_INPUT)
2575 const char *string = XSTR (body, 0);
2577 /* There's no telling what that did to the condition codes. */
2578 CC_STATUS_INIT;
2580 if (string[0])
2582 expanded_location loc;
2584 app_enable ();
2585 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2586 if (*loc.file && loc.line)
2587 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2588 ASM_COMMENT_START, loc.line, loc.file);
2589 fprintf (asm_out_file, "\t%s\n", string);
2590 #if HAVE_AS_LINE_ZERO
2591 if (*loc.file && loc.line)
2592 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2593 #endif
2595 break;
2598 /* Detect `asm' construct with operands. */
2599 if (asm_noperands (body) >= 0)
2601 unsigned int noperands = asm_noperands (body);
2602 rtx *ops = XALLOCAVEC (rtx, noperands);
2603 const char *string;
2604 location_t loc;
2605 expanded_location expanded;
2607 /* There's no telling what that did to the condition codes. */
2608 CC_STATUS_INIT;
2610 /* Get out the operand values. */
2611 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2612 /* Inhibit dying on what would otherwise be compiler bugs. */
2613 insn_noperands = noperands;
2614 this_is_asm_operands = insn;
2615 expanded = expand_location (loc);
2617 #ifdef FINAL_PRESCAN_INSN
2618 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2619 #endif
2621 /* Output the insn using them. */
2622 if (string[0])
2624 app_enable ();
2625 if (expanded.file && expanded.line)
2626 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2627 ASM_COMMENT_START, expanded.line, expanded.file);
2628 output_asm_insn (string, ops);
2629 #if HAVE_AS_LINE_ZERO
2630 if (expanded.file && expanded.line)
2631 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2632 #endif
2635 if (targetm.asm_out.final_postscan_insn)
2636 targetm.asm_out.final_postscan_insn (file, insn, ops,
2637 insn_noperands);
2639 this_is_asm_operands = 0;
2640 break;
2643 app_disable ();
2645 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2647 /* A delayed-branch sequence */
2648 int i;
2650 final_sequence = seq;
2652 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2653 force the restoration of a comparison that was previously
2654 thought unnecessary. If that happens, cancel this sequence
2655 and cause that insn to be restored. */
2657 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2658 if (next != seq->insn (1))
2660 final_sequence = 0;
2661 return next;
2664 for (i = 1; i < seq->len (); i++)
2666 rtx_insn *insn = seq->insn (i);
2667 rtx_insn *next = NEXT_INSN (insn);
2668 /* We loop in case any instruction in a delay slot gets
2669 split. */
2671 insn = final_scan_insn (insn, file, 0, 1, seen);
2672 while (insn != next);
2674 #ifdef DBR_OUTPUT_SEQEND
2675 DBR_OUTPUT_SEQEND (file);
2676 #endif
2677 final_sequence = 0;
2679 /* If the insn requiring the delay slot was a CALL_INSN, the
2680 insns in the delay slot are actually executed before the
2681 called function. Hence we don't preserve any CC-setting
2682 actions in these insns and the CC must be marked as being
2683 clobbered by the function. */
2684 if (CALL_P (seq->insn (0)))
2686 CC_STATUS_INIT;
2688 break;
2691 /* We have a real machine instruction as rtl. */
2693 body = PATTERN (insn);
2695 #if HAVE_cc0
2696 set = single_set (insn);
2698 /* Check for redundant test and compare instructions
2699 (when the condition codes are already set up as desired).
2700 This is done only when optimizing; if not optimizing,
2701 it should be possible for the user to alter a variable
2702 with the debugger in between statements
2703 and the next statement should reexamine the variable
2704 to compute the condition codes. */
2706 if (optimize_p)
2708 if (set
2709 && GET_CODE (SET_DEST (set)) == CC0
2710 && insn != last_ignored_compare)
2712 rtx src1, src2;
2713 if (GET_CODE (SET_SRC (set)) == SUBREG)
2714 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2716 src1 = SET_SRC (set);
2717 src2 = NULL_RTX;
2718 if (GET_CODE (SET_SRC (set)) == COMPARE)
2720 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2721 XEXP (SET_SRC (set), 0)
2722 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2723 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2724 XEXP (SET_SRC (set), 1)
2725 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2726 if (XEXP (SET_SRC (set), 1)
2727 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2728 src2 = XEXP (SET_SRC (set), 0);
2730 if ((cc_status.value1 != 0
2731 && rtx_equal_p (src1, cc_status.value1))
2732 || (cc_status.value2 != 0
2733 && rtx_equal_p (src1, cc_status.value2))
2734 || (src2 != 0 && cc_status.value1 != 0
2735 && rtx_equal_p (src2, cc_status.value1))
2736 || (src2 != 0 && cc_status.value2 != 0
2737 && rtx_equal_p (src2, cc_status.value2)))
2739 /* Don't delete insn if it has an addressing side-effect. */
2740 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2741 /* or if anything in it is volatile. */
2742 && ! volatile_refs_p (PATTERN (insn)))
2744 /* We don't really delete the insn; just ignore it. */
2745 last_ignored_compare = insn;
2746 break;
2752 /* If this is a conditional branch, maybe modify it
2753 if the cc's are in a nonstandard state
2754 so that it accomplishes the same thing that it would
2755 do straightforwardly if the cc's were set up normally. */
2757 if (cc_status.flags != 0
2758 && JUMP_P (insn)
2759 && GET_CODE (body) == SET
2760 && SET_DEST (body) == pc_rtx
2761 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2762 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2763 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2765 /* This function may alter the contents of its argument
2766 and clear some of the cc_status.flags bits.
2767 It may also return 1 meaning condition now always true
2768 or -1 meaning condition now always false
2769 or 2 meaning condition nontrivial but altered. */
2770 int result = alter_cond (XEXP (SET_SRC (body), 0));
2771 /* If condition now has fixed value, replace the IF_THEN_ELSE
2772 with its then-operand or its else-operand. */
2773 if (result == 1)
2774 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2775 if (result == -1)
2776 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2778 /* The jump is now either unconditional or a no-op.
2779 If it has become a no-op, don't try to output it.
2780 (It would not be recognized.) */
2781 if (SET_SRC (body) == pc_rtx)
2783 delete_insn (insn);
2784 break;
2786 else if (ANY_RETURN_P (SET_SRC (body)))
2787 /* Replace (set (pc) (return)) with (return). */
2788 PATTERN (insn) = body = SET_SRC (body);
2790 /* Rerecognize the instruction if it has changed. */
2791 if (result != 0)
2792 INSN_CODE (insn) = -1;
2795 /* If this is a conditional trap, maybe modify it if the cc's
2796 are in a nonstandard state so that it accomplishes the same
2797 thing that it would do straightforwardly if the cc's were
2798 set up normally. */
2799 if (cc_status.flags != 0
2800 && NONJUMP_INSN_P (insn)
2801 && GET_CODE (body) == TRAP_IF
2802 && COMPARISON_P (TRAP_CONDITION (body))
2803 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2805 /* This function may alter the contents of its argument
2806 and clear some of the cc_status.flags bits.
2807 It may also return 1 meaning condition now always true
2808 or -1 meaning condition now always false
2809 or 2 meaning condition nontrivial but altered. */
2810 int result = alter_cond (TRAP_CONDITION (body));
2812 /* If TRAP_CONDITION has become always false, delete the
2813 instruction. */
2814 if (result == -1)
2816 delete_insn (insn);
2817 break;
2820 /* If TRAP_CONDITION has become always true, replace
2821 TRAP_CONDITION with const_true_rtx. */
2822 if (result == 1)
2823 TRAP_CONDITION (body) = const_true_rtx;
2825 /* Rerecognize the instruction if it has changed. */
2826 if (result != 0)
2827 INSN_CODE (insn) = -1;
2830 /* Make same adjustments to instructions that examine the
2831 condition codes without jumping and instructions that
2832 handle conditional moves (if this machine has either one). */
2834 if (cc_status.flags != 0
2835 && set != 0)
2837 rtx cond_rtx, then_rtx, else_rtx;
2839 if (!JUMP_P (insn)
2840 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2842 cond_rtx = XEXP (SET_SRC (set), 0);
2843 then_rtx = XEXP (SET_SRC (set), 1);
2844 else_rtx = XEXP (SET_SRC (set), 2);
2846 else
2848 cond_rtx = SET_SRC (set);
2849 then_rtx = const_true_rtx;
2850 else_rtx = const0_rtx;
2853 if (COMPARISON_P (cond_rtx)
2854 && XEXP (cond_rtx, 0) == cc0_rtx)
2856 int result;
2857 result = alter_cond (cond_rtx);
2858 if (result == 1)
2859 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2860 else if (result == -1)
2861 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2862 else if (result == 2)
2863 INSN_CODE (insn) = -1;
2864 if (SET_DEST (set) == SET_SRC (set))
2865 delete_insn (insn);
2869 #endif
2871 /* Do machine-specific peephole optimizations if desired. */
2873 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2875 rtx_insn *next = peephole (insn);
2876 /* When peepholing, if there were notes within the peephole,
2877 emit them before the peephole. */
2878 if (next != 0 && next != NEXT_INSN (insn))
2880 rtx_insn *note, *prev = PREV_INSN (insn);
2882 for (note = NEXT_INSN (insn); note != next;
2883 note = NEXT_INSN (note))
2884 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2886 /* Put the notes in the proper position for a later
2887 rescan. For example, the SH target can do this
2888 when generating a far jump in a delayed branch
2889 sequence. */
2890 note = NEXT_INSN (insn);
2891 SET_PREV_INSN (note) = prev;
2892 SET_NEXT_INSN (prev) = note;
2893 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2894 SET_PREV_INSN (insn) = PREV_INSN (next);
2895 SET_NEXT_INSN (insn) = next;
2896 SET_PREV_INSN (next) = insn;
2899 /* PEEPHOLE might have changed this. */
2900 body = PATTERN (insn);
2903 /* Try to recognize the instruction.
2904 If successful, verify that the operands satisfy the
2905 constraints for the instruction. Crash if they don't,
2906 since `reload' should have changed them so that they do. */
2908 insn_code_number = recog_memoized (insn);
2909 cleanup_subreg_operands (insn);
2911 /* Dump the insn in the assembly for debugging (-dAP).
2912 If the final dump is requested as slim RTL, dump slim
2913 RTL to the assembly file also. */
2914 if (flag_dump_rtl_in_asm)
2916 print_rtx_head = ASM_COMMENT_START;
2917 if (! (dump_flags & TDF_SLIM))
2918 print_rtl_single (asm_out_file, insn);
2919 else
2920 dump_insn_slim (asm_out_file, insn);
2921 print_rtx_head = "";
2924 if (! constrain_operands_cached (insn, 1))
2925 fatal_insn_not_found (insn);
2927 /* Some target machines need to prescan each insn before
2928 it is output. */
2930 #ifdef FINAL_PRESCAN_INSN
2931 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2932 #endif
2934 if (targetm.have_conditional_execution ()
2935 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2936 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2938 #if HAVE_cc0
2939 cc_prev_status = cc_status;
2941 /* Update `cc_status' for this instruction.
2942 The instruction's output routine may change it further.
2943 If the output routine for a jump insn needs to depend
2944 on the cc status, it should look at cc_prev_status. */
2946 NOTICE_UPDATE_CC (body, insn);
2947 #endif
2949 current_output_insn = debug_insn = insn;
2951 /* Find the proper template for this insn. */
2952 templ = get_insn_template (insn_code_number, insn);
2954 /* If the C code returns 0, it means that it is a jump insn
2955 which follows a deleted test insn, and that test insn
2956 needs to be reinserted. */
2957 if (templ == 0)
2959 rtx_insn *prev;
2961 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2963 /* We have already processed the notes between the setter and
2964 the user. Make sure we don't process them again; this is
2965 particularly important if one of the notes is a block
2966 scope note or an EH note. */
2967 for (prev = insn;
2968 prev != last_ignored_compare;
2969 prev = PREV_INSN (prev))
2971 if (NOTE_P (prev))
2972 delete_insn (prev); /* Use delete_note. */
2975 return prev;
2978 /* If the template is the string "#", it means that this insn must
2979 be split. */
2980 if (templ[0] == '#' && templ[1] == '\0')
2982 rtx_insn *new_rtx = try_split (body, insn, 0);
2984 /* If we didn't split the insn, go away. */
2985 if (new_rtx == insn && PATTERN (new_rtx) == body)
2986 fatal_insn ("could not split insn", insn);
2988 /* If we have a length attribute, this instruction should have
2989 been split in shorten_branches, to ensure that we would have
2990 valid length info for the splitees. */
2991 gcc_assert (!HAVE_ATTR_length);
2993 return new_rtx;
2996 /* ??? This will put the directives in the wrong place if
2997 get_insn_template outputs assembly directly. However, calling it
2998 before get_insn_template breaks if the insn is split. */
2999 if (targetm.asm_out.unwind_emit_before_insn
3000 && targetm.asm_out.unwind_emit)
3001 targetm.asm_out.unwind_emit (asm_out_file, insn);
3003 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
3004 if (call_insn != NULL)
3006 rtx x = call_from_call_insn (call_insn);
3007 x = XEXP (x, 0);
3008 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3010 tree t;
3011 x = XEXP (x, 0);
3012 t = SYMBOL_REF_DECL (x);
3013 if (t)
3014 assemble_external (t);
3018 /* Output assembler code from the template. */
3019 output_asm_insn (templ, recog_data.operand);
3021 /* Some target machines need to postscan each insn after
3022 it is output. */
3023 if (targetm.asm_out.final_postscan_insn)
3024 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3025 recog_data.n_operands);
3027 if (!targetm.asm_out.unwind_emit_before_insn
3028 && targetm.asm_out.unwind_emit)
3029 targetm.asm_out.unwind_emit (asm_out_file, insn);
3031 /* Let the debug info back-end know about this call. We do this only
3032 after the instruction has been emitted because labels that may be
3033 created to reference the call instruction must appear after it. */
3034 if (call_insn != NULL && !DECL_IGNORED_P (current_function_decl))
3035 debug_hooks->var_location (insn);
3037 current_output_insn = debug_insn = 0;
3040 return NEXT_INSN (insn);
3043 /* Return whether a source line note needs to be emitted before INSN.
3044 Sets IS_STMT to TRUE if the line should be marked as a possible
3045 breakpoint location. */
3047 static bool
3048 notice_source_line (rtx_insn *insn, bool *is_stmt)
3050 const char *filename;
3051 int linenum;
3053 if (override_filename)
3055 filename = override_filename;
3056 linenum = override_linenum;
3058 else if (INSN_HAS_LOCATION (insn))
3060 expanded_location xloc = insn_location (insn);
3061 filename = xloc.file;
3062 linenum = xloc.line;
3064 else
3066 filename = NULL;
3067 linenum = 0;
3070 if (filename == NULL)
3071 return false;
3073 if (force_source_line
3074 || filename != last_filename
3075 || last_linenum != linenum)
3077 force_source_line = false;
3078 last_filename = filename;
3079 last_linenum = linenum;
3080 last_discriminator = discriminator;
3081 *is_stmt = true;
3082 high_block_linenum = MAX (last_linenum, high_block_linenum);
3083 high_function_linenum = MAX (last_linenum, high_function_linenum);
3084 return true;
3087 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3089 /* If the discriminator changed, but the line number did not,
3090 output the line table entry with is_stmt false so the
3091 debugger does not treat this as a breakpoint location. */
3092 last_discriminator = discriminator;
3093 *is_stmt = false;
3094 return true;
3097 return false;
3100 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3101 directly to the desired hard register. */
3103 void
3104 cleanup_subreg_operands (rtx_insn *insn)
3106 int i;
3107 bool changed = false;
3108 extract_insn_cached (insn);
3109 for (i = 0; i < recog_data.n_operands; i++)
3111 /* The following test cannot use recog_data.operand when testing
3112 for a SUBREG: the underlying object might have been changed
3113 already if we are inside a match_operator expression that
3114 matches the else clause. Instead we test the underlying
3115 expression directly. */
3116 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3118 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3119 changed = true;
3121 else if (GET_CODE (recog_data.operand[i]) == PLUS
3122 || GET_CODE (recog_data.operand[i]) == MULT
3123 || MEM_P (recog_data.operand[i]))
3124 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3127 for (i = 0; i < recog_data.n_dups; i++)
3129 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3131 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3132 changed = true;
3134 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3135 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3136 || MEM_P (*recog_data.dup_loc[i]))
3137 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3139 if (changed)
3140 df_insn_rescan (insn);
3143 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3144 the thing it is a subreg of. Do it anyway if FINAL_P. */
3147 alter_subreg (rtx *xp, bool final_p)
3149 rtx x = *xp;
3150 rtx y = SUBREG_REG (x);
3152 /* simplify_subreg does not remove subreg from volatile references.
3153 We are required to. */
3154 if (MEM_P (y))
3156 int offset = SUBREG_BYTE (x);
3158 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3159 contains 0 instead of the proper offset. See simplify_subreg. */
3160 if (offset == 0
3161 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3163 int difference = GET_MODE_SIZE (GET_MODE (y))
3164 - GET_MODE_SIZE (GET_MODE (x));
3165 if (WORDS_BIG_ENDIAN)
3166 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3167 if (BYTES_BIG_ENDIAN)
3168 offset += difference % UNITS_PER_WORD;
3171 if (final_p)
3172 *xp = adjust_address (y, GET_MODE (x), offset);
3173 else
3174 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3176 else if (REG_P (y) && HARD_REGISTER_P (y))
3178 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3179 SUBREG_BYTE (x));
3181 if (new_rtx != 0)
3182 *xp = new_rtx;
3183 else if (final_p && REG_P (y))
3185 /* Simplify_subreg can't handle some REG cases, but we have to. */
3186 unsigned int regno;
3187 HOST_WIDE_INT offset;
3189 regno = subreg_regno (x);
3190 if (subreg_lowpart_p (x))
3191 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3192 else
3193 offset = SUBREG_BYTE (x);
3194 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3198 return *xp;
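/* Worked example of the offset adjustment above (illustrative values
   only): for a paradoxical (subreg:DI (mem:SI ...) 0) on a target with
   4-byte words and both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN set,
   difference = 4 - 8 = -4, so the word term contributes
   (-4 / 4) * 4 = -4 and the byte term -4 % 4 = 0; the adjusted DImode
   reference therefore starts one word before the original SImode MEM.  */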
3201 /* Do alter_subreg on all the SUBREGs contained in X. */
3203 static rtx
3204 walk_alter_subreg (rtx *xp, bool *changed)
3206 rtx x = *xp;
3207 switch (GET_CODE (x))
3209 case PLUS:
3210 case MULT:
3211 case AND:
3212 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3213 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3214 break;
3216 case MEM:
3217 case ZERO_EXTEND:
3218 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3219 break;
3221 case SUBREG:
3222 *changed = true;
3223 return alter_subreg (xp, true);
3225 default:
3226 break;
3229 return *xp;
3232 #if HAVE_cc0
3234 /* Given BODY, the body of a jump instruction, alter the jump condition
3235 as required by the bits that are set in cc_status.flags.
3236 Not all of the bits there can be handled at this level in all cases.
3238 The value is normally 0.
3239 1 means that the condition has become always true.
3240 -1 means that the condition has become always false.
3241 2 means that COND has been altered. */
3243 static int
3244 alter_cond (rtx cond)
3246 int value = 0;
3248 if (cc_status.flags & CC_REVERSED)
3250 value = 2;
3251 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3254 if (cc_status.flags & CC_INVERTED)
3256 value = 2;
3257 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3260 if (cc_status.flags & CC_NOT_POSITIVE)
3261 switch (GET_CODE (cond))
3263 case LE:
3264 case LEU:
3265 case GEU:
3266 /* Jump becomes unconditional. */
3267 return 1;
3269 case GT:
3270 case GTU:
3271 case LTU:
3272 /* Jump becomes no-op. */
3273 return -1;
3275 case GE:
3276 PUT_CODE (cond, EQ);
3277 value = 2;
3278 break;
3280 case LT:
3281 PUT_CODE (cond, NE);
3282 value = 2;
3283 break;
3285 default:
3286 break;
3289 if (cc_status.flags & CC_NOT_NEGATIVE)
3290 switch (GET_CODE (cond))
3292 case GE:
3293 case GEU:
3294 /* Jump becomes unconditional. */
3295 return 1;
3297 case LT:
3298 case LTU:
3299 /* Jump becomes no-op. */
3300 return -1;
3302 case LE:
3303 case LEU:
3304 PUT_CODE (cond, EQ);
3305 value = 2;
3306 break;
3308 case GT:
3309 case GTU:
3310 PUT_CODE (cond, NE);
3311 value = 2;
3312 break;
3314 default:
3315 break;
3318 if (cc_status.flags & CC_NO_OVERFLOW)
3319 switch (GET_CODE (cond))
3321 case GEU:
3322 /* Jump becomes unconditional. */
3323 return 1;
3325 case LEU:
3326 PUT_CODE (cond, EQ);
3327 value = 2;
3328 break;
3330 case GTU:
3331 PUT_CODE (cond, NE);
3332 value = 2;
3333 break;
3335 case LTU:
3336 /* Jump becomes no-op. */
3337 return -1;
3339 default:
3340 break;
3343 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3344 switch (GET_CODE (cond))
3346 default:
3347 gcc_unreachable ();
3349 case NE:
3350 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3351 value = 2;
3352 break;
3354 case EQ:
3355 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3356 value = 2;
3357 break;
3360 if (cc_status.flags & CC_NOT_SIGNED)
3361 /* The flags are valid if signed condition operators are converted
3362 to unsigned. */
3363 switch (GET_CODE (cond))
3365 case LE:
3366 PUT_CODE (cond, LEU);
3367 value = 2;
3368 break;
3370 case LT:
3371 PUT_CODE (cond, LTU);
3372 value = 2;
3373 break;
3375 case GT:
3376 PUT_CODE (cond, GTU);
3377 value = 2;
3378 break;
3380 case GE:
3381 PUT_CODE (cond, GEU);
3382 value = 2;
3383 break;
3385 default:
3386 break;
3389 return value;
3391 #endif
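/* Illustrative effect of alter_cond (cc0 targets only, hypothetical
   situation): if the port recorded CC_NOT_NEGATIVE after an insn whose
   result is known to be non-negative, a pending (lt (cc0) (const_int 0))
   branch makes alter_cond return -1 and the jump is deleted as a no-op,
   (ge ...) returns 1 and the jump becomes unconditional, and (gt ...)
   is rewritten to NE with a return value of 2, so the caller marks the
   insn for re-recognition.  */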
3393 /* Report inconsistency between the assembler template and the operands.
3394 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3396 void
3397 output_operand_lossage (const char *cmsgid, ...)
3399 char *fmt_string;
3400 char *new_message;
3401 const char *pfx_str;
3402 va_list ap;
3404 va_start (ap, cmsgid);
3406 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3407 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3408 new_message = xvasprintf (fmt_string, ap);
3410 if (this_is_asm_operands)
3411 error_for_asm (this_is_asm_operands, "%s", new_message);
3412 else
3413 internal_error ("%s", new_message);
3415 free (fmt_string);
3416 free (new_message);
3417 va_end (ap);
3420 /* Output of assembler code from a template, and its subroutines. */
3422 /* Annotate the assembly with a comment describing the pattern and
3423 alternative used. */
3425 static void
3426 output_asm_name (void)
3428 if (debug_insn)
3430 int num = INSN_CODE (debug_insn);
3431 fprintf (asm_out_file, "\t%s %d\t%s",
3432 ASM_COMMENT_START, INSN_UID (debug_insn),
3433 insn_data[num].name);
3434 if (insn_data[num].n_alternatives > 1)
3435 fprintf (asm_out_file, "/%d", which_alternative + 1);
3437 if (HAVE_ATTR_length)
3438 fprintf (asm_out_file, "\t[length = %d]",
3439 get_attr_length (debug_insn));
3441 /* Clear this so only the first assembler insn
3442 of any rtl insn will get the special comment for -dp. */
3443 debug_insn = 0;
3447 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3448 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3449 corresponds to the address of the object and 0 if to the object. */
3451 static tree
3452 get_mem_expr_from_op (rtx op, int *paddressp)
3454 tree expr;
3455 int inner_addressp;
3457 *paddressp = 0;
3459 if (REG_P (op))
3460 return REG_EXPR (op);
3461 else if (!MEM_P (op))
3462 return 0;
3464 if (MEM_EXPR (op) != 0)
3465 return MEM_EXPR (op);
3467 /* Otherwise we have an address, so indicate it and look at the address. */
3468 *paddressp = 1;
3469 op = XEXP (op, 0);
3471 /* First check if we have a decl for the address, then look at the right side
3472 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3473 But don't allow the address itself to be indirect. */
3474 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3475 return expr;
3476 else if (GET_CODE (op) == PLUS
3477 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3478 return expr;
3480 while (UNARY_P (op)
3481 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3482 op = XEXP (op, 0);
3484 expr = get_mem_expr_from_op (op, &inner_addressp);
3485 return inner_addressp ? 0 : expr;
3488 /* Output operand names for assembler instructions. OPERANDS is the
3489 operand vector, OPORDER is the order to write the operands, and NOPS
3490 is the number of operands to write. */
3492 static void
3493 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3495 int wrote = 0;
3496 int i;
3498 for (i = 0; i < nops; i++)
3500 int addressp;
3501 rtx op = operands[oporder[i]];
3502 tree expr = get_mem_expr_from_op (op, &addressp);
3504 fprintf (asm_out_file, "%c%s",
3505 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3506 wrote = 1;
3507 if (expr)
3509 fprintf (asm_out_file, "%s",
3510 addressp ? "*" : "");
3511 print_mem_expr (asm_out_file, expr);
3512 wrote = 1;
3514 else if (REG_P (op) && ORIGINAL_REGNO (op)
3515 && ORIGINAL_REGNO (op) != REGNO (op))
3516 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3520 #ifdef ASSEMBLER_DIALECT
3521 /* Helper function to parse assembler dialects in the asm string.
3522 This is called from output_asm_insn and asm_fprintf. */
3523 static const char *
3524 do_assembler_dialects (const char *p, int *dialect)
3526 char c = *(p - 1);
3528 switch (c)
3530 case '{':
3532 int i;
3534 if (*dialect)
3535 output_operand_lossage ("nested assembly dialect alternatives");
3536 else
3537 *dialect = 1;
3539 /* If we want the first dialect, do nothing. Otherwise, skip
3540 DIALECT_NUMBER of strings ending with '|'. */
3541 for (i = 0; i < dialect_number; i++)
3543 while (*p && *p != '}')
3545 if (*p == '|')
3547 p++;
3548 break;
3551 /* Skip over any character after a percent sign. */
3552 if (*p == '%')
3553 p++;
3554 if (*p)
3555 p++;
3558 if (*p == '}')
3559 break;
3562 if (*p == '\0')
3563 output_operand_lossage ("unterminated assembly dialect alternative");
3565 break;
3567 case '|':
3568 if (*dialect)
3570 /* Skip to close brace. */
3573 if (*p == '\0')
3575 output_operand_lossage ("unterminated assembly dialect alternative");
3576 break;
3579 /* Skip over any character after a percent sign. */
3580 if (*p == '%' && p[1])
3582 p += 2;
3583 continue;
3586 if (*p++ == '}')
3587 break;
3589 while (1);
3591 *dialect = 0;
3593 else
3594 putc (c, asm_out_file);
3595 break;
3597 case '}':
3598 if (! *dialect)
3599 putc (c, asm_out_file);
3600 *dialect = 0;
3601 break;
3602 default:
3603 gcc_unreachable ();
3606 return p;
3608 #endif
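/* Illustrative template using the "{...|...}" syntax handled above
   (hypothetical mnemonics): with ASSEMBLER_DIALECT defined and
   dialect_number == 1, the template

	"{movl\t%1, %0|mov\t%0, %1}"

   emits only the text after the '|'; with dialect_number == 0, only the
   text before it.  Nested "{" or a missing "}" is reported through
   output_operand_lossage.  */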
3610 /* Output text from TEMPLATE to the assembler output file,
3611 obeying %-directives to substitute operands taken from
3612 the vector OPERANDS.
3614 %N (for N a digit) means print operand N in usual manner.
3615 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3616 and print the label name with no punctuation.
3617 %cN means require operand N to be a constant
3618 and print the constant expression with no punctuation.
3619 %aN means expect operand N to be a memory address
3620 (not a memory reference!) and print a reference
3621 to that address.
3622 %nN means expect operand N to be a constant
3623 and print a constant expression for minus the value
3624 of the operand, with no other punctuation. */
3626 void
3627 output_asm_insn (const char *templ, rtx *operands)
3629 const char *p;
3630 int c;
3631 #ifdef ASSEMBLER_DIALECT
3632 int dialect = 0;
3633 #endif
3634 int oporder[MAX_RECOG_OPERANDS];
3635 char opoutput[MAX_RECOG_OPERANDS];
3636 int ops = 0;
3638 /* An insn may return a null string template
3639 in a case where no assembler code is needed. */
3640 if (*templ == 0)
3641 return;
3643 memset (opoutput, 0, sizeof opoutput);
3644 p = templ;
3645 putc ('\t', asm_out_file);
3647 #ifdef ASM_OUTPUT_OPCODE
3648 ASM_OUTPUT_OPCODE (asm_out_file, p);
3649 #endif
3651 while ((c = *p++))
3652 switch (c)
3654 case '\n':
3655 if (flag_verbose_asm)
3656 output_asm_operand_names (operands, oporder, ops);
3657 if (flag_print_asm_name)
3658 output_asm_name ();
3660 ops = 0;
3661 memset (opoutput, 0, sizeof opoutput);
3663 putc (c, asm_out_file);
3664 #ifdef ASM_OUTPUT_OPCODE
3665 while ((c = *p) == '\t')
3667 putc (c, asm_out_file);
3668 p++;
3670 ASM_OUTPUT_OPCODE (asm_out_file, p);
3671 #endif
3672 break;
3674 #ifdef ASSEMBLER_DIALECT
3675 case '{':
3676 case '}':
3677 case '|':
3678 p = do_assembler_dialects (p, &dialect);
3679 break;
3680 #endif
3682 case '%':
3683 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3684 if ASSEMBLER_DIALECT is defined and these characters have a special
3685 meaning as dialect delimiters. */
3686 if (*p == '%'
3687 #ifdef ASSEMBLER_DIALECT
3688 || *p == '{' || *p == '}' || *p == '|'
3689 #endif
3692 putc (*p, asm_out_file);
3693 p++;
3695 /* %= outputs a number which is unique to each insn in the entire
3696 compilation. This is useful for making local labels that are
3697 referred to more than once in a given insn. */
3698 else if (*p == '=')
3700 p++;
3701 fprintf (asm_out_file, "%d", insn_counter);
3703 /* % followed by a letter and some digits
3704 outputs an operand in a special way depending on the letter.
3705 Letters `acln' are implemented directly.
3706 Other letters are passed to `output_operand' so that
3707 the TARGET_PRINT_OPERAND hook can define them. */
3708 else if (ISALPHA (*p))
3710 int letter = *p++;
3711 unsigned long opnum;
3712 char *endptr;
3714 opnum = strtoul (p, &endptr, 10);
3716 if (endptr == p)
3717 output_operand_lossage ("operand number missing "
3718 "after %%-letter");
3719 else if (this_is_asm_operands && opnum >= insn_noperands)
3720 output_operand_lossage ("operand number out of range");
3721 else if (letter == 'l')
3722 output_asm_label (operands[opnum]);
3723 else if (letter == 'a')
3724 output_address (VOIDmode, operands[opnum]);
3725 else if (letter == 'c')
3727 if (CONSTANT_ADDRESS_P (operands[opnum]))
3728 output_addr_const (asm_out_file, operands[opnum]);
3729 else
3730 output_operand (operands[opnum], 'c');
3732 else if (letter == 'n')
3734 if (CONST_INT_P (operands[opnum]))
3735 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3736 - INTVAL (operands[opnum]));
3737 else
3739 putc ('-', asm_out_file);
3740 output_addr_const (asm_out_file, operands[opnum]);
3743 else
3744 output_operand (operands[opnum], letter);
3746 if (!opoutput[opnum])
3747 oporder[ops++] = opnum;
3748 opoutput[opnum] = 1;
3750 p = endptr;
3751 c = *p;
3753 /* % followed by a digit outputs an operand the default way. */
3754 else if (ISDIGIT (*p))
3756 unsigned long opnum;
3757 char *endptr;
3759 opnum = strtoul (p, &endptr, 10);
3760 if (this_is_asm_operands && opnum >= insn_noperands)
3761 output_operand_lossage ("operand number out of range");
3762 else
3763 output_operand (operands[opnum], 0);
3765 if (!opoutput[opnum])
3766 oporder[ops++] = opnum;
3767 opoutput[opnum] = 1;
3769 p = endptr;
3770 c = *p;
3772 /* % followed by punctuation: output something for that
3773 punctuation character alone, with no operand. The
3774 TARGET_PRINT_OPERAND hook decides what is actually done. */
3775 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3776 output_operand (NULL_RTX, *p++);
3777 else
3778 output_operand_lossage ("invalid %%-code");
3779 break;
3781 default:
3782 putc (c, asm_out_file);
3785 /* Write out the variable names for operands, if we know them. */
3786 if (flag_verbose_asm)
3787 output_asm_operand_names (operands, oporder, ops);
3788 if (flag_print_asm_name)
3789 output_asm_name ();
3791 putc ('\n', asm_out_file);
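/* Illustrative use of the %-directives documented above (hypothetical
   template and operands): for "add %0,%n1" with operands[1] equal to
   (const_int 8), "%0" is printed through the target's
   TARGET_PRINT_OPERAND hook and "%n1" prints "-8"; a "%=" anywhere in a
   template prints a number unique to this insn, which is convenient for
   generating local labels.  */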
3794 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3796 void
3797 output_asm_label (rtx x)
3799 char buf[256];
3801 if (GET_CODE (x) == LABEL_REF)
3802 x = LABEL_REF_LABEL (x);
3803 if (LABEL_P (x)
3804 || (NOTE_P (x)
3805 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3806 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3807 else
3808 output_operand_lossage ("'%%l' operand isn't a label");
3810 assemble_name (asm_out_file, buf);
3813 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3815 void
3816 mark_symbol_refs_as_used (rtx x)
3818 subrtx_iterator::array_type array;
3819 FOR_EACH_SUBRTX (iter, array, x, ALL)
3821 const_rtx x = *iter;
3822 if (GET_CODE (x) == SYMBOL_REF)
3823 if (tree t = SYMBOL_REF_DECL (x))
3824 assemble_external (t);
3828 /* Print operand X using machine-dependent assembler syntax.
3829 CODE is a non-digit that preceded the operand-number in the % spec,
3830 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3831 between the % and the digits.
3832 When CODE is a non-letter, X is 0.
3834 The meanings of the letters are machine-dependent and controlled
3835 by TARGET_PRINT_OPERAND. */
3837 void
3838 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3840 if (x && GET_CODE (x) == SUBREG)
3841 x = alter_subreg (&x, true);
3843 /* X must not be a pseudo reg. */
3844 if (!targetm.no_register_allocation)
3845 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3847 targetm.asm_out.print_operand (asm_out_file, x, code);
3849 if (x == NULL_RTX)
3850 return;
3852 mark_symbol_refs_as_used (x);
3855 /* Print a memory reference operand for address X using
3856 machine-dependent assembler syntax. */
3858 void
3859 output_address (machine_mode mode, rtx x)
3861 bool changed = false;
3862 walk_alter_subreg (&x, &changed);
3863 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3866 /* Print an integer constant expression in assembler syntax.
3867 Addition and subtraction are the only arithmetic
3868 that may appear in these expressions. */
3870 void
3871 output_addr_const (FILE *file, rtx x)
3873 char buf[256];
3875 restart:
3876 switch (GET_CODE (x))
3878 case PC:
3879 putc ('.', file);
3880 break;
3882 case SYMBOL_REF:
3883 if (SYMBOL_REF_DECL (x))
3884 assemble_external (SYMBOL_REF_DECL (x));
3885 #ifdef ASM_OUTPUT_SYMBOL_REF
3886 ASM_OUTPUT_SYMBOL_REF (file, x);
3887 #else
3888 assemble_name (file, XSTR (x, 0));
3889 #endif
3890 break;
3892 case LABEL_REF:
3893 x = LABEL_REF_LABEL (x);
3894 /* Fall through. */
3895 case CODE_LABEL:
3896 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3897 #ifdef ASM_OUTPUT_LABEL_REF
3898 ASM_OUTPUT_LABEL_REF (file, buf);
3899 #else
3900 assemble_name (file, buf);
3901 #endif
3902 break;
3904 case CONST_INT:
3905 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3906 break;
3908 case CONST:
3909 /* This used to output parentheses around the expression,
3910 but that does not work on the 386 (either ATT or BSD assembler). */
3911 output_addr_const (file, XEXP (x, 0));
3912 break;
3914 case CONST_WIDE_INT:
3915 /* We do not know the mode here, so we have to use a roundabout
3916 way to build a wide-int to get it printed properly. */
3918 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3919 CONST_WIDE_INT_NUNITS (x),
3920 CONST_WIDE_INT_NUNITS (x)
3921 * HOST_BITS_PER_WIDE_INT,
3922 false);
3923 print_decs (w, file);
3925 break;
3927 case CONST_DOUBLE:
3928 if (CONST_DOUBLE_AS_INT_P (x))
3930 /* We can use %d if the number is one word and positive. */
3931 if (CONST_DOUBLE_HIGH (x))
3932 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3933 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3934 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3935 else if (CONST_DOUBLE_LOW (x) < 0)
3936 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3937 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3938 else
3939 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3941 else
3942 /* We can't handle floating point constants;
3943 PRINT_OPERAND must handle them. */
3944 output_operand_lossage ("floating constant misused");
3945 break;
3947 case CONST_FIXED:
3948 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3949 break;
3951 case PLUS:
3952 /* Some assemblers need integer constants to appear last (e.g. masm). */
3953 if (CONST_INT_P (XEXP (x, 0)))
3955 output_addr_const (file, XEXP (x, 1));
3956 if (INTVAL (XEXP (x, 0)) >= 0)
3957 fprintf (file, "+");
3958 output_addr_const (file, XEXP (x, 0));
3960 else
3962 output_addr_const (file, XEXP (x, 0));
3963 if (!CONST_INT_P (XEXP (x, 1))
3964 || INTVAL (XEXP (x, 1)) >= 0)
3965 fprintf (file, "+");
3966 output_addr_const (file, XEXP (x, 1));
3968 break;
3970 case MINUS:
3971 /* Avoid outputting things like x-x or x+5-x,
3972 since some assemblers can't handle that. */
3973 x = simplify_subtraction (x);
3974 if (GET_CODE (x) != MINUS)
3975 goto restart;
3977 output_addr_const (file, XEXP (x, 0));
3978 fprintf (file, "-");
3979 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3980 || GET_CODE (XEXP (x, 1)) == PC
3981 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3982 output_addr_const (file, XEXP (x, 1));
3983 else
3985 fputs (targetm.asm_out.open_paren, file);
3986 output_addr_const (file, XEXP (x, 1));
3987 fputs (targetm.asm_out.close_paren, file);
3989 break;
3991 case ZERO_EXTEND:
3992 case SIGN_EXTEND:
3993 case SUBREG:
3994 case TRUNCATE:
3995 output_addr_const (file, XEXP (x, 0));
3996 break;
3998 default:
3999 if (targetm.asm_out.output_addr_const_extra (file, x))
4000 break;
4002 output_operand_lossage ("invalid expression as operand");
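/* Illustrative outputs of the cases above (generic assembler syntax):
   (plus (const_int 4) (symbol_ref "x")) prints as "x+4" because the
   integer is reordered to come last; (plus (symbol_ref "x")
   (const_int -4)) prints as "x-4" since no '+' is emitted before a
   negative constant; and (const_int 10) prints simply as "10".  */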
4006 /* Output a quoted string. */
4008 void
4009 output_quoted_string (FILE *asm_file, const char *string)
4011 #ifdef OUTPUT_QUOTED_STRING
4012 OUTPUT_QUOTED_STRING (asm_file, string);
4013 #else
4014 char c;
4016 putc ('\"', asm_file);
4017 while ((c = *string++) != 0)
4019 if (ISPRINT (c))
4021 if (c == '\"' || c == '\\')
4022 putc ('\\', asm_file);
4023 putc (c, asm_file);
4025 else
4026 fprintf (asm_file, "\\%03o", (unsigned char) c);
4028 putc ('\"', asm_file);
4029 #endif
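/* Example (default path, when OUTPUT_QUOTED_STRING is not defined):
   output_quoted_string (f, "a\"b\n") writes  "a\"b\012" : printable
   characters are copied, '"' and '\\' are backslash-escaped, and all
   other characters are emitted as three-digit octal escapes.  */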
4032 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4034 void
4035 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4037 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4038 if (value == 0)
4039 putc ('0', f);
4040 else
4042 char *p = buf + sizeof (buf);
4044 *--p = "0123456789abcdef"[value % 16];
4045 while ((value /= 16) != 0);
4046 *--p = 'x';
4047 *--p = '0';
4048 fwrite (p, 1, buf + sizeof (buf) - p, f);
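/* Example: fprint_whex (f, 64) writes "0x40" and fprint_whex (f, 0)
   writes just "0"; the hex digits are generated from the least
   significant nibble upward into the tail of BUF and emitted with a
   single fwrite.  */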
4052 /* Internal function that prints an unsigned long in decimal in reverse.
4053 The output string IS NOT null-terminated. */
4055 static int
4056 sprint_ul_rev (char *s, unsigned long value)
4058 int i = 0;
4061 s[i] = "0123456789"[value % 10];
4062 value /= 10;
4063 i++;
4064 /* alternate version, without modulo */
4065 /* oldval = value; */
4066 /* value /= 10; */
4067 /* s[i] = "0123456789" [oldval - 10*value]; */
4068 /* i++ */
4070 while (value != 0);
4071 return i;
4074 /* Write an unsigned long as decimal to a file, fast. */
4076 void
4077 fprint_ul (FILE *f, unsigned long value)
4079 /* python says: len(str(2**64)) == 20 */
4080 char s[20];
4081 int i;
4083 i = sprint_ul_rev (s, value);
4085 /* It's probably too small to bother with string reversal and fputs. */
4088 i--;
4089 putc (s[i], f);
4091 while (i != 0);
4094 /* Write an unsigned long as decimal to a string, fast.
4095 s must be wide enough to not overflow, at least 21 chars.
4096 Returns the length of the string (without terminating '\0'). */
4099 sprint_ul (char *s, unsigned long value)
4101 int len = sprint_ul_rev (s, value);
4102 s[len] = '\0';
4104 std::reverse (s, s + len);
4105 return len;
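#if 0
/* Minimal usage sketch of the helpers above (disabled; purely
   illustrative, values are arbitrary).  */
static void
sprint_ul_example (FILE *f)
{
  char s[21];				/* sprint_ul needs at least 21 chars.  */
  int len = sprint_ul (s, 1234UL);	/* s now holds "1234", len == 4.  */
  fputs (s, f);
  fprint_ul (f, 1234UL);		/* Writes "1234" without building a string.  */
  (void) len;
}
#endif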
4108 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4109 %R prints the value of REGISTER_PREFIX.
4110 %L prints the value of LOCAL_LABEL_PREFIX.
4111 %U prints the value of USER_LABEL_PREFIX.
4112 %I prints the value of IMMEDIATE_PREFIX.
4113 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4114 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4116 We handle alternate assembler dialects here, just like output_asm_insn. */
4118 void
4119 asm_fprintf (FILE *file, const char *p, ...)
4121 char buf[10];
4122 char *q, c;
4123 #ifdef ASSEMBLER_DIALECT
4124 int dialect = 0;
4125 #endif
4126 va_list argptr;
4128 va_start (argptr, p);
4130 buf[0] = '%';
4132 while ((c = *p++))
4133 switch (c)
4135 #ifdef ASSEMBLER_DIALECT
4136 case '{':
4137 case '}':
4138 case '|':
4139 p = do_assembler_dialects (p, &dialect);
4140 break;
4141 #endif
4143 case '%':
4144 c = *p++;
4145 q = &buf[1];
4146 while (strchr ("-+ #0", c))
4148 *q++ = c;
4149 c = *p++;
4151 while (ISDIGIT (c) || c == '.')
4153 *q++ = c;
4154 c = *p++;
4156 switch (c)
4158 case '%':
4159 putc ('%', file);
4160 break;
4162 case 'd': case 'i': case 'u':
4163 case 'x': case 'X': case 'o':
4164 case 'c':
4165 *q++ = c;
4166 *q = 0;
4167 fprintf (file, buf, va_arg (argptr, int));
4168 break;
4170 case 'w':
4171 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4172 'o' cases, but we do not check for those cases. It
4173 means that the value is a HOST_WIDE_INT, which may be
4174 either `long' or `long long'. */
4175 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4176 q += strlen (HOST_WIDE_INT_PRINT);
4177 *q++ = *p++;
4178 *q = 0;
4179 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4180 break;
4182 case 'l':
4183 *q++ = c;
4184 #ifdef HAVE_LONG_LONG
4185 if (*p == 'l')
4187 *q++ = *p++;
4188 *q++ = *p++;
4189 *q = 0;
4190 fprintf (file, buf, va_arg (argptr, long long));
4192 else
4193 #endif
4195 *q++ = *p++;
4196 *q = 0;
4197 fprintf (file, buf, va_arg (argptr, long));
4200 break;
4202 case 's':
4203 *q++ = c;
4204 *q = 0;
4205 fprintf (file, buf, va_arg (argptr, char *));
4206 break;
4208 case 'O':
4209 #ifdef ASM_OUTPUT_OPCODE
4210 ASM_OUTPUT_OPCODE (asm_out_file, p);
4211 #endif
4212 break;
4214 case 'R':
4215 #ifdef REGISTER_PREFIX
4216 fprintf (file, "%s", REGISTER_PREFIX);
4217 #endif
4218 break;
4220 case 'I':
4221 #ifdef IMMEDIATE_PREFIX
4222 fprintf (file, "%s", IMMEDIATE_PREFIX);
4223 #endif
4224 break;
4226 case 'L':
4227 #ifdef LOCAL_LABEL_PREFIX
4228 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4229 #endif
4230 break;
4232 case 'U':
4233 fputs (user_label_prefix, file);
4234 break;
4236 #ifdef ASM_FPRINTF_EXTENSIONS
4237 /* Uppercase letters are reserved for general use by asm_fprintf
4238 and so are not available to target specific code. In order to
4239 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4240 they are defined here. As they get turned into real extensions
4241 to asm_fprintf they should be removed from this list. */
4242 case 'A': case 'B': case 'C': case 'D': case 'E':
4243 case 'F': case 'G': case 'H': case 'J': case 'K':
4244 case 'M': case 'N': case 'P': case 'Q': case 'S':
4245 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4246 break;
4248 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4249 #endif
4250 default:
4251 gcc_unreachable ();
4252 }
4253 break;
4255 default:
4256 putc (c, file);
4257 }
4258 va_end (argptr);
4259 }
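/* Illustrative sketch of the dialect handling above (the mnemonic is
   hypothetical): on a target that defines ASSEMBLER_DIALECT with two
   dialects,

       asm_fprintf (file, "\tfoo{s|l}\t%R%s\n", reg_names[regno]);

   emits "foos ..." for dialect 0 and "fool ..." for dialect 1, just as
   do_assembler_dialects does for output_asm_insn templates.  */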
4261 /* Return nonzero if this function has no function calls. */
4263 int
4264 leaf_function_p (void)
4265 {
4266 rtx_insn *insn;
4268 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4269 functions even if they call mcount. */
4270 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4271 return 0;
4273 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4274 {
4275 if (CALL_P (insn)
4276 && ! SIBLING_CALL_P (insn))
4277 return 0;
4278 if (NONJUMP_INSN_P (insn)
4279 && GET_CODE (PATTERN (insn)) == SEQUENCE
4280 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4281 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4282 return 0;
4283 }
4285 return 1;
4286 }
4288 /* Return 1 if branch is a forward branch.
4289 Uses insn_shuid array, so it works only in the final pass. May be used by
4290 output templates to add customary branch prediction hints.
4291 */
4292 int
4293 final_forward_branch_p (rtx_insn *insn)
4294 {
4295 int insn_id, label_id;
4297 gcc_assert (uid_shuid);
4298 insn_id = INSN_SHUID (insn);
4299 label_id = INSN_SHUID (JUMP_LABEL (insn));
4300 /* We've hit some insns that do not have id information available. */
4301 gcc_assert (insn_id && label_id);
4302 return insn_id < label_id;
4303 }
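/* Illustrative use in a target's output template (the mnemonic suffixes are
   hypothetical): a port can pick a static prediction hint from the branch
   direction, e.g.

       if (final_forward_branch_p (insn))
         return "beq,pn\t%l0";    (forward: predict not taken)
       else
         return "beq,pt\t%l0";    (backward: predict taken)

   inside the C fragment of a conditional-branch insn pattern.  */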
4305 /* On some machines, a function with no call insns
4306 can run faster if it doesn't create its own register window.
4307 When output, the leaf function should use only the "output"
4308 registers. Ordinarily, the function would be compiled to use
4309 the "input" registers to find its arguments; it is a candidate
4310 for leaf treatment if it uses only the "input" registers.
4311 Leaf function treatment means renumbering so the function
4312 uses the "output" registers instead. */
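/* Rough sketch (SPARC-like register windows, simplified; the names below are
   placeholders, not the real target definitions):

       #define LEAF_REGISTERS (leaf_reg_ok_table)
       #define LEAF_REG_REMAP(REGNO) (leaf_reg_remap_table[(REGNO)])

   where the table marks which registers a leaf function may keep using and
   the remap sends each "input" register to the matching "output" register,
   so no register window needs to be allocated.  */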
4314 #ifdef LEAF_REGISTERS
4316 /* Return 1 if this function uses only the registers that can be
4317 safely renumbered. */
4319 int
4320 only_leaf_regs_used (void)
4321 {
4322 int i;
4323 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4325 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4326 if ((df_regs_ever_live_p (i) || global_regs[i])
4327 && ! permitted_reg_in_leaf_functions[i])
4328 return 0;
4330 if (crtl->uses_pic_offset_table
4331 && pic_offset_table_rtx != 0
4332 && REG_P (pic_offset_table_rtx)
4333 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4334 return 0;
4336 return 1;
4337 }
4339 /* Scan all instructions and renumber all registers into those
4340 available in leaf functions. */
4342 static void
4343 leaf_renumber_regs (rtx_insn *first)
4344 {
4345 rtx_insn *insn;
4347 /* Renumber only the actual patterns.
4348 The reg-notes can contain frame pointer refs,
4349 and renumbering them could crash, and should not be needed. */
4350 for (insn = first; insn; insn = NEXT_INSN (insn))
4351 if (INSN_P (insn))
4352 leaf_renumber_regs_insn (PATTERN (insn));
4353 }
4355 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4356 available in leaf functions. */
4358 void
4359 leaf_renumber_regs_insn (rtx in_rtx)
4360 {
4361 int i, j;
4362 const char *format_ptr;
4364 if (in_rtx == 0)
4365 return;
4367 /* Renumber all input-registers into output-registers.
4368 renumbered_regs would be 1 for an output-register;
4369 they are remapped below via LEAF_REG_REMAP. */
4371 if (REG_P (in_rtx))
4372 {
4373 int newreg;
4375 /* Don't renumber the same reg twice. */
4376 if (in_rtx->used)
4377 return;
4379 newreg = REGNO (in_rtx);
4380 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4381 to reach here as part of a REG_NOTE. */
4382 if (newreg >= FIRST_PSEUDO_REGISTER)
4383 {
4384 in_rtx->used = 1;
4385 return;
4386 }
4387 newreg = LEAF_REG_REMAP (newreg);
4388 gcc_assert (newreg >= 0);
4389 df_set_regs_ever_live (REGNO (in_rtx), false);
4390 df_set_regs_ever_live (newreg, true);
4391 SET_REGNO (in_rtx, newreg);
4392 in_rtx->used = 1;
4393 return;
4394 }
4396 if (INSN_P (in_rtx))
4397 {
4398 /* Inside a SEQUENCE, we find insns.
4399 Renumber just the patterns of these insns,
4400 just as we do for the top-level insns. */
4401 leaf_renumber_regs_insn (PATTERN (in_rtx));
4402 return;
4403 }
4405 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4407 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4408 switch (*format_ptr++)
4409 {
4410 case 'e':
4411 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4412 break;
4414 case 'E':
4415 if (NULL != XVEC (in_rtx, i))
4416 {
4417 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4418 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4419 }
4420 break;
4422 case 'S':
4423 case 's':
4424 case '0':
4425 case 'i':
4426 case 'w':
4427 case 'n':
4428 case 'u':
4429 break;
4431 default:
4432 gcc_unreachable ();
4433 }
4434 }
4435 #endif
4437 /* Turn the RTL into assembly. */
4438 static unsigned int
4439 rest_of_handle_final (void)
4440 {
4441 const char *fnname = get_fnname_from_decl (current_function_decl);
4443 assemble_start_function (current_function_decl, fnname);
4444 final_start_function (get_insns (), asm_out_file, optimize);
4445 final (get_insns (), asm_out_file, optimize);
4446 if (flag_ipa_ra)
4447 collect_fn_hard_reg_usage ();
4448 final_end_function ();
4450 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4451 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4452 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4453 output_function_exception_table (fnname);
4455 assemble_end_function (current_function_decl, fnname);
4457 user_defined_section_attribute = false;
4459 /* Free up reg info memory. */
4460 free_reg_info ();
4462 if (! quiet_flag)
4463 fflush (asm_out_file);
4465 /* Write DBX symbols if requested. */
4467 /* Note that for those inline functions where we don't initially
4468 know for certain that we will be generating an out-of-line copy,
4469 the first invocation of this routine (rest_of_compilation) will
4470 skip over this code by doing a `goto exit_rest_of_compilation;'.
4471 Later on, wrapup_global_declarations will (indirectly) call
4472 rest_of_compilation again for those inline functions that need
4473 to have out-of-line copies generated. During that call, we
4474 *will* be routed past here. */
4476 timevar_push (TV_SYMOUT);
4477 if (!DECL_IGNORED_P (current_function_decl))
4478 debug_hooks->function_decl (current_function_decl);
4479 timevar_pop (TV_SYMOUT);
4481 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4482 DECL_INITIAL (current_function_decl) = error_mark_node;
4484 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4485 && targetm.have_ctors_dtors)
4486 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4487 decl_init_priority_lookup
4488 (current_function_decl));
4489 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4490 && targetm.have_ctors_dtors)
4491 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4492 decl_fini_priority_lookup
4493 (current_function_decl));
4494 return 0;
4495 }
4497 namespace {
4499 const pass_data pass_data_final =
4500 {
4501 RTL_PASS, /* type */
4502 "final", /* name */
4503 OPTGROUP_NONE, /* optinfo_flags */
4504 TV_FINAL, /* tv_id */
4505 0, /* properties_required */
4506 0, /* properties_provided */
4507 0, /* properties_destroyed */
4508 0, /* todo_flags_start */
4509 0, /* todo_flags_finish */
4510 };
4512 class pass_final : public rtl_opt_pass
4513 {
4514 public:
4515 pass_final (gcc::context *ctxt)
4516 : rtl_opt_pass (pass_data_final, ctxt)
4517 {}
4519 /* opt_pass methods: */
4520 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4522 }; // class pass_final
4524 } // anon namespace
4526 rtl_opt_pass *
4527 make_pass_final (gcc::context *ctxt)
4528 {
4529 return new pass_final (ctxt);
4530 }
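/* For reference: the pass built here is scheduled from passes.def, roughly

       NEXT_PASS (pass_final);

   near the end of the rest-of-compilation pipeline (see passes.def for the
   exact position).  */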
4533 static unsigned int
4534 rest_of_handle_shorten_branches (void)
4535 {
4536 /* Shorten branches. */
4537 shorten_branches (get_insns ());
4538 return 0;
4539 }
4541 namespace {
4543 const pass_data pass_data_shorten_branches =
4544 {
4545 RTL_PASS, /* type */
4546 "shorten", /* name */
4547 OPTGROUP_NONE, /* optinfo_flags */
4548 TV_SHORTEN_BRANCH, /* tv_id */
4549 0, /* properties_required */
4550 0, /* properties_provided */
4551 0, /* properties_destroyed */
4552 0, /* todo_flags_start */
4553 0, /* todo_flags_finish */
4554 };
4556 class pass_shorten_branches : public rtl_opt_pass
4557 {
4558 public:
4559 pass_shorten_branches (gcc::context *ctxt)
4560 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4561 {}
4563 /* opt_pass methods: */
4564 virtual unsigned int execute (function *)
4565 {
4566 return rest_of_handle_shorten_branches ();
4567 }
4569 }; // class pass_shorten_branches
4571 } // anon namespace
4573 rtl_opt_pass *
4574 make_pass_shorten_branches (gcc::context *ctxt)
4575 {
4576 return new pass_shorten_branches (ctxt);
4577 }
4580 static unsigned int
4581 rest_of_clean_state (void)
4582 {
4583 rtx_insn *insn, *next;
4584 FILE *final_output = NULL;
4585 int save_unnumbered = flag_dump_unnumbered;
4586 int save_noaddr = flag_dump_noaddr;
4588 if (flag_dump_final_insns)
4589 {
4590 final_output = fopen (flag_dump_final_insns, "a");
4591 if (!final_output)
4592 {
4593 error ("could not open final insn dump file %qs: %m",
4594 flag_dump_final_insns);
4595 flag_dump_final_insns = NULL;
4596 }
4597 else
4598 {
4599 flag_dump_noaddr = flag_dump_unnumbered = 1;
4600 if (flag_compare_debug_opt || flag_compare_debug)
4601 dump_flags |= TDF_NOUID;
4602 dump_function_header (final_output, current_function_decl,
4603 dump_flags);
4604 final_insns_dump_p = true;
4606 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4607 if (LABEL_P (insn))
4608 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4609 else
4610 {
4611 if (NOTE_P (insn))
4612 set_block_for_insn (insn, NULL);
4613 INSN_UID (insn) = 0;
4614 }
4615 }
4616 }
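/* For reference: flag_dump_final_insns is set by -fdump-final-insns[=FILE],
   e.g.

       gcc -c foo.c -fdump-final-insns=foo.gkd

   and -fcompare-debug drives the same machinery to compare such dumps
   between compilations with and without debug info.  */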
4618 /* It is very important to decompose the RTL instruction chain here:
4619 debug information keeps pointing into CODE_LABEL insns inside the function
4620 body. If these remain pointing to the other insns, we end up preserving the
4621 whole RTL chain and attached detailed debug info in memory. */
4622 for (insn = get_insns (); insn; insn = next)
4623 {
4624 next = NEXT_INSN (insn);
4625 SET_NEXT_INSN (insn) = NULL;
4626 SET_PREV_INSN (insn) = NULL;
4628 if (final_output
4629 && (!NOTE_P (insn) ||
4630 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4631 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4632 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4633 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4634 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4635 print_rtl_single (final_output, insn);
4636 }
4638 if (final_output)
4639 {
4640 flag_dump_noaddr = save_noaddr;
4641 flag_dump_unnumbered = save_unnumbered;
4642 final_insns_dump_p = false;
4644 if (fclose (final_output))
4645 {
4646 error ("could not close final insn dump file %qs: %m",
4647 flag_dump_final_insns);
4648 flag_dump_final_insns = NULL;
4649 }
4650 }
4652 /* In case the function was not output,
4653 don't leave any temporary anonymous types
4654 queued up for sdb output. */
4655 if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
4656 sdbout_types (NULL_TREE);
4658 flag_rerun_cse_after_global_opts = 0;
4659 reload_completed = 0;
4660 epilogue_completed = 0;
4661 #ifdef STACK_REGS
4662 regstack_completed = 0;
4663 #endif
4665 /* Clear out the insn_length contents now that they are no
4666 longer valid. */
4667 init_insn_lengths ();
4669 /* Show no temporary slots allocated. */
4670 init_temp_slots ();
4672 free_bb_for_insn ();
4674 delete_tree_ssa (cfun);
4676 /* We can reduce stack alignment at call sites only when we are sure that
4677 the function body just produced will actually be used in the final
4678 executable. */
4679 if (decl_binds_to_current_def_p (current_function_decl))
4680 {
4681 unsigned int pref = crtl->preferred_stack_boundary;
4682 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4683 pref = crtl->stack_alignment_needed;
4684 cgraph_node::rtl_info (current_function_decl)
4685 ->preferred_incoming_stack_boundary = pref;
4686 }
4688 /* Make sure volatile mem refs aren't considered valid operands for
4689 arithmetic insns. We must call this here if this is a nested inline
4690 function, since the above code leaves us in the init_recog state,
4691 and the function context push/pop code does not save/restore volatile_ok.
4693 ??? Maybe it isn't necessary for expand_start_function to call this
4694 anymore if we do it here? */
4696 init_recog_no_volatile ();
4698 /* We're done with this function. Free up memory if we can. */
4699 free_after_parsing (cfun);
4700 free_after_compilation (cfun);
4701 return 0;
4702 }
4704 namespace {
4706 const pass_data pass_data_clean_state =
4707 {
4708 RTL_PASS, /* type */
4709 "*clean_state", /* name */
4710 OPTGROUP_NONE, /* optinfo_flags */
4711 TV_FINAL, /* tv_id */
4712 0, /* properties_required */
4713 0, /* properties_provided */
4714 PROP_rtl, /* properties_destroyed */
4715 0, /* todo_flags_start */
4716 0, /* todo_flags_finish */
4717 };
4719 class pass_clean_state : public rtl_opt_pass
4720 {
4721 public:
4722 pass_clean_state (gcc::context *ctxt)
4723 : rtl_opt_pass (pass_data_clean_state, ctxt)
4724 {}
4726 /* opt_pass methods: */
4727 virtual unsigned int execute (function *)
4728 {
4729 return rest_of_clean_state ();
4730 }
4732 }; // class pass_clean_state
4734 } // anon namespace
4736 rtl_opt_pass *
4737 make_pass_clean_state (gcc::context *ctxt)
4738 {
4739 return new pass_clean_state (ctxt);
4740 }
4742 /* Return true if INSN is a call to the current function. */
4744 static bool
4745 self_recursive_call_p (rtx_insn *insn)
4746 {
4747 tree fndecl = get_call_fndecl (insn);
4748 return (fndecl == current_function_decl
4749 && decl_binds_to_current_def_p (fndecl));
4750 }
4752 /* Collect hard register usage for the current function. */
4754 static void
4755 collect_fn_hard_reg_usage (void)
4756 {
4757 rtx_insn *insn;
4758 #ifdef STACK_REGS
4759 int i;
4760 #endif
4761 struct cgraph_rtl_info *node;
4762 HARD_REG_SET function_used_regs;
4764 /* ??? To be removed when all the ports have been fixed. */
4765 if (!targetm.call_fusage_contains_non_callee_clobbers)
4766 return;
4768 CLEAR_HARD_REG_SET (function_used_regs);
4770 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4771 {
4772 HARD_REG_SET insn_used_regs;
4774 if (!NONDEBUG_INSN_P (insn))
4775 continue;
4777 if (CALL_P (insn)
4778 && !self_recursive_call_p (insn))
4779 {
4780 if (!get_call_reg_set_usage (insn, &insn_used_regs,
4781 call_used_reg_set))
4782 return;
4784 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4785 }
4787 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4788 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4789 }
4791 /* Be conservative - mark fixed and global registers as used. */
4792 IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4794 #ifdef STACK_REGS
4795 /* Handle STACK_REGS conservatively, since the df-framework does not
4796 provide accurate information for them. */
4798 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4799 SET_HARD_REG_BIT (function_used_regs, i);
4800 #endif
4802 /* The information we have gathered is only interesting if it exposes a
4803 register from the call_used_regs that is not used in this function. */
4804 if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4805 return;
4807 node = cgraph_node::rtl_info (current_function_decl);
4808 gcc_assert (node != NULL);
4810 COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4811 node->function_used_regs_valid = 1;
4812 }
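/* Illustrative effect (hypothetical function and register): if the body of
   foo() never touches the call-clobbered register r10, the set recorded here
   lets callers compiled with -fipa-ra keep a value live in r10 across

       call    foo

   instead of saving and restoring it around the call.  */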
4814 /* Get the declaration of the function called by INSN. */
4816 static tree
4817 get_call_fndecl (rtx_insn *insn)
4818 {
4819 rtx note, datum;
4821 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4822 if (note == NULL_RTX)
4823 return NULL_TREE;
4825 datum = XEXP (note, 0);
4826 if (datum != NULL_RTX)
4827 return SYMBOL_REF_DECL (datum);
4829 return NULL_TREE;
4830 }
4832 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4833 call targets that can be overwritten. */
4835 static struct cgraph_rtl_info *
4836 get_call_cgraph_rtl_info (rtx_insn *insn)
4837 {
4838 tree fndecl;
4840 if (insn == NULL_RTX)
4841 return NULL;
4843 fndecl = get_call_fndecl (insn);
4844 if (fndecl == NULL_TREE
4845 || !decl_binds_to_current_def_p (fndecl))
4846 return NULL;
4848 return cgraph_node::rtl_info (fndecl);
4849 }
4851 /* Find hard registers used by function call instruction INSN, and return them
4852 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4854 bool
4855 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4856 HARD_REG_SET default_set)
4857 {
4858 if (flag_ipa_ra)
4859 {
4860 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4861 if (node != NULL
4862 && node->function_used_regs_valid)
4863 {
4864 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4865 AND_HARD_REG_SET (*reg_set, default_set);
4866 return true;
4867 }
4868 }
4870 COPY_HARD_REG_SET (*reg_set, default_set);
4871 return false;
4872 }
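/* Typical use (mirrors collect_fn_hard_reg_usage above): callers pass the
   conservative default they would otherwise assume, e.g.

       HARD_REG_SET used;
       if (get_call_reg_set_usage (insn, &used, call_used_reg_set))
         ... USED is the callee's recorded usage, restricted to the default ...
       else
         ... USED is just the conservative call_used_reg_set ...
   */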