gcc/final.cc
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
41 The code for the function prologue and epilogue is generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
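/* An illustrative sketch (under "#if 0", not used anywhere) of the calling
   sequence described above.  In the real compiler, shorten_branches and
   final run as separate passes, and the surrounding bookkeeping
   (assemble_start_function etc.) is omitted here.  */
#if 0
static void
emit_function_asm_sketch (FILE *file)
{
  rtx_insn *first = get_insns ();

  /* Resolve insn lengths and addresses before any code is written.  */
  shorten_branches (first);

  /* Function entry, the body (possibly in several pieces), then exit.  */
  final_start_function (first, file, optimize);
  final (first, file, optimize);
  final_end_function ();
}
#endif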
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "asan.h"
81 #include "rtl-iter.h"
82 #include "print-rtl.h"
83 #include "function-abi.h"
84 #include "common/common-target.h"
86 #include "dwarf2out.h"
88 /* Most ports don't need to define CC_STATUS_INIT.
89 So define a null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
94 /* Is the given character a logical line separator for the assembler? */
95 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
96 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
97 #endif
99 #ifndef JUMP_TABLES_IN_TEXT_SECTION
100 #define JUMP_TABLES_IN_TEXT_SECTION 0
101 #endif
103 /* Bitflags used by final_scan_insn. */
104 #define SEEN_NOTE 1
105 #define SEEN_EMITTED 2
106 #define SEEN_NEXT_VIEW 4
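/* Illustrative sketch (under "#if 0", not used anywhere) of how these bits
   are combined in the *SEEN argument that final_start_function_1 and
   final_scan_insn thread through the pass.  */
#if 0
static void
seen_bits_sketch (void)
{
  int seen = 0;
  seen |= SEEN_NOTE;               /* some note of interest has been seen  */
  if ((seen & SEEN_EMITTED) == 0)
    seen |= SEEN_EMITTED;          /* real output has been emitted  */
}
#endif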
108 /* Last insn processed by final_scan_insn. */
109 static rtx_insn *debug_insn;
110 rtx_insn *current_output_insn;
112 /* Line number of last NOTE. */
113 static int last_linenum;
115 /* Column number of last NOTE. */
116 static int last_columnnum;
118 /* Discriminator written to assembly. */
119 static int last_discriminator;
121 /* Compute discriminator to be written to assembly for current instruction.
122 Note: actual usage depends on loc_discriminator_kind setting. */
123 static inline int compute_discriminator (location_t loc);
125 /* Highest line number in current block. */
126 static int high_block_linenum;
128 /* Likewise for function. */
129 static int high_function_linenum;
131 /* Filename of last NOTE. */
132 static const char *last_filename;
134 /* Override filename, line and column number. */
135 static const char *override_filename;
136 static int override_linenum;
137 static int override_columnnum;
138 static int override_discriminator;
140 /* Whether to force emission of a line note before the next insn. */
141 static bool force_source_line = false;
143 extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */
145 /* Nonzero while outputting an `asm' with operands.
146 This means that inconsistencies are the user's fault, so don't die.
147 The precise value is the insn being output, to pass to error_for_asm. */
148 const rtx_insn *this_is_asm_operands;
150 /* Number of operands of this insn, for an `asm' with operands. */
151 static unsigned int insn_noperands;
153 /* Compare optimization flag. */
155 static rtx last_ignored_compare = 0;
157 /* Assign a unique number to each insn that is output.
158 This can be used to generate unique local labels. */
160 static int insn_counter = 0;
162 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
164 static int block_depth;
166 /* True if we have enabled APP processing of our assembler output. */
168 static bool app_on;
170 /* If we are outputting an insn sequence, this contains the sequence rtx.
171 Zero otherwise. */
173 rtx_sequence *final_sequence;
175 #ifdef ASSEMBLER_DIALECT
177 /* Number of the assembler dialect to use, starting at 0. */
178 static int dialect_number;
179 #endif
181 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
182 rtx current_insn_predicate;
184 /* True if printing into -fdump-final-insns= dump. */
185 bool final_insns_dump_p;
187 /* True if profile_function should be called, but hasn't been called yet. */
188 static bool need_profile_function;
190 static int asm_insn_count (rtx);
191 static void profile_function (FILE *);
192 static void profile_after_prologue (FILE *);
193 static bool notice_source_line (rtx_insn *, bool *);
194 static rtx walk_alter_subreg (rtx *, bool *);
195 static void output_asm_name (void);
196 static void output_alternate_entry_point (FILE *, rtx_insn *);
197 static tree get_mem_expr_from_op (rtx, int *);
198 static void output_asm_operand_names (rtx *, int *, int);
199 #ifdef LEAF_REGISTERS
200 static void leaf_renumber_regs (rtx_insn *);
201 #endif
202 static int align_fuzz (rtx, rtx, int, unsigned);
203 static void collect_fn_hard_reg_usage (void);
205 /* Initialize data in final at the beginning of a compilation. */
207 void
208 init_final (const char *filename ATTRIBUTE_UNUSED)
210 app_on = 0;
211 final_sequence = 0;
213 #ifdef ASSEMBLER_DIALECT
214 dialect_number = ASSEMBLER_DIALECT;
215 #endif
218 /* Default target function prologue and epilogue assembler output.
220 If not overridden for epilogue code, then the function body itself
221 contains return instructions wherever needed. */
222 void
223 default_function_pro_epilogue (FILE *)
227 void
228 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
229 tree decl ATTRIBUTE_UNUSED,
230 bool new_is_cold ATTRIBUTE_UNUSED)
234 /* Default target hook that outputs nothing to a stream. */
235 void
236 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
240 /* Enable APP processing of subsequent output.
241 Used before the output from an `asm' statement. */
243 void
244 app_enable (void)
246 if (! app_on)
248 fputs (ASM_APP_ON, asm_out_file);
249 app_on = 1;
253 /* Disable APP processing of subsequent output.
254 Called from varasm.cc before most kinds of output. */
256 void
257 app_disable (void)
259 if (app_on)
261 fputs (ASM_APP_OFF, asm_out_file);
262 app_on = 0;
266 /* Return the number of slots filled in the current
267 delayed branch sequence (we don't count the insn needing the
268 delay slot). Zero if not in a delayed branch sequence. */
271 dbr_sequence_length (void)
273 if (final_sequence != 0)
274 return XVECLEN (final_sequence, 0) - 1;
275 else
276 return 0;
279 /* The next two pages contain routines used to compute the length of an insn
280 and to shorten branches. */
282 /* Arrays for insn lengths, and addresses. The latter is referenced by
283 `insn_current_length'. */
285 static int *insn_lengths;
287 vec<int> insn_addresses_;
289 /* Max uid for which the above arrays are valid. */
290 static int insn_lengths_max_uid;
292 /* Address of insn being processed. Used by `insn_current_length'. */
293 int insn_current_address;
295 /* Address of insn being processed in previous iteration. */
296 int insn_last_address;
298 /* Known invariant alignment of the insn being processed. */
299 int insn_current_align;
301 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
302 gives the next following alignment insn that increases the known
303 alignment, or NULL_RTX if there is no such insn.
304 For any alignment obtained this way, we can again index uid_align with
305 its uid to obtain the next following align that in turn increases the
306 alignment, till we reach NULL_RTX; the sequence obtained this way
307 for each insn we'll call the alignment chain of this insn in the following
308 comments. */
310 static rtx *uid_align;
311 static int *uid_shuid;
312 static vec<align_flags> label_align;
314 /* Indicate that branch shortening hasn't yet been done. */
316 void
317 init_insn_lengths (void)
319 if (uid_shuid)
321 free (uid_shuid);
322 uid_shuid = 0;
324 if (insn_lengths)
326 free (insn_lengths);
327 insn_lengths = 0;
328 insn_lengths_max_uid = 0;
330 if (HAVE_ATTR_length)
331 INSN_ADDRESSES_FREE ();
332 if (uid_align)
334 free (uid_align);
335 uid_align = 0;
339 /* Obtain the current length of an insn. If branch shortening has been done,
340 get its actual length. Otherwise, use FALLBACK_FN to calculate the
341 length. */
342 static int
343 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
345 rtx body;
346 int i;
347 int length = 0;
349 if (!HAVE_ATTR_length)
350 return 0;
352 if (insn_lengths_max_uid > INSN_UID (insn))
353 return insn_lengths[INSN_UID (insn)];
354 else
355 switch (GET_CODE (insn))
357 case NOTE:
358 case BARRIER:
359 case CODE_LABEL:
360 case DEBUG_INSN:
361 return 0;
363 case CALL_INSN:
364 case JUMP_INSN:
365 length = fallback_fn (insn);
366 break;
368 case INSN:
369 body = PATTERN (insn);
370 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
371 return 0;
373 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
374 length = asm_insn_count (body) * fallback_fn (insn);
375 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
376 for (i = 0; i < seq->len (); i++)
377 length += get_attr_length_1 (seq->insn (i), fallback_fn);
378 else
379 length = fallback_fn (insn);
380 break;
382 default:
383 break;
386 #ifdef ADJUST_INSN_LENGTH
387 ADJUST_INSN_LENGTH (insn, length);
388 #endif
389 return length;
392 /* Obtain the current length of an insn. If branch shortening has been done,
393 get its actual length. Otherwise, get its maximum length. */
395 get_attr_length (rtx_insn *insn)
397 return get_attr_length_1 (insn, insn_default_length);
400 /* Obtain the current length of an insn. If branch shortening has been done,
401 get its actual length. Otherwise, get its minimum length. */
403 get_attr_min_length (rtx_insn *insn)
405 return get_attr_length_1 (insn, insn_min_length);
408 /* Code to handle alignment inside shorten_branches. */
410 /* Here is an explanation how the algorithm in align_fuzz can give
411 proper results:
413 Call a sequence of instructions beginning with alignment point X
414 and continuing until the next alignment point `block X'. When `X'
415 is used in an expression, it means the alignment value of the
416 alignment point.
418 Call the distance between the start of the first insn of block X, and
419 the end of the last insn of block X `IX', for the `inner size of X'.
420 This is clearly the sum of the instruction lengths.
422 Likewise with the next alignment-delimited block following X, which we
423 shall call block Y.
425 Call the distance between the start of the first insn of block X, and
426 the start of the first insn of block Y `OX', for the `outer size of X'.
428 The estimated padding is then OX - IX.
430 OX can be safely estimated as
432 if (X >= Y)
433 OX = round_up(IX, Y)
434 else
435 OX = round_up(IX, X) + Y - X
437 Clearly est(IX) >= real(IX), because that only depends on the
438 instruction lengths, and those being overestimated is a given.
440 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
441 we needn't worry about that when thinking about OX.
443 When X >= Y, the alignment provided by Y adds no uncertainty factor
444 for branch ranges starting before X, so we can just round what we have.
445 But when X < Y, we don't know anything about the, so to speak,
446 `middle bits', so we have to assume the worst when aligning up from an
447 address mod X to one mod Y, which is Y - X. */
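/* A self-contained numeric illustration (under "#if 0", not used anywhere)
   of the OX estimate above.  round_up is the usual power-of-two rounding;
   the concrete alignments and inner size are made up.  */
#if 0
static int
round_up_example (int value, int align)   /* ALIGN must be a power of two.  */
{
  return (value + align - 1) & -align;
}

static void
ox_estimate_example (void)
{
  int ix = 10;                             /* inner size of block X  */

  /* Case X >= Y, e.g. X aligned to 8 and Y aligned to 4:
     OX = round_up (10, 4) = 12, so at most 2 bytes of padding.  */
  int ox1 = round_up_example (ix, 4);

  /* Case X < Y, e.g. X aligned to 4 and Y aligned to 8:
     OX = round_up (10, 4) + 8 - 4 = 16, the extra Y - X covering the
     unknown `middle bits' between mod-4 and mod-8 alignment.  */
  int ox2 = round_up_example (ix, 4) + 8 - 4;

  (void) ox1; (void) ox2;
}
#endif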
449 #ifndef LABEL_ALIGN
450 #define LABEL_ALIGN(LABEL) align_labels
451 #endif
453 #ifndef LOOP_ALIGN
454 #define LOOP_ALIGN(LABEL) align_loops
455 #endif
457 #ifndef LABEL_ALIGN_AFTER_BARRIER
458 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
459 #endif
461 #ifndef JUMP_ALIGN
462 #define JUMP_ALIGN(LABEL) align_jumps
463 #endif
465 #ifndef ADDR_VEC_ALIGN
466 static int
467 final_addr_vec_align (rtx_jump_table_data *addr_vec)
469 int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
471 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
472 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
473 return exact_log2 (align);
477 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
478 #endif
480 #ifndef INSN_LENGTH_ALIGNMENT
481 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
482 #endif
484 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
486 static int min_labelno, max_labelno;
488 #define LABEL_TO_ALIGNMENT(LABEL) \
489 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
491 /* For the benefit of port specific code do this also as a function. */
493 align_flags
494 label_to_alignment (rtx label)
496 if (CODE_LABEL_NUMBER (label) <= max_labelno)
497 return LABEL_TO_ALIGNMENT (label);
498 return align_flags ();
501 /* The differences in addresses
502 between a branch and its target might grow or shrink depending on
503 the alignment the start insn of the range (the branch for a forward
504 branch or the label for a backward branch) starts out on; if these
505 differences are used naively, they can even oscillate infinitely.
506 We therefore want to compute a 'worst case' address difference that
507 is independent of the alignment the start insn of the range ends
508 up on, and that is at least as large as the actual difference.
509 The function align_fuzz calculates the amount we have to add to the
510 naively computed difference, by traversing the part of the alignment
511 chain of the start insn of the range that is in front of the end insn
512 of the range, and considering for each alignment the maximum amount
513 that it might contribute to a size increase.
515 For casesi tables, we also want to know worst case minimum amounts of
516 address difference, in case a machine description wants to introduce
517 some common offset that is added to all offsets in a table.
518 For this purpose, align_fuzz with a growth argument of 0 computes the
519 appropriate adjustment. */
521 /* Compute the maximum delta by which the difference of the addresses of
522 START and END might grow / shrink due to a different address for start
523 which changes the size of alignment insns between START and END.
524 KNOWN_ALIGN_LOG is the alignment known for START.
525 GROWTH should be ~0 if the objective is to compute potential code size
526 increase, and 0 if the objective is to compute potential shrink.
527 The return value is undefined for any other value of GROWTH. */
529 static int
530 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
532 int uid = INSN_UID (start);
533 rtx align_label;
534 int known_align = 1 << known_align_log;
535 int end_shuid = INSN_SHUID (end);
536 int fuzz = 0;
538 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
540 int align_addr, new_align;
542 uid = INSN_UID (align_label);
543 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
544 if (uid_shuid[uid] > end_shuid)
545 break;
546 align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
547 new_align = 1 << alignment.levels[0].log;
548 if (new_align < known_align)
549 continue;
550 fuzz += (-align_addr ^ growth) & (new_align - known_align);
551 known_align = new_align;
553 return fuzz;
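/* A worked example (under "#if 0", not used anywhere) of the
   per-alignment-point term accumulated above.  Suppose KNOWN_ALIGN_LOG
   gives an alignment of 4, the alignment point aligns to 16, and in the
   current pass the address just before it happens to be a multiple of 16,
   so no padding is emitted right now.  The numbers are made up.  */
#if 0
static void
align_fuzz_term_example (void)
{
  int known_align = 4, new_align = 16;
  int align_addr = 32;                 /* current pre-padding address  */

  /* Maximum possible growth (GROWTH == ~0): the address could instead end
     up congruent to 4 mod 16, requiring 12 bytes of padding.  */
  int max_growth = (-align_addr ^ ~0) & (new_align - known_align);  /* 12 */

  /* Maximum possible shrink (GROWTH == 0): the padding is already 0 here,
     so it cannot shrink at all.  */
  int max_shrink = (-align_addr ^ 0) & (new_align - known_align);   /* 0 */

  (void) max_growth; (void) max_shrink;
}
#endif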
556 /* Compute a worst-case reference address of a branch so that it
557 can be safely used in the presence of aligned labels. Since the
558 size of the branch itself is unknown, the size of the branch is
559 not included in the range. I.e. for a forward branch, the reference
560 address is the end address of the branch as known from the previous
561 branch shortening pass, minus a value to account for possible size
562 increase due to alignment. For a backward branch, it is the start
563 address of the branch as known from the current pass, plus a value
564 to account for possible size increase due to alignment.
565 NB.: Therefore, the maximum offset allowed for backward branches needs
566 to exclude the branch size. */
569 insn_current_reference_address (rtx_insn *branch)
571 rtx dest;
572 int seq_uid;
574 if (! INSN_ADDRESSES_SET_P ())
575 return 0;
577 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
578 seq_uid = INSN_UID (seq);
579 if (!jump_to_label_p (branch))
580 /* This can happen for example on the PA; the objective is to know the
581 offset to address something in front of the start of the function.
582 Thus, we can treat it like a backward branch.
583 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
584 any alignment we'd encounter, so we skip the call to align_fuzz. */
585 return insn_current_address;
586 dest = JUMP_LABEL (branch);
588 /* BRANCH has no proper alignment chain set, so use SEQ.
589 BRANCH also has no INSN_SHUID. */
590 if (INSN_SHUID (seq) < INSN_SHUID (dest))
592 /* Forward branch. */
593 return (insn_last_address + insn_lengths[seq_uid]
594 - align_fuzz (seq, dest, length_unit_log, ~0));
596 else
598 /* Backward branch. */
599 return (insn_current_address
600 + align_fuzz (dest, seq, length_unit_log, ~0));
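/* A hypothetical backend use (under "#if 0", not used anywhere): deciding
   whether a short branch form still reaches its target, using the
   worst-case reference address computed above.  The function name, the
   2 KiB reach and the branch size are made up for illustration.  */
#if 0
static bool
short_branch_ok_p (rtx_insn *branch)
{
  const int range = 2048;        /* made-up reach of the short form  */
  const int branch_size = 4;     /* made-up size of the branch itself  */
  int ref = insn_current_reference_address (branch);
  int dest = INSN_ADDRESSES (INSN_UID (JUMP_LABEL (branch)));

  /* REF does not include the branch size, so the backward range must
     leave room for it (see the comment above).  */
  return dest - ref >= -(range - branch_size) && dest - ref < range;
}
#endif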
604 /* Compute branch alignments based on CFG profile. */
606 void
607 compute_alignments (void)
609 basic_block bb;
610 align_flags max_alignment;
612 label_align.truncate (0);
614 max_labelno = max_label_num ();
615 min_labelno = get_first_label_num ();
616 label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);
618 /* If not optimizing or optimizing for size, don't assign any alignments. */
619 if (! optimize || optimize_function_for_size_p (cfun))
620 return;
622 if (dump_file)
624 dump_reg_info (dump_file);
625 dump_flow_info (dump_file, TDF_DETAILS);
626 flow_loops_dump (dump_file, NULL, 1);
628 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
629 profile_count count_threshold = cfun->cfg->count_max / param_align_threshold;
631 if (dump_file)
633 fprintf (dump_file, "count_max: ");
634 cfun->cfg->count_max.dump (dump_file);
635 fprintf (dump_file, "\n");
637 FOR_EACH_BB_FN (bb, cfun)
639 rtx_insn *label = BB_HEAD (bb);
640 bool has_fallthru = 0;
641 edge e;
642 edge_iterator ei;
644 if (!LABEL_P (label)
645 || optimize_bb_for_size_p (bb))
647 if (dump_file)
648 fprintf (dump_file,
649 "BB %4i loop %2i loop_depth %2i skipped.\n",
650 bb->index,
651 bb->loop_father->num,
652 bb_loop_depth (bb));
653 continue;
655 max_alignment = LABEL_ALIGN (label);
656 profile_count fallthru_count = profile_count::zero ();
657 profile_count branch_count = profile_count::zero ();
659 FOR_EACH_EDGE (e, ei, bb->preds)
661 if (e->flags & EDGE_FALLTHRU)
662 has_fallthru = 1, fallthru_count += e->count ();
663 else
664 branch_count += e->count ();
666 if (dump_file)
668 fprintf (dump_file, "BB %4i loop %2i loop_depth"
669 " %2i fall ",
670 bb->index, bb->loop_father->num,
671 bb_loop_depth (bb));
672 fallthru_count.dump (dump_file);
673 fprintf (dump_file, " branch ");
674 branch_count.dump (dump_file);
675 if (!bb->loop_father->inner && bb->loop_father->num)
676 fprintf (dump_file, " inner_loop");
677 if (bb->loop_father->header == bb)
678 fprintf (dump_file, " loop_header");
679 fprintf (dump_file, "\n");
681 if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
682 continue;
684 /* There are two purposes to aligning a block with no fallthru incoming edge:
685 1) to avoid fetch stalls when the branch destination is near a cache boundary
686 2) to improve cache efficiency in case the previous block is not executed
687 (so it does not need to be in the cache).
689 To catch the first case, we align frequently executed blocks.
690 To catch the second, we align blocks that are executed more frequently
691 than the predecessor and the predecessor is unlikely to be executed
692 when the function is called. */
694 if (!has_fallthru
695 && (branch_count > count_threshold
696 || (bb->count > bb->prev_bb->count * 10
697 && (bb->prev_bb->count
698 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2))))
700 align_flags alignment = JUMP_ALIGN (label);
701 if (dump_file)
702 fprintf (dump_file, " jump alignment added.\n");
703 max_alignment = align_flags::max (max_alignment, alignment);
705 /* In case block is frequent and reached mostly by non-fallthru edge,
706 align it. It is most likely a first block of loop. */
707 if (has_fallthru
708 && !(single_succ_p (bb)
709 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
710 && optimize_bb_for_speed_p (bb)
711 && branch_count + fallthru_count > count_threshold
712 && (branch_count > fallthru_count * param_align_loop_iterations))
714 align_flags alignment = LOOP_ALIGN (label);
715 if (dump_file)
716 fprintf (dump_file, " internal loop alignment added.\n");
717 max_alignment = align_flags::max (max_alignment, alignment);
719 LABEL_TO_ALIGNMENT (label) = max_alignment;
722 loop_optimizer_finalize ();
723 free_dominance_info (CDI_DOMINATORS);
726 /* Grow the LABEL_ALIGN array after new labels are created. */
728 static void
729 grow_label_align (void)
731 int old = max_labelno;
732 int n_labels;
733 int n_old_labels;
735 max_labelno = max_label_num ();
737 n_labels = max_labelno - min_labelno + 1;
738 n_old_labels = old - min_labelno + 1;
740 label_align.safe_grow_cleared (n_labels, true);
742 /* The range of labels grows monotonically in the function. Failing here
743 means that the initialization of the array was lost. */
744 gcc_assert (n_old_labels <= n_labels);
747 /* Update the already computed alignment information. LABEL_PAIRS is a vector
748 made up of pairs of labels for which the alignment information of the first
749 element will be copied from that of the second element. */
751 void
752 update_alignments (vec<rtx> &label_pairs)
754 unsigned int i = 0;
755 rtx iter, label = NULL_RTX;
757 if (max_labelno != max_label_num ())
758 grow_label_align ();
760 FOR_EACH_VEC_ELT (label_pairs, i, iter)
761 if (i & 1)
762 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
763 else
764 label = iter;
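/* Usage sketch (under "#if 0", not used anywhere): after duplicating
   OLD_LABEL into NEW_LABEL, copy the already-computed alignment across.
   Pairs are laid out flat: destination first, source second.  The helper
   name is made up.  */
#if 0
static void
copy_label_alignment (rtx new_label, rtx old_label)
{
  auto_vec<rtx> label_pairs;
  label_pairs.safe_push (new_label);   /* first of pair: receives the alignment  */
  label_pairs.safe_push (old_label);   /* second of pair: provides it  */
  update_alignments (label_pairs);
}
#endif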
767 namespace {
769 const pass_data pass_data_compute_alignments =
771 RTL_PASS, /* type */
772 "alignments", /* name */
773 OPTGROUP_NONE, /* optinfo_flags */
774 TV_NONE, /* tv_id */
775 0, /* properties_required */
776 0, /* properties_provided */
777 0, /* properties_destroyed */
778 0, /* todo_flags_start */
779 0, /* todo_flags_finish */
782 class pass_compute_alignments : public rtl_opt_pass
784 public:
785 pass_compute_alignments (gcc::context *ctxt)
786 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
789 /* opt_pass methods: */
790 unsigned int execute (function *) final override
792 compute_alignments ();
793 return 0;
796 }; // class pass_compute_alignments
798 } // anon namespace
800 rtl_opt_pass *
801 make_pass_compute_alignments (gcc::context *ctxt)
803 return new pass_compute_alignments (ctxt);
807 /* Make a pass over all insns and compute their actual lengths by shortening
808 any branches of variable length if possible. */
810 /* shorten_branches might be called multiple times: for example, the SH
811 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
812 In order to do this, it needs proper length information, which it obtains
813 by calling shorten_branches. This cannot be collapsed with
814 shorten_branches itself into a single pass unless we also want to integrate
815 reorg.cc, since the branch splitting exposes new instructions with delay
816 slots. */
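/* The loop at the heart of shorten_branches is a fixed-point iteration.
   A condensed sketch of the idea (under "#if 0", not used anywhere); the
   real code below also handles alignment, SEQUENCEs, ADDR_DIFF_VECs and
   the non-optimizing single-pass case.  */
#if 0
static void
shorten_branches_sketch (rtx_insn *first)
{
  /* When optimizing, lengths start at their minimum and are only ever
     increased, which guarantees termination.  */
  bool changed = true;
  while (changed)
    {
      changed = false;
      int address = 0;
      for (rtx_insn *insn = first; insn; insn = NEXT_INSN (insn))
	{
	  INSN_ADDRESSES (INSN_UID (insn)) = address;
	  /* The real code only recomputes insns whose length can vary.  */
	  int len = insn_current_length (insn);
	  if (len > insn_lengths[INSN_UID (insn)])
	    {
	      insn_lengths[INSN_UID (insn)] = len;
	      changed = true;
	    }
	  address += insn_lengths[INSN_UID (insn)];
	}
    }
}
#endif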
818 void
819 shorten_branches (rtx_insn *first)
821 rtx_insn *insn;
822 int max_uid;
823 int i;
824 rtx_insn *seq;
825 bool something_changed = true;
826 char *varying_length;
827 rtx body;
828 int uid;
829 rtx align_tab[MAX_CODE_ALIGN + 1];
831 /* Compute maximum UID and allocate label_align / uid_shuid. */
832 max_uid = get_max_uid ();
834 /* Free uid_shuid before reallocating it. */
835 free (uid_shuid);
837 uid_shuid = XNEWVEC (int, max_uid);
839 if (max_labelno != max_label_num ())
840 grow_label_align ();
842 /* Initialize label_align and set up uid_shuid to be strictly
843 monotonically rising with insn order. */
844 /* We use alignment here to keep track of the maximum alignment we want to
845 impose on the next CODE_LABEL (or the current one if we are processing
846 the CODE_LABEL itself). */
848 align_flags max_alignment;
850 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
852 INSN_SHUID (insn) = i++;
853 if (INSN_P (insn))
854 continue;
856 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
858 /* Merge in alignments computed by compute_alignments. */
859 align_flags alignment = LABEL_TO_ALIGNMENT (label);
860 max_alignment = align_flags::max (max_alignment, alignment);
862 rtx_jump_table_data *table = jump_table_for_label (label);
863 if (!table)
865 align_flags alignment = LABEL_ALIGN (label);
866 max_alignment = align_flags::max (max_alignment, alignment);
868 /* ADDR_VECs only take room if read-only data goes into the text
869 section. */
870 if ((JUMP_TABLES_IN_TEXT_SECTION
871 || readonly_data_section == text_section)
872 && table)
874 align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
875 max_alignment = align_flags::max (max_alignment, alignment);
877 LABEL_TO_ALIGNMENT (label) = max_alignment;
878 max_alignment = align_flags ();
880 else if (BARRIER_P (insn))
882 rtx_insn *label;
884 for (label = insn; label && ! INSN_P (label);
885 label = NEXT_INSN (label))
886 if (LABEL_P (label))
888 align_flags alignment
889 = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
890 max_alignment = align_flags::max (max_alignment, alignment);
891 break;
895 if (!HAVE_ATTR_length)
896 return;
898 /* Allocate the rest of the arrays. */
899 insn_lengths = XNEWVEC (int, max_uid);
900 insn_lengths_max_uid = max_uid;
901 /* Syntax errors can lead to labels being outside of the main insn stream.
902 Initialize insn_addresses, so that we get reproducible results. */
903 INSN_ADDRESSES_ALLOC (max_uid);
905 varying_length = XCNEWVEC (char, max_uid);
907 /* Initialize uid_align. We scan instructions
908 from end to start, and keep in align_tab[n] the last seen insn
909 that does an alignment of at least n+1, i.e. the successor
910 in the alignment chain for an insn that does / has a known
911 alignment of n. */
912 uid_align = XCNEWVEC (rtx, max_uid);
914 for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
915 align_tab[i] = NULL_RTX;
916 seq = get_last_insn ();
917 for (; seq; seq = PREV_INSN (seq))
919 int uid = INSN_UID (seq);
920 int log;
921 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
922 uid_align[uid] = align_tab[0];
923 if (log)
925 /* Found an alignment label. */
926 gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
927 uid_align[uid] = align_tab[log];
928 for (i = log - 1; i >= 0; i--)
929 align_tab[i] = seq;
933 /* When optimizing, we start assuming minimum length, and keep increasing
934 lengths as we find the need for this, till nothing changes.
935 When not optimizing, we start assuming maximum lengths, and
936 do a single pass to update the lengths. */
937 bool increasing = optimize != 0;
939 #ifdef CASE_VECTOR_SHORTEN_MODE
940 if (optimize)
942 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
943 label fields. */
945 int min_shuid = INSN_SHUID (get_insns ()) - 1;
946 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
947 int rel;
949 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
951 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
952 int len, i, min, max, insn_shuid;
953 int min_align;
954 addr_diff_vec_flags flags;
956 if (! JUMP_TABLE_DATA_P (insn)
957 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
958 continue;
959 pat = PATTERN (insn);
960 len = XVECLEN (pat, 1);
961 gcc_assert (len > 0);
962 min_align = MAX_CODE_ALIGN;
963 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
965 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
966 int shuid = INSN_SHUID (lab);
967 if (shuid < min)
969 min = shuid;
970 min_lab = lab;
972 if (shuid > max)
974 max = shuid;
975 max_lab = lab;
978 int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
979 if (min_align > label_alignment)
980 min_align = label_alignment;
982 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
983 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
984 insn_shuid = INSN_SHUID (insn);
985 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
986 memset (&flags, 0, sizeof (flags));
987 flags.min_align = min_align;
988 flags.base_after_vec = rel > insn_shuid;
989 flags.min_after_vec = min > insn_shuid;
990 flags.max_after_vec = max > insn_shuid;
991 flags.min_after_base = min > rel;
992 flags.max_after_base = max > rel;
993 ADDR_DIFF_VEC_FLAGS (pat) = flags;
995 if (increasing)
996 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
999 #endif /* CASE_VECTOR_SHORTEN_MODE */
1001 /* Compute initial lengths, addresses, and varying flags for each insn. */
1002 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1004 for (insn_current_address = 0, insn = first;
1005 insn != 0;
1006 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1008 uid = INSN_UID (insn);
1010 insn_lengths[uid] = 0;
1012 if (LABEL_P (insn))
1014 int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1015 if (log)
1017 int align = 1 << log;
1018 int new_address = (insn_current_address + align - 1) & -align;
1019 insn_lengths[uid] = new_address - insn_current_address;
1023 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1025 if (NOTE_P (insn) || BARRIER_P (insn)
1026 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1027 continue;
1028 if (insn->deleted ())
1029 continue;
1031 body = PATTERN (insn);
1032 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1034 /* This only takes room if read-only data goes into the text
1035 section. */
1036 if (JUMP_TABLES_IN_TEXT_SECTION
1037 || readonly_data_section == text_section)
1038 insn_lengths[uid] = (XVECLEN (body,
1039 GET_CODE (body) == ADDR_DIFF_VEC)
1040 * GET_MODE_SIZE (table->get_data_mode ()));
1041 /* Alignment is handled by ADDR_VEC_ALIGN. */
1043 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1044 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1045 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1047 int i;
1048 int const_delay_slots;
1049 if (DELAY_SLOTS)
1050 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1051 else
1052 const_delay_slots = 0;
1054 int (*inner_length_fun) (rtx_insn *)
1055 = const_delay_slots ? length_fun : insn_default_length;
1056 /* Inside a delay slot sequence, we do not do any branch shortening
1057 if the shortening could change the number of delay slots
1058 of the branch. */
1059 for (i = 0; i < body_seq->len (); i++)
1061 rtx_insn *inner_insn = body_seq->insn (i);
1062 int inner_uid = INSN_UID (inner_insn);
1063 int inner_length;
1065 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1066 || asm_noperands (PATTERN (inner_insn)) >= 0)
1067 inner_length = (asm_insn_count (PATTERN (inner_insn))
1068 * insn_default_length (inner_insn));
1069 else
1070 inner_length = inner_length_fun (inner_insn);
1072 insn_lengths[inner_uid] = inner_length;
1073 if (const_delay_slots)
1075 if ((varying_length[inner_uid]
1076 = insn_variable_length_p (inner_insn)) != 0)
1077 varying_length[uid] = 1;
1078 INSN_ADDRESSES (inner_uid) = (insn_current_address
1079 + insn_lengths[uid]);
1081 else
1082 varying_length[inner_uid] = 0;
1083 insn_lengths[uid] += inner_length;
1086 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1088 insn_lengths[uid] = length_fun (insn);
1089 varying_length[uid] = insn_variable_length_p (insn);
1092 /* If needed, do any adjustment. */
1093 #ifdef ADJUST_INSN_LENGTH
1094 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1095 if (insn_lengths[uid] < 0)
1096 fatal_insn ("negative insn length", insn);
1097 #endif
1100 /* Now loop over all the insns finding varying length insns. For each,
1101 get the current insn length. If it has changed, reflect the change.
1102 When nothing changes for a full pass, we are done. */
1104 while (something_changed)
1106 something_changed = false;
1107 insn_current_align = MAX_CODE_ALIGN - 1;
1108 for (insn_current_address = 0, insn = first;
1109 insn != 0;
1110 insn = NEXT_INSN (insn))
1112 int new_length;
1113 #ifdef ADJUST_INSN_LENGTH
1114 int tmp_length;
1115 #endif
1116 int length_align;
1118 uid = INSN_UID (insn);
1120 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1122 int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1124 #ifdef CASE_VECTOR_SHORTEN_MODE
1125 /* If the mode of a following jump table was changed, we
1126 may need to update the alignment of this label. */
1128 if (JUMP_TABLES_IN_TEXT_SECTION
1129 || readonly_data_section == text_section)
1131 rtx_jump_table_data *table = jump_table_for_label (label);
1132 if (table)
1134 int newlog = ADDR_VEC_ALIGN (table);
1135 if (newlog != log)
1137 log = newlog;
1138 LABEL_TO_ALIGNMENT (insn) = log;
1139 something_changed = true;
1143 #endif
1145 if (log > insn_current_align)
1147 int align = 1 << log;
1148 int new_address= (insn_current_address + align - 1) & -align;
1149 insn_lengths[uid] = new_address - insn_current_address;
1150 insn_current_align = log;
1151 insn_current_address = new_address;
1153 else
1154 insn_lengths[uid] = 0;
1155 INSN_ADDRESSES (uid) = insn_current_address;
1156 continue;
1159 length_align = INSN_LENGTH_ALIGNMENT (insn);
1160 if (length_align < insn_current_align)
1161 insn_current_align = length_align;
1163 insn_last_address = INSN_ADDRESSES (uid);
1164 INSN_ADDRESSES (uid) = insn_current_address;
1166 #ifdef CASE_VECTOR_SHORTEN_MODE
1167 if (optimize
1168 && JUMP_TABLE_DATA_P (insn)
1169 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1171 rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1172 rtx body = PATTERN (insn);
1173 int old_length = insn_lengths[uid];
1174 rtx_insn *rel_lab =
1175 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1176 rtx min_lab = XEXP (XEXP (body, 2), 0);
1177 rtx max_lab = XEXP (XEXP (body, 3), 0);
1178 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1179 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1180 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1181 rtx_insn *prev;
1182 int rel_align = 0;
1183 addr_diff_vec_flags flags;
1184 scalar_int_mode vec_mode;
1186 /* Avoid automatic aggregate initialization. */
1187 flags = ADDR_DIFF_VEC_FLAGS (body);
1189 /* Try to find a known alignment for rel_lab. */
1190 for (prev = rel_lab;
1191 prev
1192 && ! insn_lengths[INSN_UID (prev)]
1193 && ! (varying_length[INSN_UID (prev)] & 1);
1194 prev = PREV_INSN (prev))
1195 if (varying_length[INSN_UID (prev)] & 2)
1197 rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1198 break;
1201 /* See the comment on addr_diff_vec_flags in rtl.h for the
1202 meaning of the flags values. base: REL_LAB vec: INSN */
1203 /* Anything after INSN still has addresses from the last
1204 pass; adjust these so that they reflect our current
1205 estimate for this pass. */
1206 if (flags.base_after_vec)
1207 rel_addr += insn_current_address - insn_last_address;
1208 if (flags.min_after_vec)
1209 min_addr += insn_current_address - insn_last_address;
1210 if (flags.max_after_vec)
1211 max_addr += insn_current_address - insn_last_address;
1212 /* We want to know the worst case, i.e. lowest possible value
1213 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1214 its offset is positive, and we have to be wary of code shrink;
1215 otherwise, it is negative, and we have to be wary of code
1216 size increase. */
1217 if (flags.min_after_base)
1219 /* If INSN is between REL_LAB and MIN_LAB, the size
1220 changes we are about to make can change the alignment
1221 within the observed offset, therefore we have to break
1222 it up into two parts that are independent. */
1223 if (! flags.base_after_vec && flags.min_after_vec)
1225 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1226 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1228 else
1229 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1231 else
1233 if (flags.base_after_vec && ! flags.min_after_vec)
1235 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1236 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1238 else
1239 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1241 /* Likewise, determine the highest possible value
1242 for the offset of MAX_LAB. */
1243 if (flags.max_after_base)
1245 if (! flags.base_after_vec && flags.max_after_vec)
1247 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1248 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1250 else
1251 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1253 else
1255 if (flags.base_after_vec && ! flags.max_after_vec)
1257 max_addr += align_fuzz (max_lab, insn, 0, 0);
1258 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1260 else
1261 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1263 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1264 max_addr - rel_addr, body);
1265 if (!increasing
1266 || (GET_MODE_SIZE (vec_mode)
1267 >= GET_MODE_SIZE (table->get_data_mode ())))
1268 PUT_MODE (body, vec_mode);
1269 if (JUMP_TABLES_IN_TEXT_SECTION
1270 || readonly_data_section == text_section)
1272 insn_lengths[uid]
1273 = (XVECLEN (body, 1)
1274 * GET_MODE_SIZE (table->get_data_mode ()));
1275 insn_current_address += insn_lengths[uid];
1276 if (insn_lengths[uid] != old_length)
1277 something_changed = true;
1280 continue;
1282 #endif /* CASE_VECTOR_SHORTEN_MODE */
1284 if (! (varying_length[uid]))
1286 if (NONJUMP_INSN_P (insn)
1287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1289 int i;
1291 body = PATTERN (insn);
1292 for (i = 0; i < XVECLEN (body, 0); i++)
1294 rtx inner_insn = XVECEXP (body, 0, i);
1295 int inner_uid = INSN_UID (inner_insn);
1297 INSN_ADDRESSES (inner_uid) = insn_current_address;
1299 insn_current_address += insn_lengths[inner_uid];
1302 else
1303 insn_current_address += insn_lengths[uid];
1305 continue;
1308 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1310 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1311 int i;
1313 body = PATTERN (insn);
1314 new_length = 0;
1315 for (i = 0; i < seqn->len (); i++)
1317 rtx_insn *inner_insn = seqn->insn (i);
1318 int inner_uid = INSN_UID (inner_insn);
1319 int inner_length;
1321 INSN_ADDRESSES (inner_uid) = insn_current_address;
1323 /* insn_current_length returns 0 for insns with a
1324 non-varying length. */
1325 if (! varying_length[inner_uid])
1326 inner_length = insn_lengths[inner_uid];
1327 else
1328 inner_length = insn_current_length (inner_insn);
1330 if (inner_length != insn_lengths[inner_uid])
1332 if (!increasing || inner_length > insn_lengths[inner_uid])
1334 insn_lengths[inner_uid] = inner_length;
1335 something_changed = true;
1337 else
1338 inner_length = insn_lengths[inner_uid];
1340 insn_current_address += inner_length;
1341 new_length += inner_length;
1344 else
1346 new_length = insn_current_length (insn);
1347 insn_current_address += new_length;
1350 #ifdef ADJUST_INSN_LENGTH
1351 /* If needed, do any adjustment. */
1352 tmp_length = new_length;
1353 ADJUST_INSN_LENGTH (insn, new_length);
1354 insn_current_address += (new_length - tmp_length);
1355 #endif
1357 if (new_length != insn_lengths[uid]
1358 && (!increasing || new_length > insn_lengths[uid]))
1360 insn_lengths[uid] = new_length;
1361 something_changed = true;
1363 else
1364 insn_current_address += insn_lengths[uid] - new_length;
1366 /* For a non-optimizing compile, do only a single pass. */
1367 if (!increasing)
1368 break;
1370 crtl->max_insn_address = insn_current_address;
1371 free (varying_length);
1374 /* Given the body of an INSN known to be generated by an ASM statement, return
1375 the number of machine instructions likely to be generated for this insn.
1376 This is used to compute its length. */
1378 static int
1379 asm_insn_count (rtx body)
1381 const char *templ;
1383 if (GET_CODE (body) == ASM_INPUT)
1384 templ = XSTR (body, 0);
1385 else
1386 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1388 return asm_str_count (templ);
1391 /* Return the number of machine instructions likely to be generated for the
1392 inline-asm template. */
1394 asm_str_count (const char *templ)
1396 int count = 1;
1398 if (!*templ)
1399 return 0;
1401 for (; *templ; templ++)
1402 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1403 || *templ == '\n')
1404 count++;
1406 return count;
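/* Example (under "#if 0", not used anywhere): a three-line template counts
   as three machine instructions for length purposes, while an empty
   template contributes none.  */
#if 0
static void
asm_str_count_example (void)
{
  /* Two '\n' separators, so three instructions.  */
  gcc_checking_assert (asm_str_count ("mov\t%0, %1\n\tadd\t%0, %0, #1\n\tnop") == 3);

  /* An empty template generates no instructions at all.  */
  gcc_checking_assert (asm_str_count ("") == 0);
}
#endif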
1409 /* Return true if DWARF2 debug info can be emitted for DECL. */
1411 static bool
1412 dwarf2_debug_info_emitted_p (tree decl)
1414 /* When DWARF2 debug info is not generated internally. */
1415 if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1416 return false;
1418 if (DECL_IGNORED_P (decl))
1419 return false;
1421 return true;
1424 /* Return scope resulting from combination of S1 and S2. */
1425 static tree
1426 choose_inner_scope (tree s1, tree s2)
1428 if (!s1)
1429 return s2;
1430 if (!s2)
1431 return s1;
1432 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1433 return s1;
1434 return s2;
1437 /* Emit lexical block notes needed to change scope from S1 to S2. */
1439 static void
1440 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1442 rtx_insn *insn = orig_insn;
1443 tree com = NULL_TREE;
1444 tree ts1 = s1, ts2 = s2;
1445 tree s;
1447 while (ts1 != ts2)
1449 gcc_assert (ts1 && ts2);
1450 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1451 ts1 = BLOCK_SUPERCONTEXT (ts1);
1452 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1453 ts2 = BLOCK_SUPERCONTEXT (ts2);
1454 else
1456 ts1 = BLOCK_SUPERCONTEXT (ts1);
1457 ts2 = BLOCK_SUPERCONTEXT (ts2);
1460 com = ts1;
1462 /* Close scopes. */
1463 s = s1;
1464 while (s != com)
1466 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1467 NOTE_BLOCK (note) = s;
1468 s = BLOCK_SUPERCONTEXT (s);
1471 /* Open scopes. */
1472 s = s2;
1473 while (s != com)
1475 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1476 NOTE_BLOCK (insn) = s;
1477 s = BLOCK_SUPERCONTEXT (s);
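/* Worked illustration, with a hypothetical block tree:

     F  (DECL_INITIAL of the function)
     +-- A
     |   +-- B
     +-- C

   change_scope (insn, B, C) walks both chains up to the common ancestor F,
   emitting NOTE_INSN_BLOCK_END notes for B and then A before INSN,
   followed by a NOTE_INSN_BLOCK_BEG note for C.  */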
1481 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1482 on the scope tree and the newly reordered instructions. */
1484 static void
1485 reemit_insn_block_notes (void)
1487 tree cur_block = DECL_INITIAL (cfun->decl);
1488 rtx_insn *insn;
1490 insn = get_insns ();
1491 for (; insn; insn = NEXT_INSN (insn))
1493 tree this_block;
1495 /* Prevent lexical blocks from straddling section boundaries. */
1496 if (NOTE_P (insn))
1497 switch (NOTE_KIND (insn))
1499 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1501 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1502 s = BLOCK_SUPERCONTEXT (s))
1504 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1505 NOTE_BLOCK (note) = s;
1506 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1507 NOTE_BLOCK (note) = s;
1510 break;
1512 case NOTE_INSN_BEGIN_STMT:
1513 case NOTE_INSN_INLINE_ENTRY:
1514 this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
1515 goto set_cur_block_to_this_block;
1517 default:
1518 continue;
1521 if (!active_insn_p (insn))
1522 continue;
1524 /* Avoid putting scope notes between jump table and its label. */
1525 if (JUMP_TABLE_DATA_P (insn))
1526 continue;
1528 this_block = insn_scope (insn);
1529 /* For sequences compute scope resulting from merging all scopes
1530 of instructions nested inside. */
1531 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1533 int i;
1535 this_block = NULL;
1536 for (i = 0; i < body->len (); i++)
1537 this_block = choose_inner_scope (this_block,
1538 insn_scope (body->insn (i)));
1540 set_cur_block_to_this_block:
1541 if (! this_block)
1543 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1544 continue;
1545 else
1546 this_block = DECL_INITIAL (cfun->decl);
1549 if (this_block != cur_block)
1551 change_scope (insn, cur_block, this_block);
1552 cur_block = this_block;
1556 /* change_scope emits before the insn, not after. */
1557 rtx_note *note = emit_note (NOTE_INSN_DELETED);
1558 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1559 delete_insn (note);
1561 reorder_blocks ();
1564 static const char *some_local_dynamic_name;
1566 /* Locate some local-dynamic symbol still in use by this function
1567 so that we can print its name in local-dynamic base patterns.
1568 Return null if there are no local-dynamic references. */
1570 const char *
1571 get_some_local_dynamic_name ()
1573 subrtx_iterator::array_type array;
1574 rtx_insn *insn;
1576 if (some_local_dynamic_name)
1577 return some_local_dynamic_name;
1579 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1580 if (NONDEBUG_INSN_P (insn))
1581 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1583 const_rtx x = *iter;
1584 if (GET_CODE (x) == SYMBOL_REF)
1586 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1587 return some_local_dynamic_name = XSTR (x, 0);
1588 if (CONSTANT_POOL_ADDRESS_P (x))
1589 iter.substitute (get_pool_constant (x));
1593 return 0;
1596 /* Arrange for us to emit a source location note before any further
1597 real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1598 *SEEN, as long as we are keeping track of location views. The bit
1599 indicates we have referenced the next view at the current PC, so we
1600 have to emit it. This should be called next to the var_location
1601 debug hook. */
1603 static inline void
1604 set_next_view_needed (int *seen)
1606 if (debug_variable_location_views)
1607 *seen |= SEEN_NEXT_VIEW;
1610 /* Clear the flag in *SEEN indicating we need to emit the next view.
1611 This should be called next to the source_line debug hook. */
1613 static inline void
1614 clear_next_view_needed (int *seen)
1616 *seen &= ~SEEN_NEXT_VIEW;
1619 /* Test whether we have a pending request to emit the next view in
1620 *SEEN, and emit it if needed, clearing the request bit. */
1622 static inline void
1623 maybe_output_next_view (int *seen)
1625 if ((*seen & SEEN_NEXT_VIEW) != 0)
1627 clear_next_view_needed (seen);
1628 (*debug_hooks->source_line) (last_linenum, last_columnnum,
1629 last_filename, last_discriminator,
1630 false);
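/* Usage sketch (under "#if 0", not used anywhere) of the protocol the
   helpers above implement, loosely following what final_scan_insn does:
   request the next view where the var_location hook fires, and flush it
   before the next real insn.  The helper name is made up.  */
#if 0
static void
next_view_protocol_sketch (rtx_insn *insn, int *seen)
{
  if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
    {
      (*debug_hooks->var_location) (insn);
      set_next_view_needed (seen);     /* reference the view at this PC  */
    }
  else if (active_insn_p (insn))
    maybe_output_next_view (seen);     /* emit it before real code  */
}
#endif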
1634 /* We want to emit param bindings (before the first begin_stmt) in the
1635 initial view, if we are emitting views. To that end, we may
1636 consume initial notes in the function, processing them in
1637 final_start_function, before signaling the beginning of the
1638 prologue, rather than in final.
1640 We don't test whether the DECLs are PARM_DECLs: the assumption is
1641 that there will be a NOTE_INSN_BEGIN_STMT marker before any
1642 non-parameter NOTE_INSN_VAR_LOCATION. It's ok if the marker is not
1643 there; we'll just have more variable locations bound in the initial
1644 view, which is consistent with their being bound without any code
1645 that would give them a value. */
1647 static inline bool
1648 in_initial_view_p (rtx_insn *insn)
1650 return (!DECL_IGNORED_P (current_function_decl)
1651 && debug_variable_location_views
1652 && insn && GET_CODE (insn) == NOTE
1653 && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1654 || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1657 /* Output assembler code for the start of a function,
1658 and initialize some of the variables in this file
1659 for the new function. The label for the function and associated
1660 assembler pseudo-ops have already been output in `assemble_start_function'.
1662 FIRST is the first insn of the rtl for the function being compiled.
1663 FILE is the file to write assembler code to.
1664 SEEN should be initially set to zero, and it may be updated to
1665 indicate we have references to the next location view, that would
1666 require us to emit it at the current PC.
1667 OPTIMIZE_P is nonzero if we should eliminate redundant
1668 test and compare insns. */
1670 static void
1671 final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1672 int optimize_p ATTRIBUTE_UNUSED)
1674 block_depth = 0;
1676 this_is_asm_operands = 0;
1678 need_profile_function = false;
1680 last_filename = LOCATION_FILE (prologue_location);
1681 last_linenum = LOCATION_LINE (prologue_location);
1682 last_columnnum = LOCATION_COLUMN (prologue_location);
1683 last_discriminator = 0;
1684 force_source_line = false;
1686 high_block_linenum = high_function_linenum = last_linenum;
1688 if (flag_sanitize & SANITIZE_ADDRESS)
1689 asan_function_start ();
1691 rtx_insn *first = *firstp;
1692 if (in_initial_view_p (first))
1696 final_scan_insn (first, file, 0, 0, seen);
1697 first = NEXT_INSN (first);
1699 while (in_initial_view_p (first));
1700 *firstp = first;
1703 if (!DECL_IGNORED_P (current_function_decl))
1704 debug_hooks->begin_prologue (last_linenum, last_columnnum,
1705 last_filename);
1707 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1708 dwarf2out_begin_prologue (0, 0, NULL);
1710 if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename)
1711 debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);
1713 #ifdef LEAF_REG_REMAP
1714 if (crtl->uses_only_leaf_regs)
1715 leaf_renumber_regs (first);
1716 #endif
1718 /* The Sun386i and perhaps other machines don't work right
1719 if the profiling code comes after the prologue. */
1720 if (targetm.profile_before_prologue () && crtl->profile)
1722 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1723 && targetm.have_prologue ())
1725 rtx_insn *insn;
1726 for (insn = first; insn; insn = NEXT_INSN (insn))
1727 if (!NOTE_P (insn))
1729 insn = NULL;
1730 break;
1732 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1733 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1734 break;
1735 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1736 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1737 continue;
1738 else
1740 insn = NULL;
1741 break;
1744 if (insn)
1745 need_profile_function = true;
1746 else
1747 profile_function (file);
1749 else
1750 profile_function (file);
1753 /* If debugging, assign block numbers to all of the blocks in this
1754 function. */
1755 if (write_symbols)
1757 reemit_insn_block_notes ();
1758 number_blocks (current_function_decl);
1759 /* We never actually put out begin/end notes for the top-level
1760 block in the function. But, conceptually, that block is
1761 always needed. */
1762 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1765 unsigned HOST_WIDE_INT min_frame_size
1766 = constant_lower_bound (get_frame_size ());
1767 if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
1769 /* Issue a warning */
1770 warning (OPT_Wframe_larger_than_,
1771 "the frame size of %wu bytes is larger than %wu bytes",
1772 min_frame_size, warn_frame_larger_than_size);
1775 /* First output the function prologue: code to set up the stack frame. */
1776 targetm.asm_out.function_prologue (file);
1778 /* If the machine represents the prologue as RTL, the profiling code must
1779 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1780 if (! targetm.have_prologue ())
1781 profile_after_prologue (file);
1784 /* This is an exported final_start_function_1, callable without SEEN. */
1786 void
1787 final_start_function (rtx_insn *first, FILE *file,
1788 int optimize_p ATTRIBUTE_UNUSED)
1790 int seen = 0;
1791 final_start_function_1 (&first, file, &seen, optimize_p);
1792 gcc_assert (seen == 0);
1795 static void
1796 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1798 if (!targetm.profile_before_prologue () && crtl->profile)
1799 profile_function (file);
1802 static void
1803 profile_function (FILE *file ATTRIBUTE_UNUSED)
1805 #ifndef NO_PROFILE_COUNTERS
1806 # define NO_PROFILE_COUNTERS 0
1807 #endif
1808 #ifdef ASM_OUTPUT_REG_PUSH
1809 rtx sval = NULL, chain = NULL;
1811 if (cfun->returns_struct)
1812 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1813 true);
1814 if (cfun->static_chain_decl)
1815 chain = targetm.calls.static_chain (current_function_decl, true);
1816 #endif /* ASM_OUTPUT_REG_PUSH */
1818 if (! NO_PROFILE_COUNTERS)
1820 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1821 switch_to_section (data_section);
1822 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1823 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1824 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1827 switch_to_section (current_function_section ());
1829 #ifdef ASM_OUTPUT_REG_PUSH
1830 if (sval && REG_P (sval))
1831 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1832 if (chain && REG_P (chain))
1833 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1834 #endif
1836 FUNCTION_PROFILER (file, current_function_funcdef_no);
1838 #ifdef ASM_OUTPUT_REG_PUSH
1839 if (chain && REG_P (chain))
1840 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1841 if (sval && REG_P (sval))
1842 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1843 #endif
1846 /* Output assembler code for the end of a function.
1847 For clarity, args are same as those of `final_start_function'
1848 even though not all of them are needed. */
1850 void
1851 final_end_function (void)
1853 app_disable ();
1855 if (!DECL_IGNORED_P (current_function_decl))
1856 debug_hooks->end_function (high_function_linenum);
1858 /* Finally, output the function epilogue:
1859 code to restore the stack frame and return to the caller. */
1860 targetm.asm_out.function_epilogue (asm_out_file);
1862 /* And debug output. */
1863 if (!DECL_IGNORED_P (current_function_decl))
1864 debug_hooks->end_epilogue (last_linenum, last_filename);
1866 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1867 && dwarf2out_do_frame ())
1868 dwarf2out_end_epilogue (last_linenum, last_filename);
1870 some_local_dynamic_name = 0;
1874 /* Dumper helper for basic block information. FILE is the assembly
1875 output file, and INSN is the instruction being emitted. */
1877 static void
1878 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1879 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1881 basic_block bb;
1883 if (!flag_debug_asm)
1884 return;
1886 if (INSN_UID (insn) < bb_map_size
1887 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1889 edge e;
1890 edge_iterator ei;
1892 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1893 if (bb->count.initialized_p ())
1895 fprintf (file, ", count:");
1896 bb->count.dump (file);
1898 fprintf (file, " seq:%d", (*bb_seqn)++);
1899 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1900 FOR_EACH_EDGE (e, ei, bb->preds)
1902 dump_edge_info (file, e, TDF_DETAILS, 0);
1904 fprintf (file, "\n");
1906 if (INSN_UID (insn) < bb_map_size
1907 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1909 edge e;
1910 edge_iterator ei;
1912 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1913 FOR_EACH_EDGE (e, ei, bb->succs)
1915 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1917 fprintf (file, "\n");
1921 /* Output assembler code for some insns: all or part of a function.
1922 For description of args, see `final_start_function', above. */
1924 static void
1925 final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1927 rtx_insn *insn, *next;
1929 /* Used for -dA dump. */
1930 basic_block *start_to_bb = NULL;
1931 basic_block *end_to_bb = NULL;
1932 int bb_map_size = 0;
1933 int bb_seqn = 0;
1935 last_ignored_compare = 0;
1937 init_recog ();
1939 CC_STATUS_INIT;
1941 if (flag_debug_asm)
1943 basic_block bb;
1945 bb_map_size = get_max_uid () + 1;
1946 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1947 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1949 /* There is no cfg for a thunk. */
1950 if (!cfun->is_thunk)
1951 FOR_EACH_BB_REVERSE_FN (bb, cfun)
1953 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1954 end_to_bb[INSN_UID (BB_END (bb))] = bb;
1958 /* Output the insns. */
1959 for (insn = first; insn;)
1961 if (HAVE_ATTR_length)
1963 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1965 /* This can be triggered by bugs elsewhere in the compiler if
1966 new insns are created after init_insn_lengths is called. */
1967 gcc_assert (NOTE_P (insn));
1968 insn_current_address = -1;
1970 else
1971 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1972 /* final can be seen as an iteration of shorten_branches that
1973 does nothing (since a fixed point has already been reached). */
1974 insn_last_address = insn_current_address;
1977 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1978 bb_map_size, &bb_seqn);
1979 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1982 maybe_output_next_view (&seen);
1984 if (flag_debug_asm)
1986 free (start_to_bb);
1987 free (end_to_bb);
1990 /* Remove CFI notes, to avoid compare-debug failures. */
1991 for (insn = first; insn; insn = next)
1993 next = NEXT_INSN (insn);
1994 if (NOTE_P (insn)
1995 && (NOTE_KIND (insn) == NOTE_INSN_CFI
1996 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
1997 delete_insn (insn);
2001 /* This is an exported final_1, callable without SEEN. */
2003 void
2004 final (rtx_insn *first, FILE *file, int optimize_p)
2006 /* Those that use the internal final_start_function_1/final_1 API
2007 skip initial debug bind notes in final_start_function_1, and pass
2008 the modified FIRST to final_1. But for those that use the public
2009 final_start_function/final APIs, final_start_function can't move
2010 FIRST because it's not passed by reference, so if any such notes were
2011 skipped there, skip them again here. */
2012 while (in_initial_view_p (first))
2013 first = NEXT_INSN (first);
2015 final_1 (first, file, 0, optimize_p);
2018 const char *
2019 get_insn_template (int code, rtx_insn *insn)
2021 switch (insn_data[code].output_format)
2023 case INSN_OUTPUT_FORMAT_SINGLE:
2024 return insn_data[code].output.single;
2025 case INSN_OUTPUT_FORMAT_MULTI:
2026 return insn_data[code].output.multi[which_alternative];
2027 case INSN_OUTPUT_FORMAT_FUNCTION:
2028 gcc_assert (insn);
2029 return (*insn_data[code].output.function) (recog_data.operand, insn);
2031 default:
2032 gcc_unreachable ();
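/* For example (a sketch, not taken from any particular .md file): a
   define_insn whose template is the plain string "nop" uses
   INSN_OUTPUT_FORMAT_SINGLE; an "@"-style template with one line per
   constraint alternative is indexed by which_alternative via
   INSN_OUTPUT_FORMAT_MULTI; and a template written as C code is compiled
   into output.function, which is why the INSN itself must be passed in
   for that case.  */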
2036 /* Emit the appropriate declaration for an alternate-entry-point
2037 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2038 LABEL_KIND != LABEL_NORMAL.
2040 The case fall-through in this function is intentional. */
2041 static void
2042 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2044 const char *name = LABEL_NAME (insn);
2046 switch (LABEL_KIND (insn))
2048 case LABEL_WEAK_ENTRY:
2049 #ifdef ASM_WEAKEN_LABEL
2050 ASM_WEAKEN_LABEL (file, name);
2051 gcc_fallthrough ();
2052 #endif
2053 case LABEL_GLOBAL_ENTRY:
2054 targetm.asm_out.globalize_label (file, name);
2055 gcc_fallthrough ();
2056 case LABEL_STATIC_ENTRY:
2057 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2058 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2059 #endif
2060 ASM_OUTPUT_LABEL (file, name);
2061 break;
2063 case LABEL_NORMAL:
2064 default:
2065 gcc_unreachable ();
2069 /* Given a CALL_INSN, find and return the nested CALL. */
2070 static rtx
2071 call_from_call_insn (rtx_call_insn *insn)
2073 rtx x;
2074 gcc_assert (CALL_P (insn));
2075 x = PATTERN (insn);
2077 while (GET_CODE (x) != CALL)
2079 switch (GET_CODE (x))
2081 default:
2082 gcc_unreachable ();
2083 case COND_EXEC:
2084 x = COND_EXEC_CODE (x);
2085 break;
2086 case PARALLEL:
2087 x = XVECEXP (x, 0, 0);
2088 break;
2089 case SET:
2090 x = XEXP (x, 1);
2091 break;
2094 return x;
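/* For example (schematic RTL; modes and register numbers are
   illustrative), a value-returning call typically looks like

       (set (reg:SI 0)
            (call (mem:QI (symbol_ref:SI ("foo"))) (const_int 0)))

   possibly wrapped in a PARALLEL or a COND_EXEC; the loop above strips
   those wrappers until the CALL rtx itself is reached.  */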
2097 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2098 corresponding source line, if available. */
2100 static void
2101 asm_show_source (const char *filename, int linenum)
2103 if (!filename)
2104 return;
2106 char_span line = location_get_source_line (filename, linenum);
2107 if (!line)
2108 return;
2110 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2111 /* "line" is not 0-terminated, so we must use its length. */
2112 fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2113 fputc ('\n', asm_out_file);
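/* For example, with -fverbose-asm and "#" as ASM_COMMENT_START, a source
   line "x = y + 1;" at foo.c:42 (hypothetical file and line) is echoed
   into the assembly as

       # foo.c:42: x = y + 1;

   just before the instructions generated for it.  */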
2116 /* Judge if an absolute jump table is relocatable. */
2118 bool
2119 jumptable_relocatable (void)
2121 bool relocatable = false;
2123 if (!CASE_VECTOR_PC_RELATIVE
2124 && !targetm.asm_out.generate_pic_addr_diff_vec ()
2125 && targetm_common.have_named_sections)
2126 relocatable = targetm.asm_out.reloc_rw_mask ();
2128 return relocatable;
2131 /* The final scan for one insn, INSN.
2132 Args are same as in `final', except that INSN
2133 is the insn being scanned.
2134 Value returned is the next insn to be scanned.
2136 NOPEEPHOLES is the flag to disallow peephole processing (currently
2137 used for within delayed branch sequence output).
2139 SEEN is used to track the end of the prologue, for emitting
2140 debug information. We force the emission of a line note after
2141 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2143 static rtx_insn *
2144 final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2145 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2147 rtx_insn *next;
2148 rtx_jump_table_data *table;
2150 insn_counter++;
2152 /* Ignore deleted insns. These can occur when we split insns (due to a
2153 template of "#") while not optimizing. */
2154 if (insn->deleted ())
2155 return NEXT_INSN (insn);
2157 switch (GET_CODE (insn))
2159 case NOTE:
2160 switch (NOTE_KIND (insn))
2162 case NOTE_INSN_DELETED:
2163 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2164 break;
2166 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2167 maybe_output_next_view (seen);
2169 output_function_exception_table (0);
2171 if (targetm.asm_out.unwind_emit)
2172 targetm.asm_out.unwind_emit (asm_out_file, insn);
2174 in_cold_section_p = !in_cold_section_p;
2176 gcc_checking_assert (in_cold_section_p);
2177 if (in_cold_section_p)
2178 cold_function_name
2179 = clone_function_name (current_function_decl, "cold");
2181 if (dwarf2out_do_frame ())
2183 dwarf2out_switch_text_section ();
2184 if (!dwarf2_debug_info_emitted_p (current_function_decl)
2185 && !DECL_IGNORED_P (current_function_decl))
2186 debug_hooks->switch_text_section ();
2188 else if (!DECL_IGNORED_P (current_function_decl))
2189 debug_hooks->switch_text_section ();
2190 if (DECL_IGNORED_P (current_function_decl) && last_linenum
2191 && last_filename)
2192 debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
2193 last_filename);
2195 switch_to_section (current_function_section ());
2196 targetm.asm_out.function_switched_text_sections (asm_out_file,
2197 current_function_decl,
2198 in_cold_section_p);
2199 /* Emit a label for the split cold section. Form label name by
2200 suffixing "cold" to the original function's name. */
2201 if (in_cold_section_p)
2203 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2204 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2205 IDENTIFIER_POINTER
2206 (cold_function_name),
2207 current_function_decl);
2208 #else
2209 ASM_OUTPUT_LABEL (asm_out_file,
2210 IDENTIFIER_POINTER (cold_function_name));
2211 #endif
2212 if (dwarf2out_do_frame ()
2213 && cfun->fde->dw_fde_second_begin != NULL)
2214 ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2216 break;
2218 case NOTE_INSN_BASIC_BLOCK:
2219 if (need_profile_function)
2221 profile_function (asm_out_file);
2222 need_profile_function = false;
2225 if (targetm.asm_out.unwind_emit)
2226 targetm.asm_out.unwind_emit (asm_out_file, insn);
2228 break;
2230 case NOTE_INSN_EH_REGION_BEG:
2231 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2232 NOTE_EH_HANDLER (insn));
2233 break;
2235 case NOTE_INSN_EH_REGION_END:
2236 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2237 NOTE_EH_HANDLER (insn));
2238 break;
2240 case NOTE_INSN_PROLOGUE_END:
2241 targetm.asm_out.function_end_prologue (file);
2242 profile_after_prologue (file);
2244 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2246 *seen |= SEEN_EMITTED;
2247 force_source_line = true;
2249 else
2250 *seen |= SEEN_NOTE;
2252 break;
2254 case NOTE_INSN_EPILOGUE_BEG:
2255 if (!DECL_IGNORED_P (current_function_decl))
2256 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2257 targetm.asm_out.function_begin_epilogue (file);
2258 break;
2260 case NOTE_INSN_CFI:
2261 dwarf2out_emit_cfi (NOTE_CFI (insn));
2262 break;
2264 case NOTE_INSN_CFI_LABEL:
2265 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2266 NOTE_LABEL_NUMBER (insn));
2267 break;
2269 case NOTE_INSN_FUNCTION_BEG:
2270 if (need_profile_function)
2272 profile_function (asm_out_file);
2273 need_profile_function = false;
2276 app_disable ();
2277 if (!DECL_IGNORED_P (current_function_decl))
2278 debug_hooks->end_prologue (last_linenum, last_filename);
2280 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2282 *seen |= SEEN_EMITTED;
2283 force_source_line = true;
2285 else
2286 *seen |= SEEN_NOTE;
2288 break;
2290 case NOTE_INSN_BLOCK_BEG:
2291 if (debug_info_level >= DINFO_LEVEL_NORMAL
2292 || dwarf_debuginfo_p ()
2293 || write_symbols == VMS_DEBUG)
2295 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2297 app_disable ();
2298 ++block_depth;
2299 high_block_linenum = last_linenum;
2301 /* Output debugging info about the symbol-block beginning. */
2302 if (!DECL_IGNORED_P (current_function_decl))
2303 debug_hooks->begin_block (last_linenum, n);
2305 /* Mark this block as output. */
2306 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2307 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2309 break;
2311 case NOTE_INSN_BLOCK_END:
2312 maybe_output_next_view (seen);
2314 if (debug_info_level >= DINFO_LEVEL_NORMAL
2315 || dwarf_debuginfo_p ()
2316 || write_symbols == VMS_DEBUG)
2318 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2320 app_disable ();
2322 /* End of a symbol-block. */
2323 --block_depth;
2324 gcc_assert (block_depth >= 0);
2326 if (!DECL_IGNORED_P (current_function_decl))
2327 debug_hooks->end_block (high_block_linenum, n);
2328 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2329 == in_cold_section_p);
2331 break;
2333 case NOTE_INSN_DELETED_LABEL:
2334 /* Emit the label. We may have deleted the CODE_LABEL because
2335 the label could be proved to be unreachable, though still
2336 referenced (in the form of having its address taken).  */
2337 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2338 break;
2340 case NOTE_INSN_DELETED_DEBUG_LABEL:
2341 /* Similarly, but need to use different namespace for it. */
2342 if (CODE_LABEL_NUMBER (insn) != -1)
2343 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2344 break;
2346 case NOTE_INSN_VAR_LOCATION:
2347 if (!DECL_IGNORED_P (current_function_decl))
2349 debug_hooks->var_location (insn);
2350 set_next_view_needed (seen);
2352 break;
2354 case NOTE_INSN_BEGIN_STMT:
2355 gcc_checking_assert (cfun->debug_nonbind_markers);
2356 if (!DECL_IGNORED_P (current_function_decl)
2357 && notice_source_line (insn, NULL))
2359 output_source_line:
2360 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2361 last_filename, last_discriminator,
2362 true);
2363 clear_next_view_needed (seen);
2365 break;
2367 case NOTE_INSN_INLINE_ENTRY:
2368 gcc_checking_assert (cfun->debug_nonbind_markers);
2369 if (!DECL_IGNORED_P (current_function_decl)
2370 && notice_source_line (insn, NULL))
2372 (*debug_hooks->inline_entry) (LOCATION_BLOCK
2373 (NOTE_MARKER_LOCATION (insn)));
2374 goto output_source_line;
2376 break;
2378 default:
2379 gcc_unreachable ();
2380 break;
2382 break;
2384 case BARRIER:
2385 break;
2387 case CODE_LABEL:
2388 /* The target port might emit labels in the output function for
2389 some insn, e.g. sh.cc output_branchy_insn. */
2390 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2392 align_flags alignment = LABEL_TO_ALIGNMENT (insn);
2393 if (alignment.levels[0].log && NEXT_INSN (insn))
2395 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396 /* Output both primary and secondary alignment. */
2397 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
2398 alignment.levels[0].maxskip);
2399 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
2400 alignment.levels[1].maxskip);
2401 #else
2402 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2403 ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2404 #else
2405 ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
2406 #endif
2407 #endif
2410 CC_STATUS_INIT;
2412 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2413 debug_hooks->label (as_a <rtx_code_label *> (insn));
2415 app_disable ();
2417 /* If this label is followed by a jump-table, make sure we put
2418 the label in the read-only section. Also possibly write the
2419 label and jump table together. */
2420 table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2421 if (table)
2423 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2424 /* In this case, the case vector is being moved by the
2425 target, so don't output the label at all. Leave that
2426 to the back end macros. */
2427 #else
2428 if (! JUMP_TABLES_IN_TEXT_SECTION)
2430 int log_align;
2432 switch_to_section (targetm.asm_out.function_rodata_section
2433 (current_function_decl,
2434 jumptable_relocatable ()));
2436 #ifdef ADDR_VEC_ALIGN
2437 log_align = ADDR_VEC_ALIGN (table);
2438 #else
2439 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2440 #endif
2441 ASM_OUTPUT_ALIGN (file, log_align);
2443 else
2444 switch_to_section (current_function_section ());
2446 #ifdef ASM_OUTPUT_CASE_LABEL
2447 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2448 #else
2449 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2450 #endif
2451 #endif
2452 break;
2454 if (LABEL_ALT_ENTRY_P (insn))
2455 output_alternate_entry_point (file, insn);
2456 else
2457 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2458 break;
2460 default:
2462 rtx body = PATTERN (insn);
2463 int insn_code_number;
2464 const char *templ;
2465 bool is_stmt, *is_stmt_p;
2467 if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
2469 is_stmt = false;
2470 is_stmt_p = NULL;
2472 else
2473 is_stmt_p = &is_stmt;
2475 /* Reset this early so it is correct for ASM statements. */
2476 current_insn_predicate = NULL_RTX;
2478 /* An INSN, JUMP_INSN or CALL_INSN.
2479 First check for special kinds that recog doesn't recognize. */
2481 if (GET_CODE (body) == USE /* These are just declarations. */
2482 || GET_CODE (body) == CLOBBER)
2483 break;
2485 /* Detect insns that are really jump-tables
2486 and output them as such. */
2488 if (JUMP_TABLE_DATA_P (insn))
2490 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2491 int vlen, idx;
2492 #endif
2494 if (! JUMP_TABLES_IN_TEXT_SECTION)
2495 switch_to_section (targetm.asm_out.function_rodata_section
2496 (current_function_decl,
2497 jumptable_relocatable ()));
2498 else
2499 switch_to_section (current_function_section ());
2501 app_disable ();
2503 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2504 if (GET_CODE (body) == ADDR_VEC)
2506 #ifdef ASM_OUTPUT_ADDR_VEC
2507 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2508 #else
2509 gcc_unreachable ();
2510 #endif
2512 else
2514 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2515 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2516 #else
2517 gcc_unreachable ();
2518 #endif
2520 #else
2521 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2522 for (idx = 0; idx < vlen; idx++)
2524 if (GET_CODE (body) == ADDR_VEC)
2526 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2527 ASM_OUTPUT_ADDR_VEC_ELT
2528 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2529 #else
2530 gcc_unreachable ();
2531 #endif
2533 else
2535 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2536 ASM_OUTPUT_ADDR_DIFF_ELT
2537 (file,
2538 body,
2539 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2540 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2541 #else
2542 gcc_unreachable ();
2543 #endif
2546 #ifdef ASM_OUTPUT_CASE_END
2547 ASM_OUTPUT_CASE_END (file,
2548 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2549 insn);
2550 #endif
2551 #endif
2553 switch_to_section (current_function_section ());
2555 if (debug_variable_location_views
2556 && !DECL_IGNORED_P (current_function_decl))
2557 debug_hooks->var_location (insn);
2559 break;
2561 /* Output this line note if it is the first or the last line
2562 note in a row. */
2563 if (!DECL_IGNORED_P (current_function_decl)
2564 && notice_source_line (insn, is_stmt_p))
2566 if (flag_verbose_asm)
2567 asm_show_source (last_filename, last_linenum);
2568 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2569 last_filename, last_discriminator,
2570 is_stmt);
2571 clear_next_view_needed (seen);
2573 else
2574 maybe_output_next_view (seen);
2576 gcc_checking_assert (!DEBUG_INSN_P (insn));
2578 if (GET_CODE (body) == PARALLEL
2579 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2580 body = XVECEXP (body, 0, 0);
2582 if (GET_CODE (body) == ASM_INPUT)
2584 const char *string = XSTR (body, 0);
2586 /* There's no telling what that did to the condition codes. */
2587 CC_STATUS_INIT;
2589 if (string[0])
2591 expanded_location loc;
2593 app_enable ();
2594 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2595 if (*loc.file && loc.line)
2596 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2597 ASM_COMMENT_START, loc.line, loc.file);
2598 fprintf (asm_out_file, "\t%s\n", string);
2599 #if HAVE_AS_LINE_ZERO
2600 if (*loc.file && loc.line)
2601 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2602 #endif
2604 break;
2607 /* Detect `asm' construct with operands. */
2608 if (asm_noperands (body) >= 0)
2610 unsigned int noperands = asm_noperands (body);
2611 rtx *ops = XALLOCAVEC (rtx, noperands);
2612 const char *string;
2613 location_t loc;
2614 expanded_location expanded;
2616 /* There's no telling what that did to the condition codes. */
2617 CC_STATUS_INIT;
2619 /* Get out the operand values. */
2620 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2621 /* Inhibit dying on what would otherwise be compiler bugs. */
2622 insn_noperands = noperands;
2623 this_is_asm_operands = insn;
2624 expanded = expand_location (loc);
2626 #ifdef FINAL_PRESCAN_INSN
2627 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2628 #endif
2630 /* Output the insn using them. */
2631 if (string[0])
2633 app_enable ();
2634 if (expanded.file && expanded.line)
2635 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2636 ASM_COMMENT_START, expanded.line, expanded.file);
2637 output_asm_insn (string, ops);
2638 #if HAVE_AS_LINE_ZERO
2639 if (expanded.file && expanded.line)
2640 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2641 #endif
2644 if (targetm.asm_out.final_postscan_insn)
2645 targetm.asm_out.final_postscan_insn (file, insn, ops,
2646 insn_noperands);
2648 this_is_asm_operands = 0;
2649 break;
2652 app_disable ();
2654 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2656 /* A delayed-branch sequence */
2657 int i;
2659 final_sequence = seq;
2661 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2662 force the restoration of a comparison that was previously
2663 thought unnecessary. If that happens, cancel this sequence
2664 and cause that insn to be restored. */
2666 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2667 if (next != seq->insn (1))
2669 final_sequence = 0;
2670 return next;
2673 for (i = 1; i < seq->len (); i++)
2675 rtx_insn *insn = seq->insn (i);
2676 rtx_insn *next = NEXT_INSN (insn);
2677 /* We loop in case any instruction in a delay slot gets
2678 split. */
2680 insn = final_scan_insn (insn, file, 0, 1, seen);
2681 while (insn != next);
2683 #ifdef DBR_OUTPUT_SEQEND
2684 DBR_OUTPUT_SEQEND (file);
2685 #endif
2686 final_sequence = 0;
2688 /* If the insn requiring the delay slot was a CALL_INSN, the
2689 insns in the delay slot are actually executed before the
2690 called function. Hence we don't preserve any CC-setting
2691 actions in these insns and the CC must be marked as being
2692 clobbered by the function. */
2693 if (CALL_P (seq->insn (0)))
2695 CC_STATUS_INIT;
2697 break;
2700 /* We have a real machine instruction as rtl. */
2702 body = PATTERN (insn);
2704 /* Do machine-specific peephole optimizations if desired. */
2706 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2708 rtx_insn *next = peephole (insn);
2709 /* When peepholing, if there were notes within the peephole,
2710 emit them before the peephole. */
2711 if (next != 0 && next != NEXT_INSN (insn))
2713 rtx_insn *note, *prev = PREV_INSN (insn);
2715 for (note = NEXT_INSN (insn); note != next;
2716 note = NEXT_INSN (note))
2717 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2719 /* Put the notes in the proper position for a later
2720 rescan. For example, the SH target can do this
2721 when generating a far jump in a delayed branch
2722 sequence. */
2723 note = NEXT_INSN (insn);
2724 SET_PREV_INSN (note) = prev;
2725 SET_NEXT_INSN (prev) = note;
2726 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2727 SET_PREV_INSN (insn) = PREV_INSN (next);
2728 SET_NEXT_INSN (insn) = next;
2729 SET_PREV_INSN (next) = insn;
2732 /* PEEPHOLE might have changed this. */
2733 body = PATTERN (insn);
2736 /* Try to recognize the instruction.
2737 If successful, verify that the operands satisfy the
2738 constraints for the instruction. Crash if they don't,
2739 since `reload' should have changed them so that they do. */
2741 insn_code_number = recog_memoized (insn);
2742 cleanup_subreg_operands (insn);
2744 /* Dump the insn in the assembly for debugging (-dAP).
2745 If the final dump is requested as slim RTL, dump slim
2746 RTL to the assembly file also. */
2747 if (flag_dump_rtl_in_asm)
2749 print_rtx_head = ASM_COMMENT_START;
2750 if (! (dump_flags & TDF_SLIM))
2751 print_rtl_single (asm_out_file, insn);
2752 else
2753 dump_insn_slim (asm_out_file, insn);
2754 print_rtx_head = "";
2757 if (! constrain_operands_cached (insn, 1))
2758 fatal_insn_not_found (insn);
2760 /* Some target machines need to prescan each insn before
2761 it is output. */
2763 #ifdef FINAL_PRESCAN_INSN
2764 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2765 #endif
2767 if (targetm.have_conditional_execution ()
2768 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2769 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2771 current_output_insn = debug_insn = insn;
2773 /* Find the proper template for this insn. */
2774 templ = get_insn_template (insn_code_number, insn);
2776 /* If the C code returns 0, it means that it is a jump insn
2777 which follows a deleted test insn, and that test insn
2778 needs to be reinserted. */
2779 if (templ == 0)
2781 rtx_insn *prev;
2783 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2785 /* We have already processed the notes between the setter and
2786 the user. Make sure we don't process them again, this is
2787 particularly important if one of the notes is a block
2788 scope note or an EH note. */
2789 for (prev = insn;
2790 prev != last_ignored_compare;
2791 prev = PREV_INSN (prev))
2793 if (NOTE_P (prev))
2794 delete_insn (prev); /* Use delete_note. */
2797 return prev;
2800 /* If the template is the string "#", it means that this insn must
2801 be split. */
2802 if (templ[0] == '#' && templ[1] == '\0')
2804 rtx_insn *new_rtx = try_split (body, insn, 0);
2806 /* If we didn't split the insn, go away. */
2807 if (new_rtx == insn && PATTERN (new_rtx) == body)
2808 fatal_insn ("could not split insn", insn);
2810 /* If we have a length attribute, this instruction should have
2811 been split in shorten_branches, to ensure that we would have
2812 valid length info for the splitees. */
2813 gcc_assert (!HAVE_ATTR_length);
2815 return new_rtx;
2818 /* ??? This will put the directives in the wrong place if
2819 get_insn_template outputs assembly directly. However calling it
2820 before get_insn_template breaks if the insn is split. */
2821 if (targetm.asm_out.unwind_emit_before_insn
2822 && targetm.asm_out.unwind_emit)
2823 targetm.asm_out.unwind_emit (asm_out_file, insn);
2825 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2826 if (call_insn != NULL)
2828 rtx x = call_from_call_insn (call_insn);
2829 x = XEXP (x, 0);
2830 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2832 tree t;
2833 x = XEXP (x, 0);
2834 t = SYMBOL_REF_DECL (x);
2835 if (t)
2836 assemble_external (t);
2840 /* Output assembler code from the template. */
2841 output_asm_insn (templ, recog_data.operand);
2843 /* Some target machines need to postscan each insn after
2844 it is output. */
2845 if (targetm.asm_out.final_postscan_insn)
2846 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2847 recog_data.n_operands);
2849 if (!targetm.asm_out.unwind_emit_before_insn
2850 && targetm.asm_out.unwind_emit)
2851 targetm.asm_out.unwind_emit (asm_out_file, insn);
2853 /* Let the debug info back-end know about this call. We do this only
2854 after the instruction has been emitted because labels that may be
2855 created to reference the call instruction must appear after it. */
2856 if ((debug_variable_location_views || call_insn != NULL)
2857 && !DECL_IGNORED_P (current_function_decl))
2858 debug_hooks->var_location (insn);
2860 current_output_insn = debug_insn = 0;
2863 return NEXT_INSN (insn);
2866 /* This is a wrapper around final_scan_insn_1 that allows ports to
2867 call it recursively without a known value for SEEN. The value is
2868 saved at the outermost call, and recovered for recursive calls.
2869 Recursive calls MUST pass NULL, or the same pointer if they can
2870 otherwise get to it. */
2872 rtx_insn *
2873 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
2874 int nopeepholes, int *seen)
2876 static int *enclosing_seen;
2877 static int recursion_counter;
2879 gcc_assert (seen || recursion_counter);
2880 gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);
2882 if (!recursion_counter++)
2883 enclosing_seen = seen;
2884 else if (!seen)
2885 seen = enclosing_seen;
2887 rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
2889 if (!--recursion_counter)
2890 enclosing_seen = NULL;
2892 return ret;
2897 /* Map DECLs to instance discriminators. This is allocated and
2898 defined in ada/gcc-interfaces/trans.cc, when compiling with -gnateS.
2899 Mappings from this table are saved and restored for LTO, so
2900 link-time compilation will have this map set, at least in
2901 partitions containing at least one DECL with an associated instance
2902 discriminator. */
2904 decl_to_instance_map_t *decl_to_instance_map;
2906 /* Return the instance number assigned to DECL. */
2908 static inline int
2909 map_decl_to_instance (const_tree decl)
2911 int *inst;
2913 if (!decl_to_instance_map || !decl || !DECL_P (decl))
2914 return 0;
2916 inst = decl_to_instance_map->get (decl);
2918 if (!inst)
2919 return 0;
2921 return *inst;
2924 /* Set DISCRIMINATOR to the appropriate value, possibly derived from LOC. */
2926 static inline int
2927 compute_discriminator (location_t loc)
2929 int discriminator;
2931 if (!decl_to_instance_map)
2932 discriminator = get_discriminator_from_loc (loc);
2933 else
2935 tree block = LOCATION_BLOCK (loc);
2937 while (block && TREE_CODE (block) == BLOCK
2938 && !inlined_function_outer_scope_p (block))
2939 block = BLOCK_SUPERCONTEXT (block);
2941 tree decl;
2943 if (!block)
2944 decl = current_function_decl;
2945 else if (DECL_P (block))
2946 decl = block;
2947 else
2948 decl = block_ultimate_origin (block);
2950 discriminator = map_decl_to_instance (decl);
2953 return discriminator;
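/* For example (illustrative): when no instance map exists, two branches
   folded onto one source line can still be told apart because the
   location itself carries a discriminator; with -gnateS the Ada front
   end instead keys the discriminator off the enclosing (possibly
   inlined) function DECL, so distinct instances of the same source
   construct get distinct values.  */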
2956 /* Return discriminator of the statement that produced this insn. */
2958 int insn_discriminator (const rtx_insn *insn)
2960 return compute_discriminator (INSN_LOCATION (insn));
2963 /* Return whether a source line note needs to be emitted before INSN.
2964 Sets IS_STMT to TRUE if the line should be marked as a possible
2965 breakpoint location. */
2967 static bool
2968 notice_source_line (rtx_insn *insn, bool *is_stmt)
2970 const char *filename;
2971 int linenum, columnnum;
2972 int discriminator;
2974 if (NOTE_MARKER_P (insn))
2976 location_t loc = NOTE_MARKER_LOCATION (insn);
2977 expanded_location xloc = expand_location (loc);
2978 if (xloc.line == 0
2979 && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
2980 || LOCATION_LOCUS (loc) == BUILTINS_LOCATION))
2981 return false;
2983 filename = xloc.file;
2984 linenum = xloc.line;
2985 columnnum = xloc.column;
2986 discriminator = compute_discriminator (loc);
2987 force_source_line = true;
2989 else if (override_filename)
2991 filename = override_filename;
2992 linenum = override_linenum;
2993 columnnum = override_columnnum;
2994 discriminator = override_discriminator;
2996 else if (INSN_HAS_LOCATION (insn))
2998 expanded_location xloc = insn_location (insn);
2999 filename = xloc.file;
3000 linenum = xloc.line;
3001 columnnum = xloc.column;
3002 discriminator = insn_discriminator (insn);
3004 else
3006 filename = NULL;
3007 linenum = 0;
3008 columnnum = 0;
3009 discriminator = 0;
3012 if (filename == NULL)
3013 return false;
3015 if (force_source_line
3016 || filename != last_filename
3017 || last_linenum != linenum
3018 || (debug_column_info && last_columnnum != columnnum))
3020 force_source_line = false;
3021 last_filename = filename;
3022 last_linenum = linenum;
3023 last_columnnum = columnnum;
3024 last_discriminator = discriminator;
3025 if (is_stmt)
3026 *is_stmt = true;
3027 high_block_linenum = MAX (last_linenum, high_block_linenum);
3028 high_function_linenum = MAX (last_linenum, high_function_linenum);
3029 return true;
3032 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3034 /* If the discriminator changed, but the line number did not,
3035 output the line table entry with is_stmt false so the
3036 debugger does not treat this as a breakpoint location. */
3037 last_discriminator = discriminator;
3038 if (is_stmt)
3039 *is_stmt = false;
3040 return true;
3043 return false;
3046 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3047 directly to the desired hard register. */
3049 void
3050 cleanup_subreg_operands (rtx_insn *insn)
3052 int i;
3053 bool changed = false;
3054 extract_insn_cached (insn);
3055 for (i = 0; i < recog_data.n_operands; i++)
3057 /* The following test cannot use recog_data.operand when testing
3058 for a SUBREG: the underlying object might have been changed
3059 already if we are inside a match_operator expression that
3060 matches the else clause. Instead we test the underlying
3061 expression directly. */
3062 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3064 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3065 changed = true;
3067 else if (GET_CODE (recog_data.operand[i]) == PLUS
3068 || GET_CODE (recog_data.operand[i]) == MULT
3069 || MEM_P (recog_data.operand[i]))
3070 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3073 for (i = 0; i < recog_data.n_dups; i++)
3075 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3077 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3078 changed = true;
3080 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3081 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3082 || MEM_P (*recog_data.dup_loc[i]))
3083 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3085 if (changed)
3086 df_insn_rescan (insn);
3089 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3090 the thing it is a subreg of. Do it anyway if FINAL_P. */
3093 rtx alter_subreg (rtx *xp, bool final_p)
3095 rtx x = *xp;
3096 rtx y = SUBREG_REG (x);
3098 /* simplify_subreg does not remove subreg from volatile references.
3099 We are required to. */
3100 if (MEM_P (y))
3102 poly_int64 offset = SUBREG_BYTE (x);
3104 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3105 contains 0 instead of the proper offset. See simplify_subreg. */
3106 if (paradoxical_subreg_p (x))
3107 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3109 if (final_p)
3110 *xp = adjust_address (y, GET_MODE (x), offset);
3111 else
3112 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3114 else if (REG_P (y) && HARD_REGISTER_P (y))
3116 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3117 SUBREG_BYTE (x));
3119 if (new_rtx != 0)
3120 *xp = new_rtx;
3121 else if (final_p && REG_P (y))
3123 /* Simplify_subreg can't handle some REG cases, but we have to. */
3124 unsigned int regno;
3125 poly_int64 offset;
3127 regno = subreg_regno (x);
3128 if (subreg_lowpart_p (x))
3129 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3130 else
3131 offset = SUBREG_BYTE (x);
3132 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3136 return *xp;
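/* For example (hypothetical registers and modes): after reload,
   (subreg:SI (reg:DI 1) 4) is folded by simplify_subreg into the hard
   register that actually holds the requested word, while
   (subreg:SI (mem:DI ...) 4) becomes an SImode MEM at the address
   adjusted by 4 bytes, since final must not leave SUBREGs of MEMs in
   the output.  */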
3139 /* Do alter_subreg on all the SUBREGs contained in X. */
3141 static rtx
3142 walk_alter_subreg (rtx *xp, bool *changed)
3144 rtx x = *xp;
3145 switch (GET_CODE (x))
3147 case PLUS:
3148 case MULT:
3149 case AND:
3150 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3151 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3152 break;
3154 case MEM:
3155 case ZERO_EXTEND:
3156 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3157 break;
3159 case SUBREG:
3160 *changed = true;
3161 return alter_subreg (xp, true);
3163 default:
3164 break;
3167 return *xp;
3170 /* Report inconsistency between the assembler template and the operands.
3171 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3173 void
3174 output_operand_lossage (const char *cmsgid, ...)
3176 char *fmt_string;
3177 char *new_message;
3178 const char *pfx_str;
3179 va_list ap;
3181 va_start (ap, cmsgid);
3183 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3184 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3185 new_message = xvasprintf (fmt_string, ap);
3187 if (this_is_asm_operands)
3188 error_for_asm (this_is_asm_operands, "%s", new_message);
3189 else
3190 internal_error ("%s", new_message);
3192 free (fmt_string);
3193 free (new_message);
3194 va_end (ap);
3197 /* Output of assembler code from a template, and its subroutines. */
3199 /* Annotate the assembly with a comment describing the pattern and
3200 alternative used. */
3202 static void
3203 output_asm_name (void)
3205 if (debug_insn)
3207 fprintf (asm_out_file, "\t%s %d\t",
3208 ASM_COMMENT_START, INSN_UID (debug_insn));
3210 fprintf (asm_out_file, "[c=%d",
3211 insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3212 if (HAVE_ATTR_length)
3213 fprintf (asm_out_file, " l=%d",
3214 get_attr_length (debug_insn));
3215 fprintf (asm_out_file, "] ");
3217 int num = INSN_CODE (debug_insn);
3218 fprintf (asm_out_file, "%s", insn_data[num].name);
3219 if (insn_data[num].n_alternatives > 1)
3220 fprintf (asm_out_file, "/%d", which_alternative);
3222 /* Clear this so only the first assembler insn
3223 of any rtl insn will get the special comment for -dp. */
3224 debug_insn = 0;
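/* For example, with -dp the first assembler line of an insn is tagged
   with a comment of roughly this shape (UID, cost, length, pattern name
   and alternative number are all illustrative):

       # 17 [c=4 l=4]  *movsi_internal/2
 */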
3228 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3229 or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3230 corresponds to the address of the object and 0 if to the object. */
3232 static tree
3233 get_mem_expr_from_op (rtx op, int *paddressp)
3235 tree expr;
3236 int inner_addressp;
3238 *paddressp = 0;
3240 if (REG_P (op))
3241 return REG_EXPR (op);
3242 else if (!MEM_P (op))
3243 return 0;
3245 if (MEM_EXPR (op) != 0)
3246 return MEM_EXPR (op);
3248 /* Otherwise we have an address, so indicate it and look at the address. */
3249 *paddressp = 1;
3250 op = XEXP (op, 0);
3252 /* First check if we have a decl for the address, then look at the right side
3253 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3254 But don't allow the address to itself be indirect. */
3255 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3256 return expr;
3257 else if (GET_CODE (op) == PLUS
3258 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3259 return expr;
3261 while (UNARY_P (op)
3262 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3263 op = XEXP (op, 0);
3265 expr = get_mem_expr_from_op (op, &inner_addressp);
3266 return inner_addressp ? 0 : expr;
3269 /* Output operand names for assembler instructions. OPERANDS is the
3270 operand vector, OPORDER is the order to write the operands, and NOPS
3271 is the number of operands to write. */
3273 static void
3274 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3276 int wrote = 0;
3277 int i;
3279 for (i = 0; i < nops; i++)
3281 int addressp;
3282 rtx op = operands[oporder[i]];
3283 tree expr = get_mem_expr_from_op (op, &addressp);
3285 fprintf (asm_out_file, "%c%s",
3286 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3287 wrote = 1;
3288 if (expr)
3290 fprintf (asm_out_file, "%s",
3291 addressp ? "*" : "");
3292 print_mem_expr (asm_out_file, expr);
3293 wrote = 1;
3295 else if (REG_P (op) && ORIGINAL_REGNO (op)
3296 && ORIGINAL_REGNO (op) != REGNO (op))
3297 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3301 #ifdef ASSEMBLER_DIALECT
3302 /* Helper function to parse assembler dialects in the asm string.
3303 This is called from output_asm_insn and asm_fprintf. */
3304 static const char *
3305 do_assembler_dialects (const char *p, int *dialect)
3307 char c = *(p - 1);
3309 switch (c)
3311 case '{':
3313 int i;
3315 if (*dialect)
3316 output_operand_lossage ("nested assembly dialect alternatives");
3317 else
3318 *dialect = 1;
3320 /* If we want the first dialect, do nothing. Otherwise, skip
3321 DIALECT_NUMBER of strings ending with '|'. */
3322 for (i = 0; i < dialect_number; i++)
3324 while (*p && *p != '}')
3326 if (*p == '|')
3328 p++;
3329 break;
3332 /* Skip over any character after a percent sign. */
3333 if (*p == '%')
3334 p++;
3335 if (*p)
3336 p++;
3339 if (*p == '}')
3340 break;
3343 if (*p == '\0')
3344 output_operand_lossage ("unterminated assembly dialect alternative");
3346 break;
3348 case '|':
3349 if (*dialect)
3351 /* Skip to close brace. */
3354 if (*p == '\0')
3356 output_operand_lossage ("unterminated assembly dialect alternative");
3357 break;
3360 /* Skip over any character after a percent sign. */
3361 if (*p == '%' && p[1])
3363 p += 2;
3364 continue;
3367 if (*p++ == '}')
3368 break;
3370 while (1);
3372 *dialect = 0;
3374 else
3375 putc (c, asm_out_file);
3376 break;
3378 case '}':
3379 if (! *dialect)
3380 putc (c, asm_out_file);
3381 *dialect = 0;
3382 break;
3383 default:
3384 gcc_unreachable ();
3387 return p;
3389 #endif
3391 /* Output text from TEMPLATE to the assembler output file,
3392 obeying %-directions to substitute operands taken from
3393 the vector OPERANDS.
3395 %N (for N a digit) means print operand N in usual manner.
3396 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3397 and print the label name with no punctuation.
3398 %cN means require operand N to be a constant
3399 and print the constant expression with no punctuation.
3400 %aN means expect operand N to be a memory address
3401 (not a memory reference!) and print a reference
3402 to that address.
3403 %nN means expect operand N to be a constant
3404 and print a constant expression for minus the value
3405 of the operand, with no other punctuation. */
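/* For example (a sketch with made-up operands): with operands[0] a REG,
   operands[1] a CONST_INT of 4 and operands[2] a CODE_LABEL, a template
   such as "add %0,%0,%n1" prints the register via the target's
   print_operand hook and "-4" for the negated constant, while "%l2"
   would print the label's local name with no punctuation.  */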
3407 void
3408 output_asm_insn (const char *templ, rtx *operands)
3410 const char *p;
3411 int c;
3412 #ifdef ASSEMBLER_DIALECT
3413 int dialect = 0;
3414 #endif
3415 int oporder[MAX_RECOG_OPERANDS];
3416 char opoutput[MAX_RECOG_OPERANDS];
3417 int ops = 0;
3419 /* An insn may return a null string template
3420 in a case where no assembler code is needed. */
3421 if (*templ == 0)
3422 return;
3424 memset (opoutput, 0, sizeof opoutput);
3425 p = templ;
3426 putc ('\t', asm_out_file);
3428 #ifdef ASM_OUTPUT_OPCODE
3429 ASM_OUTPUT_OPCODE (asm_out_file, p);
3430 #endif
3432 while ((c = *p++))
3433 switch (c)
3435 case '\n':
3436 if (flag_verbose_asm)
3437 output_asm_operand_names (operands, oporder, ops);
3438 if (flag_print_asm_name)
3439 output_asm_name ();
3441 ops = 0;
3442 memset (opoutput, 0, sizeof opoutput);
3444 putc (c, asm_out_file);
3445 #ifdef ASM_OUTPUT_OPCODE
3446 while ((c = *p) == '\t')
3448 putc (c, asm_out_file);
3449 p++;
3451 ASM_OUTPUT_OPCODE (asm_out_file, p);
3452 #endif
3453 break;
3455 #ifdef ASSEMBLER_DIALECT
3456 case '{':
3457 case '}':
3458 case '|':
3459 p = do_assembler_dialects (p, &dialect);
3460 break;
3461 #endif
3463 case '%':
3464 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3465 if ASSEMBLER_DIALECT is defined and these characters have a special
3466 meaning as dialect delimiters.  */
3467 if (*p == '%'
3468 #ifdef ASSEMBLER_DIALECT
3469 || *p == '{' || *p == '}' || *p == '|'
3470 #endif
3473 putc (*p, asm_out_file);
3474 p++;
3476 /* %= outputs a number which is unique to each insn in the entire
3477 compilation. This is useful for making local labels that are
3478 referred to more than once in a given insn. */
3479 else if (*p == '=')
3481 p++;
3482 fprintf (asm_out_file, "%d", insn_counter);
3484 /* % followed by a letter and some digits
3485 outputs an operand in a special way depending on the letter.
3486 Letters `acln' are implemented directly.
3487 Other letters are passed to `output_operand' so that
3488 the TARGET_PRINT_OPERAND hook can define them. */
3489 else if (ISALPHA (*p))
3491 int letter = *p++;
3492 unsigned long opnum;
3493 char *endptr;
3495 opnum = strtoul (p, &endptr, 10);
3497 if (endptr == p)
3498 output_operand_lossage ("operand number missing "
3499 "after %%-letter");
3500 else if (this_is_asm_operands && opnum >= insn_noperands)
3501 output_operand_lossage ("operand number out of range");
3502 else if (letter == 'l')
3503 output_asm_label (operands[opnum]);
3504 else if (letter == 'a')
3505 output_address (VOIDmode, operands[opnum]);
3506 else if (letter == 'c')
3508 if (CONSTANT_ADDRESS_P (operands[opnum]))
3509 output_addr_const (asm_out_file, operands[opnum]);
3510 else
3511 output_operand (operands[opnum], 'c');
3513 else if (letter == 'n')
3515 if (CONST_INT_P (operands[opnum]))
3516 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3517 - INTVAL (operands[opnum]));
3518 else
3520 putc ('-', asm_out_file);
3521 output_addr_const (asm_out_file, operands[opnum]);
3524 else
3525 output_operand (operands[opnum], letter);
3527 if (!opoutput[opnum])
3528 oporder[ops++] = opnum;
3529 opoutput[opnum] = 1;
3531 p = endptr;
3532 c = *p;
3534 /* % followed by a digit outputs an operand the default way. */
3535 else if (ISDIGIT (*p))
3537 unsigned long opnum;
3538 char *endptr;
3540 opnum = strtoul (p, &endptr, 10);
3541 if (this_is_asm_operands && opnum >= insn_noperands)
3542 output_operand_lossage ("operand number out of range");
3543 else
3544 output_operand (operands[opnum], 0);
3546 if (!opoutput[opnum])
3547 oporder[ops++] = opnum;
3548 opoutput[opnum] = 1;
3550 p = endptr;
3551 c = *p;
3553 /* % followed by punctuation: output something for that
3554 punctuation character alone, with no operand. The
3555 TARGET_PRINT_OPERAND hook decides what is actually done. */
3556 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3557 output_operand (NULL_RTX, *p++);
3558 else
3559 output_operand_lossage ("invalid %%-code");
3560 break;
3562 default:
3563 putc (c, asm_out_file);
3566 /* Try to keep the asm a bit more readable. */
3567 if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
3568 putc ('\t', asm_out_file);
3570 /* Write out the variable names for operands, if we know them. */
3571 if (flag_verbose_asm)
3572 output_asm_operand_names (operands, oporder, ops);
3573 if (flag_print_asm_name)
3574 output_asm_name ();
3576 putc ('\n', asm_out_file);
3579 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3581 void
3582 output_asm_label (rtx x)
3584 char buf[256];
3586 if (GET_CODE (x) == LABEL_REF)
3587 x = label_ref_label (x);
3588 if (LABEL_P (x)
3589 || (NOTE_P (x)
3590 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3591 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3592 else
3593 output_operand_lossage ("'%%l' operand isn't a label");
3595 assemble_name (asm_out_file, buf);
3598 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3600 void
3601 mark_symbol_refs_as_used (rtx x)
3603 subrtx_iterator::array_type array;
3604 FOR_EACH_SUBRTX (iter, array, x, ALL)
3606 const_rtx x = *iter;
3607 if (GET_CODE (x) == SYMBOL_REF)
3608 if (tree t = SYMBOL_REF_DECL (x))
3609 assemble_external (t);
3613 /* Print operand X using machine-dependent assembler syntax.
3614 CODE is a non-digit that preceded the operand-number in the % spec,
3615 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3616 between the % and the digits.
3617 When CODE is a non-letter, X is 0.
3619 The meanings of the letters are machine-dependent and controlled
3620 by TARGET_PRINT_OPERAND. */
3622 void
3623 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3625 if (x && GET_CODE (x) == SUBREG)
3626 x = alter_subreg (&x, true);
3628 /* X must not be a pseudo reg. */
3629 if (!targetm.no_register_allocation)
3630 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3632 targetm.asm_out.print_operand (asm_out_file, x, code);
3634 if (x == NULL_RTX)
3635 return;
3637 mark_symbol_refs_as_used (x);
3640 /* Print a memory reference operand for address X using
3641 machine-dependent assembler syntax. */
3643 void
3644 output_address (machine_mode mode, rtx x)
3646 bool changed = false;
3647 walk_alter_subreg (&x, &changed);
3648 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3651 /* Print an integer constant expression in assembler syntax.
3652 Addition and subtraction are the only arithmetic
3653 that may appear in these expressions. */
3655 void
3656 output_addr_const (FILE *file, rtx x)
3658 char buf[256];
3660 restart:
3661 switch (GET_CODE (x))
3663 case PC:
3664 putc ('.', file);
3665 break;
3667 case SYMBOL_REF:
3668 if (SYMBOL_REF_DECL (x))
3669 assemble_external (SYMBOL_REF_DECL (x));
3670 #ifdef ASM_OUTPUT_SYMBOL_REF
3671 ASM_OUTPUT_SYMBOL_REF (file, x);
3672 #else
3673 assemble_name (file, XSTR (x, 0));
3674 #endif
3675 break;
3677 case LABEL_REF:
3678 x = label_ref_label (x);
3679 /* Fall through. */
3680 case CODE_LABEL:
3681 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3682 #ifdef ASM_OUTPUT_LABEL_REF
3683 ASM_OUTPUT_LABEL_REF (file, buf);
3684 #else
3685 assemble_name (file, buf);
3686 #endif
3687 break;
3689 case CONST_INT:
3690 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3691 break;
3693 case CONST:
3694 /* This used to output parentheses around the expression,
3695 but that does not work on the 386 (either ATT or BSD assembler). */
3696 output_addr_const (file, XEXP (x, 0));
3697 break;
3699 case CONST_WIDE_INT:
3700 /* We do not know the mode here, so we have to use a roundabout
3701 way to build a wide-int to get it printed properly. */
3703 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3704 CONST_WIDE_INT_NUNITS (x),
3705 CONST_WIDE_INT_NUNITS (x)
3706 * HOST_BITS_PER_WIDE_INT,
3707 false);
3708 print_decs (w, file);
3710 break;
3712 case CONST_DOUBLE:
3713 if (CONST_DOUBLE_AS_INT_P (x))
3715 /* We can use %d if the number is one word and positive. */
3716 if (CONST_DOUBLE_HIGH (x))
3717 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3718 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3719 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3720 else if (CONST_DOUBLE_LOW (x) < 0)
3721 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3722 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3723 else
3724 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3726 else
3727 /* We can't handle floating point constants;
3728 PRINT_OPERAND must handle them. */
3729 output_operand_lossage ("floating constant misused");
3730 break;
3732 case CONST_FIXED:
3733 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3734 break;
3736 case PLUS:
3737 /* Some assemblers need integer constants to appear last (eg masm). */
3738 if (CONST_INT_P (XEXP (x, 0)))
3740 output_addr_const (file, XEXP (x, 1));
3741 if (INTVAL (XEXP (x, 0)) >= 0)
3742 fprintf (file, "+");
3743 output_addr_const (file, XEXP (x, 0));
3745 else
3747 output_addr_const (file, XEXP (x, 0));
3748 if (!CONST_INT_P (XEXP (x, 1))
3749 || INTVAL (XEXP (x, 1)) >= 0)
3750 fprintf (file, "+");
3751 output_addr_const (file, XEXP (x, 1));
3753 break;
3755 case MINUS:
3756 /* Avoid outputting things like x-x or x+5-x,
3757 since some assemblers can't handle that. */
3758 x = simplify_subtraction (x);
3759 if (GET_CODE (x) != MINUS)
3760 goto restart;
3762 output_addr_const (file, XEXP (x, 0));
3763 fprintf (file, "-");
3764 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3765 || GET_CODE (XEXP (x, 1)) == PC
3766 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3767 output_addr_const (file, XEXP (x, 1));
3768 else
3770 fputs (targetm.asm_out.open_paren, file);
3771 output_addr_const (file, XEXP (x, 1));
3772 fputs (targetm.asm_out.close_paren, file);
3774 break;
3776 case ZERO_EXTEND:
3777 case SIGN_EXTEND:
3778 case SUBREG:
3779 case TRUNCATE:
3780 output_addr_const (file, XEXP (x, 0));
3781 break;
3783 default:
3784 if (targetm.asm_out.output_addr_const_extra (file, x))
3785 break;
3787 output_operand_lossage ("invalid expression as operand");
3791 /* Output a quoted string. */
3793 void
3794 output_quoted_string (FILE *asm_file, const char *string)
3796 #ifdef OUTPUT_QUOTED_STRING
3797 OUTPUT_QUOTED_STRING (asm_file, string);
3798 #else
3799 char c;
3801 putc ('\"', asm_file);
3802 while ((c = *string++) != 0)
3804 if (ISPRINT (c))
3806 if (c == '\"' || c == '\\')
3807 putc ('\\', asm_file);
3808 putc (c, asm_file);
3810 else
3811 fprintf (asm_file, "\\%03o", (unsigned char) c);
3813 putc ('\"', asm_file);
3814 #endif
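/* For example, output_quoted_string (f, "a\"b\n") writes

       "a\"b\012"

   to the file: printable characters are copied (with '"' and '\\'
   escaped) and the non-printable newline is emitted as a three-digit
   octal escape.  */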
3817 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3819 void
3820 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3822 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3823 if (value == 0)
3824 putc ('0', f);
3825 else
3827 char *p = buf + sizeof (buf);
3829 *--p = "0123456789abcdef"[value % 16];
3830 while ((value /= 16) != 0);
3831 *--p = 'x';
3832 *--p = '0';
3833 fwrite (p, 1, buf + sizeof (buf) - p, f);
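/* For example, fprint_whex (f, 0xdeadbeef) writes "0xdeadbeef" and
   fprint_whex (f, 0) writes just "0"; the digits are generated least
   significant first into BUF and then written out with one fwrite.  */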
3837 /* Internal function that prints an unsigned long in decimal in reverse.
3838 The output string IS NOT null-terminated. */
3840 static int
3841 sprint_ul_rev (char *s, unsigned long value)
3843 int i = 0;
3846 s[i] = "0123456789"[value % 10];
3847 value /= 10;
3848 i++;
3849 /* alternate version, without modulo */
3850 /* oldval = value; */
3851 /* value /= 10; */
3852 /* s[i] = "0123456789" [oldval - 10*value]; */
3853 /* i++ */
3855 while (value != 0);
3856 return i;
3859 /* Write an unsigned long as decimal to a file, fast. */
3861 void
3862 fprint_ul (FILE *f, unsigned long value)
3864 /* python says: len(str(2**64)) == 20 */
3865 char s[20];
3866 int i;
3868 i = sprint_ul_rev (s, value);
3870 /* It's probably too small to bother with string reversal and fputs. */
3873 i--;
3874 putc (s[i], f);
3876 while (i != 0);
3879 /* Write an unsigned long as decimal to a string, fast.
3880 s must be wide enough to not overflow, at least 21 chars.
3881 Returns the length of the string (without terminating '\0'). */
3884 int sprint_ul (char *s, unsigned long value)
3886 int len = sprint_ul_rev (s, value);
3887 s[len] = '\0';
3889 std::reverse (s, s + len);
3890 return len;
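/* For example, sprint_ul (buf, 90210UL) stores "90210" plus the
   terminating '\0' in BUF and returns 5; sprint_ul_rev produces the
   digits in reverse ("01209") and std::reverse puts them in order.  */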
3893 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3894 %R prints the value of REGISTER_PREFIX.
3895 %L prints the value of LOCAL_LABEL_PREFIX.
3896 %U prints the value of USER_LABEL_PREFIX.
3897 %I prints the value of IMMEDIATE_PREFIX.
3898 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3899 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3901 We handle alternate assembler dialects here, just like output_asm_insn. */
3903 void
3904 asm_fprintf (FILE *file, const char *p, ...)
3906 char buf[10];
3907 char *q, c;
3908 #ifdef ASSEMBLER_DIALECT
3909 int dialect = 0;
3910 #endif
3911 va_list argptr;
3913 va_start (argptr, p);
3915 buf[0] = '%';
3917 while ((c = *p++))
3918 switch (c)
3920 #ifdef ASSEMBLER_DIALECT
3921 case '{':
3922 case '}':
3923 case '|':
3924 p = do_assembler_dialects (p, &dialect);
3925 break;
3926 #endif
3928 case '%':
3929 c = *p++;
3930 q = &buf[1];
3931 while (strchr ("-+ #0", c))
3933 *q++ = c;
3934 c = *p++;
3936 while (ISDIGIT (c) || c == '.')
3938 *q++ = c;
3939 c = *p++;
3941 switch (c)
3943 case '%':
3944 putc ('%', file);
3945 break;
3947 case 'd': case 'i': case 'u':
3948 case 'x': case 'X': case 'o':
3949 case 'c':
3950 *q++ = c;
3951 *q = 0;
3952 fprintf (file, buf, va_arg (argptr, int));
3953 break;
3955 case 'w':
3956 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3957 'o' cases, but we do not check for those cases. It
3958 means that the value is a HOST_WIDE_INT, which may be
3959 either `long' or `long long'. */
3960 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3961 q += strlen (HOST_WIDE_INT_PRINT);
3962 *q++ = *p++;
3963 *q = 0;
3964 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3965 break;
3967 case 'l':
3968 *q++ = c;
3969 #ifdef HAVE_LONG_LONG
3970 if (*p == 'l')
3972 *q++ = *p++;
3973 *q++ = *p++;
3974 *q = 0;
3975 fprintf (file, buf, va_arg (argptr, long long));
3977 else
3978 #endif
3980 *q++ = *p++;
3981 *q = 0;
3982 fprintf (file, buf, va_arg (argptr, long));
3985 break;
3987 case 's':
3988 *q++ = c;
3989 *q = 0;
3990 fprintf (file, buf, va_arg (argptr, char *));
3991 break;
3993 case 'O':
3994 #ifdef ASM_OUTPUT_OPCODE
3995 ASM_OUTPUT_OPCODE (asm_out_file, p);
3996 #endif
3997 break;
3999 case 'R':
4000 #ifdef REGISTER_PREFIX
4001 fprintf (file, "%s", REGISTER_PREFIX);
4002 #endif
4003 break;
4005 case 'I':
4006 #ifdef IMMEDIATE_PREFIX
4007 fprintf (file, "%s", IMMEDIATE_PREFIX);
4008 #endif
4009 break;
4011 case 'L':
4012 #ifdef LOCAL_LABEL_PREFIX
4013 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4014 #endif
4015 break;
4017 case 'U':
4018 fputs (user_label_prefix, file);
4019 break;
4021 #ifdef ASM_FPRINTF_EXTENSIONS
4022 /* Uppercase letters are reserved for general use by asm_fprintf
4023 and so are not available to target specific code. In order to
4024 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4025 they are defined here. As they get turned into real extensions
4026 to asm_fprintf they should be removed from this list. */
4027 case 'A': case 'B': case 'C': case 'D': case 'E':
4028 case 'F': case 'G': case 'H': case 'J': case 'K':
4029 case 'M': case 'N': case 'P': case 'Q': case 'S':
4030 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4031 break;
4033 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4034 #endif
4035 default:
4036 gcc_unreachable ();
4038 break;
4040 default:
4041 putc (c, file);
4043 va_end (argptr);
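/* For example (hypothetical target macros): on a port that defines
   REGISTER_PREFIX as "%" and IMMEDIATE_PREFIX as "#",

       asm_fprintf (file, "\tmov\t%R%s, %I%d\n", "r3", 42);

   writes "\tmov\t%r3, #42" followed by a newline; %U would similarly
   prepend user_label_prefix to a symbol name.  */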
4046 /* Return true if this function has no function calls. */
4048 bool
4049 leaf_function_p (void)
4051 rtx_insn *insn;
4053 /* Ensure we walk the entire function body. */
4054 gcc_assert (!in_sequence_p ());
4056 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4057 functions even if they call mcount. */
4058 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4059 return false;
4061 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4063 if (CALL_P (insn)
4064 && ! SIBLING_CALL_P (insn)
4065 && ! FAKE_CALL_P (insn))
4066 return false;
4067 if (NONJUMP_INSN_P (insn)
4068 && GET_CODE (PATTERN (insn)) == SEQUENCE
4069 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4070 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4071 return false;
4074 return true;
4077 /* Return true if branch is a forward branch.
4078 Uses insn_shuid array, so it works only in the final pass. May be used by
4079 output templates to customarily add branch prediction hints.  */
4081 bool
4082 final_forward_branch_p (rtx_insn *insn)
4084 int insn_id, label_id;
4086 gcc_assert (uid_shuid);
4087 insn_id = INSN_SHUID (insn);
4088 label_id = INSN_SHUID (JUMP_LABEL (insn));
4089 /* We've hit some insns that do not have id information available. */
4090 gcc_assert (insn_id && label_id);
4091 return insn_id < label_id;
4094 /* On some machines, a function with no call insns
4095 can run faster if it doesn't create its own register window.
4096 When output, the leaf function should use only the "output"
4097 registers. Ordinarily, the function would be compiled to use
4098 the "input" registers to find its arguments; it is a candidate
4099 for leaf treatment if it uses only the "input" registers.
4100 Leaf function treatment means renumbering so the function
4101 uses the "output" registers instead. */
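/* Illustrative only: on a register-window target, LEAF_REGISTERS and
   LEAF_REG_REMAP might look conceptually like

     #define LEAF_REGISTERS  (permitted_leaf_regs_table)
     #define LEAF_REG_REMAP(REGNO)  (leaf_reg_remap_table[REGNO])

   with the remap table sending each "input" register to the corresponding
   "output" register.  The names above are placeholders; the real definitions
   live in the target headers.  */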
4103 #ifdef LEAF_REGISTERS
4105 /* Return true if this function uses only the registers that can be
4106 safely renumbered. */
4108 bool
4109 only_leaf_regs_used (void)
4111 int i;
4112 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4114 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4115 if ((df_regs_ever_live_p (i) || global_regs[i])
4116 && ! permitted_reg_in_leaf_functions[i])
4117 return false;
4119 if (crtl->uses_pic_offset_table
4120 && pic_offset_table_rtx != 0
4121 && REG_P (pic_offset_table_rtx)
4122 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4123 return false;
4125 return true;
4128 /* Scan all instructions and renumber all registers into those
4129 available in leaf functions. */
4131 static void
4132 leaf_renumber_regs (rtx_insn *first)
4134 rtx_insn *insn;
4136 /* Renumber only the actual patterns.
4137 The reg-notes can contain frame pointer refs;
4138 renumbering them could crash and should not be needed. */
4139 for (insn = first; insn; insn = NEXT_INSN (insn))
4140 if (INSN_P (insn))
4141 leaf_renumber_regs_insn (PATTERN (insn));
4144 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4145 available in leaf functions. */
4147 void
4148 leaf_renumber_regs_insn (rtx in_rtx)
4150 int i, j;
4151 const char *format_ptr;
4153 if (in_rtx == 0)
4154 return;
4156 /* Renumber all input-registers into output-registers. */
4160 if (REG_P (in_rtx))
4162 int newreg;
4164 /* Don't renumber the same reg twice. */
4165 if (in_rtx->used)
4166 return;
4168 newreg = REGNO (in_rtx);
4169 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4170 to reach here as part of a REG_NOTE. */
4171 if (newreg >= FIRST_PSEUDO_REGISTER)
4173 in_rtx->used = 1;
4174 return;
4176 newreg = LEAF_REG_REMAP (newreg);
4177 gcc_assert (newreg >= 0);
4178 df_set_regs_ever_live (REGNO (in_rtx), false);
4179 df_set_regs_ever_live (newreg, true);
4180 SET_REGNO (in_rtx, newreg);
4181 in_rtx->used = 1;
4182 return;
4185 if (INSN_P (in_rtx))
4187 /* Inside a SEQUENCE, we find insns.
4188 Renumber just the patterns of these insns,
4189 just as we do for the top-level insns. */
4190 leaf_renumber_regs_insn (PATTERN (in_rtx));
4191 return;
4194 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4196 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4197 switch (*format_ptr++)
4199 case 'e':
4200 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4201 break;
4203 case 'E':
4204 if (XVEC (in_rtx, i) != NULL)
4205 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4206 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4207 break;
4209 case 'S':
4210 case 's':
4211 case '0':
4212 case 'i':
4213 case 'w':
4214 case 'p':
4215 case 'n':
4216 case 'u':
4217 break;
4219 default:
4220 gcc_unreachable ();
4223 #endif
4225 /* Turn the RTL into assembly. */
4226 static unsigned int
4227 rest_of_handle_final (void)
4229 const char *fnname = get_fnname_from_decl (current_function_decl);
4231 /* Turn debug markers into notes if the var-tracking pass has not
4232 been invoked. */
4233 if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
4234 delete_vta_debug_insns (false);
4236 assemble_start_function (current_function_decl, fnname);
4237 rtx_insn *first = get_insns ();
4238 int seen = 0;
4239 final_start_function_1 (&first, asm_out_file, &seen, optimize);
4240 final_1 (first, asm_out_file, seen, optimize);
4241 if (flag_ipa_ra
4242 && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
4243 /* Functions with the naked attribute are supported only with basic asm
4244 statements in the body, so for the supported use cases the information
4245 on clobbered registers is not available. */
4246 && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
4247 collect_fn_hard_reg_usage ();
4248 final_end_function ();
4250 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4251 directive that closes the procedure descriptor; the same applies to x64
4252 SEH. Otherwise it's not strictly necessary, but it doesn't hurt either. */
4253 output_function_exception_table (crtl->has_bb_partition ? 1 : 0);
4255 assemble_end_function (current_function_decl, fnname);
4257 /* Free up reg info memory. */
4258 free_reg_info ();
4260 if (! quiet_flag)
4261 fflush (asm_out_file);
4263 /* Note that for those inline functions where we don't initially
4264 know for certain that we will be generating an out-of-line copy,
4265 the first invocation of this routine (rest_of_compilation) will
4266 skip over this code by doing a `goto exit_rest_of_compilation;'.
4267 Later on, wrapup_global_declarations will (indirectly) call
4268 rest_of_compilation again for those inline functions that need
4269 to have out-of-line copies generated. During that call, we
4270 *will* be routed past here. */
4272 timevar_push (TV_SYMOUT);
4273 if (!DECL_IGNORED_P (current_function_decl))
4274 debug_hooks->function_decl (current_function_decl);
4275 timevar_pop (TV_SYMOUT);
4277 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4278 DECL_INITIAL (current_function_decl) = error_mark_node;
4280 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4281 && targetm.have_ctors_dtors)
4282 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4283 decl_init_priority_lookup
4284 (current_function_decl));
4285 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4286 && targetm.have_ctors_dtors)
4287 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4288 decl_fini_priority_lookup
4289 (current_function_decl));
4290 return 0;
4293 namespace {
4295 const pass_data pass_data_final =
4297 RTL_PASS, /* type */
4298 "final", /* name */
4299 OPTGROUP_NONE, /* optinfo_flags */
4300 TV_FINAL, /* tv_id */
4301 0, /* properties_required */
4302 0, /* properties_provided */
4303 0, /* properties_destroyed */
4304 0, /* todo_flags_start */
4305 0, /* todo_flags_finish */
4308 class pass_final : public rtl_opt_pass
4310 public:
4311 pass_final (gcc::context *ctxt)
4312 : rtl_opt_pass (pass_data_final, ctxt)
4315 /* opt_pass methods: */
4316 unsigned int execute (function *) final override
4318 return rest_of_handle_final ();
4321 }; // class pass_final
4323 } // anon namespace
4325 rtl_opt_pass *
4326 make_pass_final (gcc::context *ctxt)
4328 return new pass_final (ctxt);
4332 static unsigned int
4333 rest_of_handle_shorten_branches (void)
4335 /* Shorten branches. */
4336 shorten_branches (get_insns ());
4337 return 0;
4340 namespace {
4342 const pass_data pass_data_shorten_branches =
4344 RTL_PASS, /* type */
4345 "shorten", /* name */
4346 OPTGROUP_NONE, /* optinfo_flags */
4347 TV_SHORTEN_BRANCH, /* tv_id */
4348 0, /* properties_required */
4349 0, /* properties_provided */
4350 0, /* properties_destroyed */
4351 0, /* todo_flags_start */
4352 0, /* todo_flags_finish */
4355 class pass_shorten_branches : public rtl_opt_pass
4357 public:
4358 pass_shorten_branches (gcc::context *ctxt)
4359 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4362 /* opt_pass methods: */
4363 unsigned int execute (function *) final override
4365 return rest_of_handle_shorten_branches ();
4368 }; // class pass_shorten_branches
4370 } // anon namespace
4372 rtl_opt_pass *
4373 make_pass_shorten_branches (gcc::context *ctxt)
4375 return new pass_shorten_branches (ctxt);
4379 static unsigned int
4380 rest_of_clean_state (void)
4382 rtx_insn *insn, *next;
4383 FILE *final_output = NULL;
4384 int save_unnumbered = flag_dump_unnumbered;
4385 int save_noaddr = flag_dump_noaddr;
4387 if (flag_dump_final_insns)
4389 final_output = fopen (flag_dump_final_insns, "a");
4390 if (!final_output)
4392 error ("could not open final insn dump file %qs: %m",
4393 flag_dump_final_insns);
4394 flag_dump_final_insns = NULL;
4396 else
4398 flag_dump_noaddr = flag_dump_unnumbered = 1;
4399 if (flag_compare_debug_opt || flag_compare_debug)
4400 dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
4401 dump_function_header (final_output, current_function_decl,
4402 dump_flags);
4403 final_insns_dump_p = true;
4405 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4406 if (LABEL_P (insn))
4407 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4408 else
4410 if (NOTE_P (insn))
4411 set_block_for_insn (insn, NULL);
4412 INSN_UID (insn) = 0;
4417 /* It is very important to decompose the RTL instruction chain here:
4418 debug information keeps pointing into CODE_LABEL insns inside the function
4419 body. If these remain pointing to the other insns, we end up preserving
4420 the whole RTL chain and the attached detailed debug info in memory. */
4421 for (insn = get_insns (); insn; insn = next)
4423 next = NEXT_INSN (insn);
4424 SET_NEXT_INSN (insn) = NULL;
4425 SET_PREV_INSN (insn) = NULL;
4427 rtx_insn *call_insn = insn;
4428 if (NONJUMP_INSN_P (call_insn)
4429 && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
4431 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
4432 call_insn = seq->insn (0);
4434 if (CALL_P (call_insn))
4436 rtx note
4437 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
4438 if (note)
4439 remove_note (call_insn, note);
4442 if (final_output
4443 && (!NOTE_P (insn)
4444 || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4445 && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
4446 && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
4447 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4448 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4449 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4450 print_rtl_single (final_output, insn);
4453 if (final_output)
4455 flag_dump_noaddr = save_noaddr;
4456 flag_dump_unnumbered = save_unnumbered;
4457 final_insns_dump_p = false;
4459 if (fclose (final_output))
4461 error ("could not close final insn dump file %qs: %m",
4462 flag_dump_final_insns);
4463 flag_dump_final_insns = NULL;
4467 flag_rerun_cse_after_global_opts = 0;
4468 reload_completed = 0;
4469 epilogue_completed = 0;
4470 #ifdef STACK_REGS
4471 regstack_completed = 0;
4472 #endif
4474 /* Clear out the insn_length contents now that they are no
4475 longer valid. */
4476 init_insn_lengths ();
4478 /* Show no temporary slots allocated. */
4479 init_temp_slots ();
4481 free_bb_for_insn ();
4483 if (cfun->gimple_df)
4484 delete_tree_ssa (cfun);
4486 /* We can reduce stack alignment at the call site only when we are sure
4487 that the function body just produced will actually be used in the final
4488 executable. */
4489 if (flag_ipa_stack_alignment
4490 && decl_binds_to_current_def_p (current_function_decl))
4492 unsigned int pref = crtl->preferred_stack_boundary;
4493 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4494 pref = crtl->stack_alignment_needed;
4495 cgraph_node::rtl_info (current_function_decl)
4496 ->preferred_incoming_stack_boundary = pref;
4499 /* Make sure volatile mem refs aren't considered valid operands for
4500 arithmetic insns. We must call this here if this is a nested inline
4501 function, since the above code leaves us in the init_recog state,
4502 and the function context push/pop code does not save/restore volatile_ok.
4504 ??? Maybe it isn't necessary for expand_start_function to call this
4505 anymore if we do it here? */
4507 init_recog_no_volatile ();
4509 /* We're done with this function. Free up memory if we can. */
4510 free_after_parsing (cfun);
4511 free_after_compilation (cfun);
4512 return 0;
4515 namespace {
4517 const pass_data pass_data_clean_state =
4519 RTL_PASS, /* type */
4520 "*clean_state", /* name */
4521 OPTGROUP_NONE, /* optinfo_flags */
4522 TV_FINAL, /* tv_id */
4523 0, /* properties_required */
4524 0, /* properties_provided */
4525 PROP_rtl, /* properties_destroyed */
4526 0, /* todo_flags_start */
4527 0, /* todo_flags_finish */
4530 class pass_clean_state : public rtl_opt_pass
4532 public:
4533 pass_clean_state (gcc::context *ctxt)
4534 : rtl_opt_pass (pass_data_clean_state, ctxt)
4537 /* opt_pass methods: */
4538 unsigned int execute (function *) final override
4540 return rest_of_clean_state ();
4543 }; // class pass_clean_state
4545 } // anon namespace
4547 rtl_opt_pass *
4548 make_pass_clean_state (gcc::context *ctxt)
4550 return new pass_clean_state (ctxt);
4553 /* Return true if INSN is a call to the current function. */
4555 static bool
4556 self_recursive_call_p (rtx_insn *insn)
4558 tree fndecl = get_call_fndecl (insn);
4559 return (fndecl == current_function_decl
4560 && decl_binds_to_current_def_p (fndecl));
4563 /* Collect hard register usage for the current function. */
4565 static void
4566 collect_fn_hard_reg_usage (void)
4568 rtx_insn *insn;
4569 #ifdef STACK_REGS
4570 int i;
4571 #endif
4572 struct cgraph_rtl_info *node;
4573 HARD_REG_SET function_used_regs;
4575 /* ??? To be removed when all the ports have been fixed. */
4576 if (!targetm.call_fusage_contains_non_callee_clobbers)
4577 return;
4579 /* Be conservative - mark fixed and global registers as used. */
4580 function_used_regs = fixed_reg_set;
4582 #ifdef STACK_REGS
4583 /* Handle STACK_REGS conservatively, since the df-framework does not
4584 provide accurate information for them. */
4586 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4587 SET_HARD_REG_BIT (function_used_regs, i);
4588 #endif
4590 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4592 HARD_REG_SET insn_used_regs;
4594 if (!NONDEBUG_INSN_P (insn))
4595 continue;
4597 if (CALL_P (insn)
4598 && !self_recursive_call_p (insn))
4599 function_used_regs
4600 |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4602 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4603 function_used_regs |= insn_used_regs;
4605 if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (),
4606 function_used_regs))
4607 return;
4610 /* Mask out fully-saved registers, so that they don't affect equality
4611 comparisons between function_abis. */
4612 function_used_regs &= crtl->abi->full_and_partial_reg_clobbers ();
4614 node = cgraph_node::rtl_info (current_function_decl);
4615 gcc_assert (node != NULL);
4617 node->function_used_regs = function_used_regs;
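/* Sketch of the consumer side, stated as an assumption rather than a spec:
   with flag_ipa_ra in effect, a caller compiled later is expected to fetch
   this set back through the cgraph RTL info when working out what a call to
   this function really clobbers, roughly

     struct cgraph_rtl_info *info = cgraph_node::rtl_info (callee_decl);
     if (info)
       ...  use info->function_used_regs instead of the full
	    call-clobber set at the call site  ...

   callee_decl is a placeholder for however the caller identifies the called
   function.  */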