/* gcc/tree-cfg.cc — from official-gcc.git
   (scrape metadata: commit "c++: fix explicit/copy problem [PR109247]",
   blob a1a607666dc71922889443c51456b80b0b31e6c2).  */
/* Control flow functions for trees.
   Copyright (C) 2001-2023 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "tree-ssa-dce.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"
#include "sreal.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
101 /* CFG statistics. */
102 struct cfg_stats_d
104 long num_merged_labels;
107 static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map<tree, tree> *vars_map;
113 tree to_context;
116 /* Hash table to store last discriminator assigned for each locus. */
117 struct locus_discrim_map
119 int location_line;
120 int discriminator;
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
135 inline hashval_t
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
138 return item->location_line;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
144 inline bool
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
148 return a->location_line == b->location_line;
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
181 void
182 init_empty_tree_cfg_for_function (struct function *fn)
184 /* Initialize the basic block array. */
185 init_flow (fn);
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
205 void
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun);
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
218 static void
219 build_gimple_cfg (gimple_seq seq)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
226 init_empty_tree_cfg ();
228 make_blocks (seq);
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
250 make_edges ();
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
261 static void
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
314 static void
315 replace_loop_annotate (void)
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple *stmt;
321 for (auto loop : loops_list (cfun, 0))
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
330 /* Push the global flag_finite_loops state down to individual loops. */
331 loop->finite_p = flag_finite_loops;
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_unroll_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 case annot_expr_parallel_kind:
353 break;
354 default:
355 gcc_unreachable ();
358 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
359 stmt = gimple_build_assign (gimple_call_lhs (stmt),
360 gimple_call_arg (stmt, 0));
361 gsi_replace (&gsi, stmt, true);
366 static unsigned int
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
378 cleanup_tree_cfg ();
380 bb_to_omp_idx.release ();
382 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
383 replace_loop_annotate ();
384 return 0;
387 namespace {
389 const pass_data pass_data_build_cfg =
391 GIMPLE_PASS, /* type */
392 "cfg", /* name */
393 OPTGROUP_NONE, /* optinfo_flags */
394 TV_TREE_CFG, /* tv_id */
395 PROP_gimple_leh, /* properties_required */
396 ( PROP_cfg | PROP_loops ), /* properties_provided */
397 0, /* properties_destroyed */
398 0, /* todo_flags_start */
399 0, /* todo_flags_finish */
402 class pass_build_cfg : public gimple_opt_pass
404 public:
405 pass_build_cfg (gcc::context *ctxt)
406 : gimple_opt_pass (pass_data_build_cfg, ctxt)
409 /* opt_pass methods: */
410 unsigned int execute (function *) final override
412 return execute_build_cfg ();
415 }; // class pass_build_cfg
417 } // anon namespace
419 gimple_opt_pass *
420 make_pass_build_cfg (gcc::context *ctxt)
422 return new pass_build_cfg (ctxt);
426 /* Return true if T is a computed goto. */
428 bool
429 computed_goto_p (gimple *t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
435 /* Returns true if the sequence of statements STMTS only contains
436 a call to __builtin_unreachable (). */
438 bool
439 gimple_seq_unreachable_p (gimple_seq stmts)
441 if (stmts == NULL
442 /* Return false if -fsanitize=unreachable, we don't want to
443 optimize away those calls, but rather turn them into
444 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
445 later. */
446 || sanitize_flags_p (SANITIZE_UNREACHABLE))
447 return false;
449 gimple_stmt_iterator gsi = gsi_last (stmts);
451 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
452 return false;
454 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
456 gimple *stmt = gsi_stmt (gsi);
457 if (gimple_code (stmt) != GIMPLE_LABEL
458 && !is_gimple_debug (stmt)
459 && !gimple_clobber_p (stmt))
460 return false;
462 return true;
465 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
466 the other edge points to a bb with just __builtin_unreachable ().
467 I.e. return true for C->M edge in:
468 <bb C>:
470 if (something)
471 goto <bb N>;
472 else
473 goto <bb M>;
474 <bb N>:
475 __builtin_unreachable ();
476 <bb M>: */
478 bool
479 assert_unreachable_fallthru_edge_p (edge e)
481 basic_block pred_bb = e->src;
482 if (safe_is_a <gcond *> (*gsi_last_bb (pred_bb)))
484 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
485 if (other_bb == e->dest)
486 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
487 if (EDGE_COUNT (other_bb->succs) == 0)
488 return gimple_seq_unreachable_p (bb_seq (other_bb));
490 return false;
494 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
495 could alter control flow except via eh. We initialize the flag at
496 CFG build time and only ever clear it later. */
498 static void
499 gimple_call_initialize_ctrl_altering (gimple *stmt)
501 int flags = gimple_call_flags (stmt);
503 /* A call alters control flow if it can make an abnormal goto. */
504 if (call_can_make_abnormal_goto (stmt)
505 /* A call also alters control flow if it does not return. */
506 || flags & ECF_NORETURN
507 /* TM ending statements have backedges out of the transaction.
508 Return true so we split the basic block containing them.
509 Note that the TM_BUILTIN test is merely an optimization. */
510 || ((flags & ECF_TM_BUILTIN)
511 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
512 /* BUILT_IN_RETURN call is same as return statement. */
513 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
514 /* IFN_UNIQUE should be the last insn, to make checking for it
515 as cheap as possible. */
516 || (gimple_call_internal_p (stmt)
517 && gimple_call_internal_unique_p (stmt)))
518 gimple_call_set_ctrl_altering (stmt, true);
519 else
520 gimple_call_set_ctrl_altering (stmt, false);
524 /* Insert SEQ after BB and build a flowgraph. */
526 static basic_block
527 make_blocks_1 (gimple_seq seq, basic_block bb)
529 gimple_stmt_iterator i = gsi_start (seq);
530 gimple *stmt = NULL;
531 gimple *prev_stmt = NULL;
532 bool start_new_block = true;
533 bool first_stmt_of_seq = true;
535 while (!gsi_end_p (i))
537 /* PREV_STMT should only be set to a debug stmt if the debug
538 stmt is before nondebug stmts. Once stmt reaches a nondebug
539 nonlabel, prev_stmt will be set to it, so that
540 stmt_starts_bb_p will know to start a new block if a label is
541 found. However, if stmt was a label after debug stmts only,
542 keep the label in prev_stmt even if we find further debug
543 stmts, for there may be other labels after them, and they
544 should land in the same block. */
545 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
546 prev_stmt = stmt;
547 stmt = gsi_stmt (i);
549 if (stmt && is_gimple_call (stmt))
550 gimple_call_initialize_ctrl_altering (stmt);
552 /* If the statement starts a new basic block or if we have determined
553 in a previous pass that we need to create a new block for STMT, do
554 so now. */
555 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
557 if (!first_stmt_of_seq)
558 gsi_split_seq_before (&i, &seq);
559 bb = create_basic_block (seq, bb);
560 start_new_block = false;
561 prev_stmt = NULL;
564 /* Now add STMT to BB and create the subgraphs for special statement
565 codes. */
566 gimple_set_bb (stmt, bb);
568 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
569 next iteration. */
570 if (stmt_ends_bb_p (stmt))
572 /* If the stmt can make abnormal goto use a new temporary
573 for the assignment to the LHS. This makes sure the old value
574 of the LHS is available on the abnormal edge. Otherwise
575 we will end up with overlapping life-ranges for abnormal
576 SSA names. */
577 if (gimple_has_lhs (stmt)
578 && stmt_can_make_abnormal_goto (stmt)
579 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
581 tree lhs = gimple_get_lhs (stmt);
582 tree tmp = create_tmp_var (TREE_TYPE (lhs));
583 gimple *s = gimple_build_assign (lhs, tmp);
584 gimple_set_location (s, gimple_location (stmt));
585 gimple_set_block (s, gimple_block (stmt));
586 gimple_set_lhs (stmt, tmp);
587 gsi_insert_after (&i, s, GSI_SAME_STMT);
589 start_new_block = true;
592 gsi_next (&i);
593 first_stmt_of_seq = false;
595 return bb;
598 /* Build a flowgraph for the sequence of stmts SEQ. */
600 static void
601 make_blocks (gimple_seq seq)
603 /* Look for debug markers right before labels, and move the debug
604 stmts after the labels. Accepting labels among debug markers
605 adds no value, just complexity; if we wanted to annotate labels
606 with view numbers (so sequencing among markers would matter) or
607 somesuch, we're probably better off still moving the labels, but
608 adding other debug annotations in their original positions or
609 emitting nonbind or bind markers associated with the labels in
610 the original position of the labels.
612 Moving labels would probably be simpler, but we can't do that:
613 moving labels assigns label ids to them, and doing so because of
614 debug markers makes for -fcompare-debug and possibly even codegen
615 differences. So, we have to move the debug stmts instead. To
616 that end, we scan SEQ backwards, marking the position of the
617 latest (earliest we find) label, and moving debug stmts that are
618 not separated from it by nondebug nonlabel stmts after the
619 label. */
620 if (MAY_HAVE_DEBUG_MARKER_STMTS)
622 gimple_stmt_iterator label = gsi_none ();
624 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
626 gimple *stmt = gsi_stmt (i);
628 /* If this is the first label we encounter (latest in SEQ)
629 before nondebug stmts, record its position. */
630 if (is_a <glabel *> (stmt))
632 if (gsi_end_p (label))
633 label = i;
634 continue;
637 /* Without a recorded label position to move debug stmts to,
638 there's nothing to do. */
639 if (gsi_end_p (label))
640 continue;
642 /* Move the debug stmt at I after LABEL. */
643 if (is_gimple_debug (stmt))
645 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
646 /* As STMT is removed, I advances to the stmt after
647 STMT, so the gsi_prev in the for "increment"
648 expression gets us to the stmt we're to visit after
649 STMT. LABEL, however, would advance to the moved
650 stmt if we passed it to gsi_move_after, so pass it a
651 copy instead, so as to keep LABEL pointing to the
652 LABEL. */
653 gimple_stmt_iterator copy = label;
654 gsi_move_after (&i, &copy);
655 continue;
658 /* There aren't any (more?) debug stmts before label, so
659 there isn't anything else to move after it. */
660 label = gsi_none ();
664 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
667 /* Create and return a new empty basic block after bb AFTER. */
669 static basic_block
670 create_bb (void *h, void *e, basic_block after)
672 basic_block bb;
674 gcc_assert (!e);
676 /* Create and initialize a new basic block. Since alloc_block uses
677 GC allocation that clears memory to allocate a basic block, we do
678 not have to clear the newly allocated basic block here. */
679 bb = alloc_block ();
681 bb->index = last_basic_block_for_fn (cfun);
682 bb->flags = BB_NEW;
683 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
685 /* Add the new block to the linked list of blocks. */
686 link_block (bb, after);
688 /* Grow the basic block array if needed. */
689 if ((size_t) last_basic_block_for_fn (cfun)
690 == basic_block_info_for_fn (cfun)->length ())
691 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
692 last_basic_block_for_fn (cfun) + 1);
694 /* Add the newly created block to the array. */
695 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
697 n_basic_blocks_for_fn (cfun)++;
698 last_basic_block_for_fn (cfun)++;
700 return bb;
704 /*---------------------------------------------------------------------------
705 Edge creation
706 ---------------------------------------------------------------------------*/
708 /* If basic block BB has an abnormal edge to a basic block
709 containing IFN_ABNORMAL_DISPATCHER internal call, return
710 that the dispatcher's basic block, otherwise return NULL. */
712 basic_block
713 get_abnormal_succ_dispatcher (basic_block bb)
715 edge e;
716 edge_iterator ei;
718 FOR_EACH_EDGE (e, ei, bb->succs)
719 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
721 gimple_stmt_iterator gsi
722 = gsi_start_nondebug_after_labels_bb (e->dest);
723 gimple *g = gsi_stmt (gsi);
724 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
725 return e->dest;
727 return NULL;
730 /* Helper function for make_edges. Create a basic block with
731 with ABNORMAL_DISPATCHER internal call in it if needed, and
732 create abnormal edges from BBS to it and from it to FOR_BB
733 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
735 static void
736 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
737 auto_vec<basic_block> *bbs, bool computed_goto)
739 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
740 unsigned int idx = 0;
741 basic_block bb;
742 bool inner = false;
744 if (!bb_to_omp_idx.is_empty ())
746 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
747 if (bb_to_omp_idx[for_bb->index] != 0)
748 inner = true;
751 /* If the dispatcher has been created already, then there are basic
752 blocks with abnormal edges to it, so just make a new edge to
753 for_bb. */
754 if (*dispatcher == NULL)
756 /* Check if there are any basic blocks that need to have
757 abnormal edges to this dispatcher. If there are none, return
758 early. */
759 if (bb_to_omp_idx.is_empty ())
761 if (bbs->is_empty ())
762 return;
764 else
766 FOR_EACH_VEC_ELT (*bbs, idx, bb)
767 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
768 break;
769 if (bb == NULL)
770 return;
773 /* Create the dispatcher bb. */
774 *dispatcher = create_basic_block (NULL, for_bb);
775 if (computed_goto)
777 /* Factor computed gotos into a common computed goto site. Also
778 record the location of that site so that we can un-factor the
779 gotos after we have converted back to normal form. */
780 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
782 /* Create the destination of the factored goto. Each original
783 computed goto will put its desired destination into this
784 variable and jump to the label we create immediately below. */
785 tree var = create_tmp_var (ptr_type_node, "gotovar");
787 /* Build a label for the new block which will contain the
788 factored computed goto. */
789 tree factored_label_decl
790 = create_artificial_label (UNKNOWN_LOCATION);
791 gimple *factored_computed_goto_label
792 = gimple_build_label (factored_label_decl);
793 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
795 /* Build our new computed goto. */
796 gimple *factored_computed_goto = gimple_build_goto (var);
797 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
799 FOR_EACH_VEC_ELT (*bbs, idx, bb)
801 if (!bb_to_omp_idx.is_empty ()
802 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
803 continue;
805 gsi = gsi_last_bb (bb);
806 gimple *last = gsi_stmt (gsi);
808 gcc_assert (computed_goto_p (last));
810 /* Copy the original computed goto's destination into VAR. */
811 gimple *assignment
812 = gimple_build_assign (var, gimple_goto_dest (last));
813 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
815 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
816 e->goto_locus = gimple_location (last);
817 gsi_remove (&gsi, true);
820 else
822 tree arg = inner ? boolean_true_node : boolean_false_node;
823 gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
824 1, arg);
825 gimple_call_set_ctrl_altering (g, true);
826 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
827 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
829 /* Create predecessor edges of the dispatcher. */
830 FOR_EACH_VEC_ELT (*bbs, idx, bb)
832 if (!bb_to_omp_idx.is_empty ()
833 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
834 continue;
835 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
840 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
843 /* Creates outgoing edges for BB. Returns 1 when it ends with an
844 computed goto, returns 2 when it ends with a statement that
845 might return to this function via an nonlocal goto, otherwise
846 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
848 static int
849 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
851 gimple *last = *gsi_last_bb (bb);
852 bool fallthru = false;
853 int ret = 0;
855 if (!last)
856 return ret;
858 switch (gimple_code (last))
860 case GIMPLE_GOTO:
861 if (make_goto_expr_edges (bb))
862 ret = 1;
863 fallthru = false;
864 break;
865 case GIMPLE_RETURN:
867 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
868 e->goto_locus = gimple_location (last);
869 fallthru = false;
871 break;
872 case GIMPLE_COND:
873 make_cond_expr_edges (bb);
874 fallthru = false;
875 break;
876 case GIMPLE_SWITCH:
877 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
878 fallthru = false;
879 break;
880 case GIMPLE_RESX:
881 make_eh_edges (last);
882 fallthru = false;
883 break;
884 case GIMPLE_EH_DISPATCH:
885 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
886 break;
888 case GIMPLE_CALL:
889 /* If this function receives a nonlocal goto, then we need to
890 make edges from this call site to all the nonlocal goto
891 handlers. */
892 if (stmt_can_make_abnormal_goto (last))
893 ret = 2;
895 /* If this statement has reachable exception handlers, then
896 create abnormal edges to them. */
897 make_eh_edges (last);
899 /* BUILTIN_RETURN is really a return statement. */
900 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
902 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
903 fallthru = false;
905 /* Some calls are known not to return. */
906 else
907 fallthru = !gimple_call_noreturn_p (last);
908 break;
910 case GIMPLE_ASSIGN:
911 /* A GIMPLE_ASSIGN may throw internally and thus be considered
912 control-altering. */
913 if (is_ctrl_altering_stmt (last))
914 make_eh_edges (last);
915 fallthru = true;
916 break;
918 case GIMPLE_ASM:
919 make_gimple_asm_edges (bb);
920 fallthru = true;
921 break;
923 CASE_GIMPLE_OMP:
924 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
925 break;
927 case GIMPLE_TRANSACTION:
929 gtransaction *txn = as_a <gtransaction *> (last);
930 tree label1 = gimple_transaction_label_norm (txn);
931 tree label2 = gimple_transaction_label_uninst (txn);
933 if (label1)
934 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
935 if (label2)
936 make_edge (bb, label_to_block (cfun, label2),
937 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
939 tree label3 = gimple_transaction_label_over (txn);
940 if (gimple_transaction_subcode (txn)
941 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
942 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
944 fallthru = false;
946 break;
948 default:
949 gcc_assert (!stmt_ends_bb_p (last));
950 fallthru = true;
951 break;
954 if (fallthru)
955 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
957 return ret;
960 /* Join all the blocks in the flowgraph. */
962 static void
963 make_edges (void)
965 basic_block bb;
966 struct omp_region *cur_region = NULL;
967 auto_vec<basic_block> ab_edge_goto;
968 auto_vec<basic_block> ab_edge_call;
969 int cur_omp_region_idx = 0;
971 /* Create an edge from entry to the first block with executable
972 statements in it. */
973 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
974 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
975 EDGE_FALLTHRU);
977 /* Traverse the basic block array placing edges. */
978 FOR_EACH_BB_FN (bb, cfun)
980 int mer;
982 if (!bb_to_omp_idx.is_empty ())
983 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
985 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
986 if (mer == 1)
987 ab_edge_goto.safe_push (bb);
988 else if (mer == 2)
989 ab_edge_call.safe_push (bb);
991 if (cur_region && bb_to_omp_idx.is_empty ())
992 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
995 /* Computed gotos are hell to deal with, especially if there are
996 lots of them with a large number of destinations. So we factor
997 them to a common computed goto location before we build the
998 edge list. After we convert back to normal form, we will un-factor
999 the computed gotos since factoring introduces an unwanted jump.
1000 For non-local gotos and abnormal edges from calls to calls that return
1001 twice or forced labels, factor the abnormal edges too, by having all
1002 abnormal edges from the calls go to a common artificial basic block
1003 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1004 basic block to all forced labels and calls returning twice.
1005 We do this per-OpenMP structured block, because those regions
1006 are guaranteed to be single entry single exit by the standard,
1007 so it is not allowed to enter or exit such regions abnormally this way,
1008 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1009 must not transfer control across SESE region boundaries. */
1010 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1012 gimple_stmt_iterator gsi;
1013 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1014 basic_block *dispatcher_bbs = dispatcher_bb_array;
1015 int count = n_basic_blocks_for_fn (cfun);
1017 if (!bb_to_omp_idx.is_empty ())
1018 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1020 FOR_EACH_BB_FN (bb, cfun)
1022 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1024 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1025 tree target;
1027 if (!label_stmt)
1028 break;
1030 target = gimple_label_label (label_stmt);
1032 /* Make an edge to every label block that has been marked as a
1033 potential target for a computed goto or a non-local goto. */
1034 if (FORCED_LABEL (target))
1035 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1036 true);
1037 if (DECL_NONLOCAL (target))
1039 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1040 false);
1041 break;
1045 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1046 gsi_next_nondebug (&gsi);
1047 if (!gsi_end_p (gsi))
1049 /* Make an edge to every setjmp-like call. */
1050 gimple *call_stmt = gsi_stmt (gsi);
1051 if (is_gimple_call (call_stmt)
1052 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1053 || gimple_call_builtin_p (call_stmt,
1054 BUILT_IN_SETJMP_RECEIVER)))
1055 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1056 false);
1060 if (!bb_to_omp_idx.is_empty ())
1061 XDELETE (dispatcher_bbs);
1064 omp_free_regions ();
/* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
   needed.  Returns true if new bbs were created.

   The statements in SEQ are turned into new basic blocks spliced between
   the block containing GSI and its original successor; edges, loop
   membership and profile counts for the new blocks are filled in here.

   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  /* If make_blocks_1 created no new blocks, SEQ contained no
     block-splitting statements and there is nothing to wire up.  */
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      /* Create outgoing edges for the new block; none of the new
	 statements may open or close an OMP region.  */
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      /* Sum incoming edge counts; ALL tracks whether every predecessor
	 edge had an initialized profile count.  */
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      /* Only trust the summed count when every incoming edge
	 contributed, or when the profile was actually read.  */
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Look up (or create) the per-line counter entry, hashing by the
     line number itself.  */
  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First query for this line: allocate and zero-initialize the
	 persistent map entry.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  /* Hand out the next value; the first discriminator returned for a
     line is therefore 1, never 0.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1147 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1149 static bool
1150 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1152 expanded_location to;
1154 if (locus1 == locus2)
1155 return true;
1157 to = expand_location (locus2);
1159 if (from->line != to.line)
1160 return false;
1161 if (from->file == to.file)
1162 return true;
1163 return (from->file != NULL
1164 && to.file != NULL
1165 && filename_cmp (from->file, to.file) == 0);
/* Assign a unique discriminator value to all statements in block bb that
   have the same line number as locus.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple_stmt_iterator gsi;
  int discriminator;

  if (locus == UNKNOWN_LOCATION)
    return;

  expanded_location locus_e = expand_location (locus);

  /* One fresh discriminator is shared by every matching statement in
     this block.  */
  discriminator = next_discriminator_for_locus (locus_e.line);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      location_t stmt_locus = gimple_location (stmt);
      /* Only statements on the same source line as LOCUS are tagged.  */
      if (same_line_p (locus, &locus_e, stmt_locus))
	gimple_set_location (stmt,
			     location_with_discriminator (stmt_locus, discriminator));
    }
}
/* Assign discriminators to statement locations.

   Two passes over each basic block: first, tag statements following a
   call on the same line with a fresh discriminator (a call may later
   split the block); second, disambiguate successor blocks whose
   statements share the line of this block's last statement.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple_stmt_iterator gsi;
      location_t curr_locus = UNKNOWN_LOCATION;
      expanded_location curr_locus_e = {};
      int curr_discr = 0;

      /* Traverse the basic block, if two function calls within a basic block
	 are mapped to the same line, assign a new discriminator because a call
	 stmt could be a split point of a basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (curr_locus == UNKNOWN_LOCATION)
	    {
	      /* First located statement seen; start tracking its line.  */
	      curr_locus = gimple_location (stmt);
	      curr_locus_e = expand_location (curr_locus);
	    }
	  else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
	    {
	      /* Line changed; reset the pending discriminator.  */
	      curr_locus = gimple_location (stmt);
	      curr_locus_e = expand_location (curr_locus);
	      curr_discr = 0;
	    }
	  else if (curr_discr != 0)
	    {
	      /* Same line as an earlier call on this line: tag the
		 statement with the discriminator allocated for it.  */
	      location_t loc = gimple_location (stmt);
	      location_t dloc = location_with_discriminator (loc, curr_discr);
	      gimple_set_location (stmt, dloc);
	    }
	  /* Allocate a new discriminator for CALL stmt.  */
	  if (gimple_code (stmt) == GIMPLE_CALL)
	    curr_discr = next_discriminator_for_locus (curr_locus);
	}

      gimple *last = last_nondebug_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      /* Disambiguate successors that continue on the same source line.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_nondebug_stmt (e->dest);

	  gimple *stmt_on_same_line = NULL;
	  if (first && same_line_p (locus, &locus_e,
				    gimple_location (first)))
	    stmt_on_same_line = first;
	  else if (last && same_line_p (locus, &locus_e,
					gimple_location (last)))
	    stmt_on_same_line = last;

	  if (stmt_on_same_line)
	    {
	      /* Tag whichever side does not yet carry a discriminator,
		 so the two blocks become distinguishable.  */
	      if (has_discriminator (gimple_location (stmt_on_same_line))
		  && !has_discriminator (locus))
		assign_discriminator (locus, bb);
	      else
		assign_discriminator (locus, e->dest);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.
   The true/false labels recorded on the condition are resolved to
   their blocks, the two outgoing edges are created with goto_locus
   taken from the first statement of each destination, and the labels
   are cleared since the CFG now encodes the destinations.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (*gsi_last_bb (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* make_edge may return NULL here, e.g. if the edge already exists
     (then_bb == else_bb); only set the locus on a fresh edge.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  Returning true keeps the traversal going over all
   entries.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  /* Walk the chain, saving the successor before unlinking each node.  */
  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.
   Allocates the edge->cases map and the bitmap of basic blocks whose
   switches were touched; must not be called while recording is
   already active (asserted).  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1335 /* Return nonzero if we are recording information for case labels. */
1337 static bool
1338 recording_case_labels_p (void)
1340 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.
   Also re-groups the case labels of every switch whose block was
   marked as touched while recording was active.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear CASE_CHAINs before destroying the map (see
     edge_to_cases_cleanup).  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      /* The block may have been removed since it was marked.  */
      if (bb)
	{
	  if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Prepending via CASE_CHAIN keeps insertion O(1).  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* The map is now fully populated for this switch, so E must be
     present.  */
  return *edge_to_cases->get (e);
}
1407 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1409 static void
1410 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1412 size_t i, n;
1414 n = gimple_switch_num_labels (entry);
1416 for (i = 0; i < n; ++i)
1418 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1419 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST, or NULL if DEST's uid is
   out of range for IFUN's label-to-block map.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      /* NOTE(review): this error-recovery path uses CFUN rather than
	 IFUN; presumably it is only reached with ifun == cfun — confirm.  */
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The edge now carries the destination; the goto statement
	 itself is redundant and is removed.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges; the caller is responsible
     for making them.  */
  return true;
}
1473 /* Create edges for an asm statement with labels at block BB. */
1475 static void
1476 make_gimple_asm_edges (basic_block bb)
1478 gasm *stmt = as_a <gasm *> (*gsi_last_bb (bb));
1479 int i, n = gimple_asm_nlabels (stmt);
1481 for (i = 0; i < n; ++i)
1483 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1484 basic_block label_bb = label_to_block (cfun, label);
1485 make_edge (bb, label_bb, 0);
1489 /*---------------------------------------------------------------------------
1490 Flowgraph analysis
1491 ---------------------------------------------------------------------------*/
1493 /* Cleanup useless labels in basic blocks. This is something we wish
1494 to do early because it allows us to group case labels before creating
1495 the edges for the CFG, and it speeds up block statement iterators in
1496 all passes later on.
1497 We rerun this pass after CFG is created, to get rid of the labels that
1498 are no longer referenced. After then we do not run it any more, since
1499 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Filled in by cleanup_dead_labels and consumed by main_block_label
   and cleanup_dead_labels_eh.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};
/* Given LABEL return the first label in the same basic block.
   LABEL_FOR_BB maps block index to that block's chosen leading label;
   the returned label is marked as used.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  /* Record that this block's leading label is actually referenced.  */
  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.
   Redirects landing-pad, catch and allowed-exceptions labels to the
   leading label of their block (per LABEL_FOR_BB), keeping the
   EH_LANDING_PAD_NR bookkeeping consistent.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Landing pads: redirect each post_landing_pad label and move the
     landing-pad number from the old label to the new one.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    lp->post_landing_pad = lab;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no labels.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = *gsi_last_bb (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* Keep the leading label plus anything that may be referenced
	     from outside the block.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    {
	      gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
	      gsi_remove (&i, true);
	    }
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:
   Returns true if any labels were merged or removed.  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;
  /* Labels whose blocks were removed as unreachable; cases referring
     to them must be dropped too.  Lazily allocated.  */
  hash_set<tree> *removed_labels = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  Index 0 is the
     default label, so scanning starts at 1.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL
	  || base_bb == default_bb
	  || (removed_labels
	      && removed_labels->contains (CASE_LABEL (base_case))))
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && (removed_labels == NULL
		  || !removed_labels->contains (CASE_LABEL (merge_case)))
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high
		= (CASE_HIGH (merge_case)
		   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_nondebug_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    {
	      for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
		  {
		    if (FORCED_LABEL (gimple_label_label (stmt))
			|| DECL_NONLOCAL (gimple_label_label (stmt)))
		      {
			/* Forced/non-local labels aren't going to be removed,
			   but they will be moved to some neighbouring basic
			   block.  If some later case label refers to one of
			   those labels, we should throw that case away rather
			   than keeping it around and refering to some random
			   other basic block without an edge to it.  */
			if (removed_labels == NULL)
			  removed_labels = new hash_set<tree>;
			removed_labels->add (gimple_label_label (stmt));
		      }
		  }
		else
		  break;
	      remove_edge_and_dominated_blocks (base_edge);
	    }
	  i = next_index;
	  continue;
	}

      /* Compact the surviving case into its new slot.  */
      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  delete removed_labels;
  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  Returns true if any switch was changed.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Only blocks whose last statement is a switch are of interest.  */
      if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
	changed |= group_case_labels_stmt (stmt);
    }

  return changed;
}
/* Checks whether we can merge block B into block A.
   A must be B's single predecessor, B must be A's single successor via
   a non-complex edge, and merging must not lose labels, PHIs, loop
   structure or (at -O0) goto_locus debug information.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = *gsi_last_bb (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if a neighbouring statement already
	 carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  Statements whose operands change
   are folded and updated; PHI arguments on abnormal edges propagate
   the SSA_NAME_OCCURS_IN_ABNORMAL_PHI flag to VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* Non-PHI statements get refolded and their EH information
	 rechecked now that their operands changed.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      for (auto loop : loops_list (cfun, 0))
	substitute_in_loop_info (loop, name, val);
    }
}
/* Merge block B into block A.  Caller must have verified the merge is
   legal with gimple_can_merge_blocks_p.  PHIs in B are eliminated,
   labels in B are removed or relocated, and B's statement sequence is
   appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!*gsi_last_bb (a)
	      || !stmt_ends_bb_p (*gsi_last_bb (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      tree first_label = NULL_TREE;
	      if (!gsi_end_p (dest_gsi))
		if (glabel *first_label_stmt
		    = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
		  first_label = gimple_label_label (first_label_stmt);
	      /* Keep nonlocal and landing-pad labels first in A.  */
	      if (first_label
		  && (DECL_NONLOCAL (first_label)
		      || EH_LANDING_PAD_NR (first_label) != 0))
		gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
	      else
		gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label leaves its landing pad without
	     a post_landing_pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = a->count.merge (b->count);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2243 /* Return the one of two successors of BB that is not reachable by a
2244 complex edge, if there is one. Else, return BB. We use
2245 this in optimizations that use post-dominators for their heuristics,
2246 to catch the cases in C++ where function calls are involved. */
2248 basic_block
2249 single_noncomplex_succ (basic_block bb)
2251 edge e0, e1;
2252 if (EDGE_COUNT (bb->succs) != 2)
2253 return bb;
2255 e0 = EDGE_SUCC (bb, 0);
2256 e1 = EDGE_SUCC (bb, 1);
2257 if (e0->flags & EDGE_COMPLEX)
2258 return e1->dest;
2259 if (e1->flags & EDGE_COMPLEX)
2260 return e0->dest;
2262 return bb;
2265 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2267 void
2268 notice_special_calls (gcall *call)
2270 int flags = gimple_call_flags (call);
2272 if (flags & ECF_MAY_BE_ALLOCA)
2273 cfun->calls_alloca = true;
2274 if (flags & ECF_RETURNS_TWICE)
2275 cfun->calls_setjmp = true;
2279 /* Clear flags set by notice_special_calls. Used by dead code removal
2280 to update the flags. */
2282 void
2283 clear_special_calls (void)
2285 cfun->calls_alloca = false;
2286 cfun->calls_setjmp = false;
2289 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2291 static void
2292 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2294 /* Since this block is no longer reachable, we can just delete all
2295 of its PHI nodes. */
2296 remove_phi_nodes (bb);
2298 /* Remove edges to BB's successors. */
2299 while (EDGE_COUNT (bb->succs) > 0)
2300 remove_edge (EDGE_SUCC (bb, 0));
2304 /* Remove statements of basic block BB.  Forced/non-local labels are
     relocated rather than deleted since they may still be referenced;
     all other statements are released together with their SSA defs.  */
2306 static void
2307 remove_bb (basic_block bb)
2309 gimple_stmt_iterator i;
2311 if (dump_file)
2313 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2314 if (dump_flags & TDF_DETAILS)
2316 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2317 fprintf (dump_file, "\n");
2321 if (current_loops)
2323 class loop *loop = bb->loop_father;
2325 /* If a loop gets removed, clean up the information associated
2326 with it.  */
2327 if (loop->latch == bb
2328 || loop->header == bb)
2329 free_numbers_of_iterations_estimates (loop);
2332 /* Remove all the instructions in the block.  */
2333 if (bb_seq (bb) != NULL)
2335 /* Walk backwards so as to get a chance to substitute all
2336 released DEFs into debug stmts.  See
2337 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2338 details.  */
2339 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2341 gimple *stmt = gsi_stmt (i);
2342 glabel *label_stmt = dyn_cast <glabel *> (stmt);
     /* Forced or non-local labels must survive BB's removal; they are
        moved to another block below instead of being deleted.  */
2343 if (label_stmt
2344 && (FORCED_LABEL (gimple_label_label (label_stmt))
2345 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2347 basic_block new_bb;
2348 gimple_stmt_iterator new_gsi;
2350 /* A non-reachable non-local label may still be referenced.
2351 But it no longer needs to carry the extra semantics of
2352 non-locality.  */
2353 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2355 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2356 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
     /* Default destination for the rescued label: the previous block.  */
2359 new_bb = bb->prev_bb;
2360 /* Don't move any labels into ENTRY block.  */
2361 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2363 new_bb = single_succ (new_bb);
2364 gcc_assert (new_bb != bb);
     /* bb_to_omp_idx maps block index -> OMP region; when the chosen
        block lies in a different region, search for a block in BB's own
        region instead.  */
2366 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2367 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2368 || (bb_to_omp_idx[bb->index]
2369 != bb_to_omp_idx[new_bb->index])))
2371 /* During cfg pass make sure to put orphaned labels
2372 into the right OMP region.  */
2373 unsigned int i;
2374 int idx;
2375 new_bb = NULL;
2376 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2377 if (i >= NUM_FIXED_BLOCKS
2378 && idx == bb_to_omp_idx[bb->index]
2379 && i != (unsigned) bb->index)
2381 new_bb = BASIC_BLOCK_FOR_FN (cfun, i)
2382 break;
     /* No other block in the same region: fall back to the block right
        after ENTRY.  */
2384 if (new_bb == NULL)
2386 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2387 gcc_assert (new_bb != bb);
2390 new_gsi = gsi_after_labels (new_bb);
2391 gsi_remove (&i, false);
2392 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2394 else
2396 /* Release SSA definitions.  */
2397 release_defs (stmt);
2398 gsi_remove (&i, true);
     /* gsi_remove invalidated the position; restart at the (new) last
        statement, or step backwards when the iterator is still valid.  */
2401 if (gsi_end_p (i))
2402 i = gsi_last_bb (bb);
2403 else
2404 gsi_prev (&i);
     /* BB no longer belongs to any OMP region.  */
2408 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2409 bb_to_omp_idx[bb->index] = -1;
2410 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2411 bb->il.gimple.seq = NULL;
2412 bb->il.gimple.phi_nodes = NULL;
2416 /* Given a basic block BB and a value VAL for use in the final statement
2417 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2418 the edge that will be taken out of the block.
2419 If VAL is NULL_TREE, then the current value of the final statement's
2420 predicate or index is used.
2421 If the value does not match a unique edge, NULL is returned. */
2423 edge
2424 find_taken_edge (basic_block bb, tree val)
2426 gimple *stmt;
2428 stmt = *gsi_last_bb (bb);
2430 /* Handle ENTRY and EXIT. */
2431 if (!stmt)
2434 else if (gimple_code (stmt) == GIMPLE_COND)
2435 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2437 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2438 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2440 else if (computed_goto_p (stmt))
2442 /* Only optimize if the argument is a label, if the argument is
2443 not a label then we cannot construct a proper CFG.
2445 It may be the case that we only need to allow the LABEL_REF to
2446 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2447 appear inside a LABEL_EXPR just to be safe. */
2448 if (val
2449 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2450 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2451 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2454 /* Otherwise we only know the taken successor edge if it's unique. */
2455 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2458 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2459 statement, determine which of the outgoing edges will be taken out of the
2460 block. Return NULL if either edge may be taken. */
2462 static edge
2463 find_taken_edge_computed_goto (basic_block bb, tree val)
2465 basic_block dest;
2466 edge e = NULL;
2468 dest = label_to_block (cfun, val);
2469 if (dest)
2470 e = find_edge (bb, dest);
2472 /* It's possible for find_edge to return NULL here on invalid code
2473 that abuses the labels-as-values extension (e.g. code that attempts to
2474 jump *between* functions via stored labels-as-values; PR 84136).
2475 If so, then we simply return that NULL for the edge.
2476 We don't currently have a way of detecting such invalid code, so we
2477 can't assert that it was the case when a NULL edge occurs here. */
2479 return e;
2482 /* Given COND_STMT and a constant value VAL for use as the predicate,
2483 determine which of the two edges will be taken out of
2484 the statement's block. Return NULL if either edge may be taken.
2485 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2486 is used. */
2488 static edge
2489 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2491 edge true_edge, false_edge;
2493 if (val == NULL_TREE)
2495 /* Use the current value of the predicate. */
2496 if (gimple_cond_true_p (cond_stmt))
2497 val = integer_one_node;
2498 else if (gimple_cond_false_p (cond_stmt))
2499 val = integer_zero_node;
2500 else
2501 return NULL;
2503 else if (TREE_CODE (val) != INTEGER_CST)
2504 return NULL;
2506 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2507 &true_edge, &false_edge);
2509 return (integer_zerop (val) ? false_edge : true_edge);
2512 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2513 which edge will be taken out of the statement's block. Return NULL if any
2514 edge may be taken.
2515 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2516 is used. */
2518 edge
2519 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2521 basic_block dest_bb;
2522 edge e;
2523 tree taken_case;
2525 if (gimple_switch_num_labels (switch_stmt) == 1)
2526 taken_case = gimple_switch_default_label (switch_stmt);
2527 else
2529 if (val == NULL_TREE)
2530 val = gimple_switch_index (switch_stmt);
2531 if (TREE_CODE (val) != INTEGER_CST)
2532 return NULL;
2533 else
2534 taken_case = find_case_label_for_value (switch_stmt, val);
2536 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2538 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2539 gcc_assert (e);
2540 return e;
2544 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2545 We can make optimal use here of the fact that the case labels are
2546 sorted: We can do a binary search for a case matching VAL.  */
2548 tree
2549 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2551 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2552 tree default_case = gimple_switch_default_label (switch_stmt);
     /* Invariant: label LOW's CASE_LOW <= VAL < label HIGH's CASE_LOW
        (label 0 is the default and is never examined).  */
2554 for (low = 0, high = n; high - low > 1; )
2556 size_t i = (high + low) / 2;
2557 tree t = gimple_switch_label (switch_stmt, i);
2558 int cmp;
2560 /* Cache the result of comparing CASE_LOW and val.  */
2561 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2563 if (cmp > 0)
2564 high = i;
2565 else
2566 low = i;
2568 if (CASE_HIGH (t) == NULL)
2570 /* A single-valued case label.  */
2571 if (cmp == 0)
2572 return t;
2574 else
2576 /* A case range.  We can only handle integer ranges.  */
2577 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2578 return t;
     /* No explicit label matched VAL.  */
2582 return default_case;
2586 /* Dump a basic block on stderr. */
2588 void
2589 gimple_debug_bb (basic_block bb)
2591 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2595 /* Dump basic block with index N on stderr. */
2597 basic_block
2598 gimple_debug_bb_n (int n)
2600 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2601 return BASIC_BLOCK_FOR_FN (cfun, n);
2605 /* Dump the CFG on stderr.
2607 FLAGS are the same used by the tree dumping functions
2608 (see TDF_* in dumpfile.h). */
2610 void
2611 gimple_debug_cfg (dump_flags_t flags)
2613 gimple_dump_cfg (stderr, flags);
2617 /* Dump the program showing basic block boundaries on the given FILE.
2619 FLAGS are the same used by the tree dumping functions (see TDF_* in
2620 tree.h). */
2622 void
2623 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2625 if (flags & TDF_DETAILS)
2627 dump_function_header (file, current_function_decl, flags);
2628 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2629 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2630 last_basic_block_for_fn (cfun));
2632 brief_dump_cfg (file, flags);
2633 fprintf (file, "\n");
2636 if (flags & TDF_STATS)
2637 dump_cfg_stats (file);
2639 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2643 /* Dump CFG statistics on FILE: memory used by blocks and edges, plus
     the running count of coalesced label blocks.  */
2645 void
2646 dump_cfg_stats (FILE *file)
     /* Persists across calls so the "Max so far" figure is a
        process-lifetime high-water mark.  */
2648 static long max_num_merged_labels = 0;
2649 unsigned long size, total = 0;
2650 long num_edges;
2651 basic_block bb;
2652 const char * const fmt_str = "%-30s%-13s%12s\n";
2653 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2654 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2655 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2656 const char *funcname = current_function_name ();
2658 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2660 fprintf (file, "---------------------------------------------------------\n");
2661 fprintf (file, fmt_str, "", " Number of ", "Memory");
2662 fprintf (file, fmt_str, "", " instances ", "used ");
2663 fprintf (file, "---------------------------------------------------------\n");
     /* Memory consumed by basic block structures.  */
2665 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2666 total += size;
2667 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2668 SIZE_AMOUNT (size));
     /* Memory consumed by edges; counted via each block's successors so
        every edge is counted exactly once.  */
2670 num_edges = 0;
2671 FOR_EACH_BB_FN (bb, cfun)
2672 num_edges += EDGE_COUNT (bb->succs);
2673 size = num_edges * sizeof (class edge_def);
2674 total += size;
2675 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2677 fprintf (file, "---------------------------------------------------------\n");
2678 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2679 SIZE_AMOUNT (total));
2680 fprintf (file, "---------------------------------------------------------\n");
2681 fprintf (file, "\n");
2683 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2684 max_num_merged_labels = cfg_stats.num_merged_labels;
2686 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2687 cfg_stats.num_merged_labels, max_num_merged_labels);
2689 fprintf (file, "\n");
2693 /* Dump CFG statistics on stderr. Keep extern so that it's always
2694 linked in the final executable. */
2696 DEBUG_FUNCTION void
2697 debug_cfg_stats (void)
2699 dump_cfg_stats (stderr);
2702 /*---------------------------------------------------------------------------
2703 Miscellaneous helpers
2704 ---------------------------------------------------------------------------*/
2706 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2707 flow. Transfers of control flow associated with EH are excluded. */
2709 static bool
2710 call_can_make_abnormal_goto (gimple *t)
2712 /* If the function has no non-local labels, then a call cannot make an
2713 abnormal transfer of control. */
2714 if (!cfun->has_nonlocal_label
2715 && !cfun->calls_setjmp)
2716 return false;
2718 /* Likewise if the call has no side effects. */
2719 if (!gimple_has_side_effects (t))
2720 return false;
2722 /* Likewise if the called function is leaf. */
2723 if (gimple_call_flags (t) & ECF_LEAF)
2724 return false;
2726 return true;
2730 /* Return true if T can make an abnormal transfer of control flow.
2731 Transfers of control flow associated with EH are excluded. */
2733 bool
2734 stmt_can_make_abnormal_goto (gimple *t)
2736 if (computed_goto_p (t))
2737 return true;
2738 if (is_gimple_call (t))
2739 return call_can_make_abnormal_goto (t);
2740 return false;
2744 /* Return true if T represents a stmt that always transfers control. */
2746 bool
2747 is_ctrl_stmt (gimple *t)
2749 switch (gimple_code (t))
2751 case GIMPLE_COND:
2752 case GIMPLE_SWITCH:
2753 case GIMPLE_GOTO:
2754 case GIMPLE_RETURN:
2755 case GIMPLE_RESX:
2756 return true;
2757 default:
2758 return false;
2763 /* Return true if T is a statement that may alter the flow of control
2764 (e.g., a call to a non-returning function). */
2766 bool
2767 is_ctrl_altering_stmt (gimple *t)
2769 gcc_assert (t);
2771 switch (gimple_code (t))
2773 case GIMPLE_CALL:
2774 /* Per stmt call flag indicates whether the call could alter
2775 controlflow. */
2776 if (gimple_call_ctrl_altering_p (t))
2777 return true;
2778 break;
2780 case GIMPLE_EH_DISPATCH:
2781 /* EH_DISPATCH branches to the individual catch handlers at
2782 this level of a try or allowed-exceptions region. It can
2783 fallthru to the next statement as well. */
2784 return true;
2786 case GIMPLE_ASM:
2787 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2788 return true;
2789 break;
2791 CASE_GIMPLE_OMP:
2792 /* OpenMP directives alter control flow. */
2793 return true;
2795 case GIMPLE_TRANSACTION:
2796 /* A transaction start alters control flow. */
2797 return true;
2799 default:
2800 break;
2803 /* If a statement can throw, it alters control flow. */
2804 return stmt_can_throw_internal (cfun, t);
2808 /* Return true if T is a simple local goto. */
2810 bool
2811 simple_goto_p (gimple *t)
2813 return (gimple_code (t) == GIMPLE_GOTO
2814 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2818 /* Return true if STMT should start a new basic block.  PREV_STMT is
2819 the statement preceding STMT.  It is used when STMT is a label or a
2820 case label.  Labels should only start a new basic block if their
2821 previous statement wasn't a label.  Otherwise, sequence of labels
2822 would generate unnecessary basic blocks that only contain a single
2823 label.  */
2825 static inline bool
2826 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2828 if (stmt == NULL)
2829 return false;
2831 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2832 any nondebug stmts in the block.  We don't want to start another
2833 block in this case: the debug stmt will already have started the
2834 one STMT would start if we weren't outputting debug stmts.  */
2835 if (prev_stmt && is_gimple_debug (prev_stmt))
2836 return false;
2838 /* Labels start a new basic block only if the preceding statement
2839 wasn't a label of the same type.  This prevents the creation of
2840 consecutive blocks that have nothing but a single label.  */
2841 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2843 /* Nonlocal and computed GOTO targets always start a new block.  */
2844 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2845 || FORCED_LABEL (gimple_label_label (label_stmt)))
2846 return true;
     /* Two consecutive labels can share a block unless the earlier one
        is non-local or user-written (user labels must stay first so
        their address remains the block start).  */
2848 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2850 if (DECL_NONLOCAL (gimple_label_label (plabel))
2851 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2852 return true;
     /* Merging the label into the previous label's block; keep the
        statistic that dump_cfg_stats reports.  */
2854 cfg_stats.num_merged_labels++;
2855 return false;
2857 else
2858 return true;
2860 else if (gimple_code (stmt) == GIMPLE_CALL)
2862 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2863 /* setjmp acts similar to a nonlocal GOTO target and thus should
2864 start a new block.  */
2865 return true;
2866 if (gimple_call_internal_p (stmt, IFN_PHI)
2867 && prev_stmt
2868 && gimple_code (prev_stmt) != GIMPLE_LABEL
2869 && (gimple_code (prev_stmt) != GIMPLE_CALL
2870 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2871 /* PHI nodes start a new block unless preceded by a label
2872 or another PHI.  */
2873 return true;
2876 return false;
2880 /* Return true if T should end a basic block. */
2882 bool
2883 stmt_ends_bb_p (gimple *t)
2885 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2888 /* Remove block annotations and other data structures. */
2890 void
2891 delete_tree_cfg_annotations (struct function *fn)
2893 vec_free (label_to_block_map_for_fn (fn));
2896 /* Return the virtual phi in BB. */
2898 gphi *
2899 get_virtual_phi (basic_block bb)
2901 for (gphi_iterator gsi = gsi_start_phis (bb);
2902 !gsi_end_p (gsi);
2903 gsi_next (&gsi))
2905 gphi *phi = gsi.phi ();
2907 if (virtual_operand_p (PHI_RESULT (phi)))
2908 return phi;
2911 return NULL;
2914 /* Return the first statement in basic block BB. */
2916 gimple *
2917 first_stmt (basic_block bb)
2919 gimple_stmt_iterator i = gsi_start_bb (bb);
2920 gimple *stmt = NULL;
2922 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2924 gsi_next (&i);
2925 stmt = NULL;
2927 return stmt;
2930 /* Return the first non-label statement in basic block BB. */
2932 static gimple *
2933 first_non_label_stmt (basic_block bb)
2935 gimple_stmt_iterator i = gsi_start_bb (bb);
2936 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2937 gsi_next (&i);
2938 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2941 /* Return the last statement in basic block BB. */
2943 gimple *
2944 last_nondebug_stmt (basic_block bb)
2946 gimple_stmt_iterator i = gsi_last_bb (bb);
2947 gimple *stmt = NULL;
2949 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2951 gsi_prev (&i);
2952 stmt = NULL;
2954 return stmt;
2957 /* Return the last statement of an otherwise empty block. Return NULL
2958 if the block is totally empty, or if it contains more than one
2959 statement. */
2961 gimple *
2962 last_and_only_stmt (basic_block bb)
2964 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2965 gimple *last, *prev;
2967 if (gsi_end_p (i))
2968 return NULL;
2970 last = gsi_stmt (i);
2971 gsi_prev_nondebug (&i);
2972 if (gsi_end_p (i))
2973 return last;
2975 /* Empty statements should no longer appear in the instruction stream.
2976 Everything that might have appeared before should be deleted by
2977 remove_useless_stmts, and the optimizers should just gsi_remove
2978 instead of smashing with build_empty_stmt.
2980 Thus the only thing that should appear here in a block containing
2981 one executable statement is a label. */
2982 prev = gsi_stmt (i);
2983 if (gimple_code (prev) == GIMPLE_LABEL)
2984 return last;
2985 else
2986 return NULL;
2989 /* Returns the basic block after which the new basic block created
2990 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2991 near its "logical" location. This is of most help to humans looking
2992 at debugging dumps. */
2994 basic_block
2995 split_edge_bb_loc (edge edge_in)
2997 basic_block dest = edge_in->dest;
2998 basic_block dest_prev = dest->prev_bb;
3000 if (dest_prev)
3002 edge e = find_edge (dest_prev, dest);
3003 if (e && !(e->flags & EDGE_COMPLEX))
3004 return edge_in->src;
3006 return dest_prev;
3009 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
3010 Abort on abnormal edges.  */
3012 static basic_block
3013 gimple_split_edge (edge edge_in)
3015 basic_block new_bb, after_bb, dest;
3016 edge new_edge, e;
3018 /* Abnormal edges cannot be split.  */
3019 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3021 dest = edge_in->dest;
3023 after_bb = split_edge_bb_loc (edge_in);
3025 new_bb = create_empty_bb (after_bb);
     /* The new block inherits exactly the split edge's execution count.  */
3026 new_bb->count = edge_in->count ();
3028 /* We want to avoid re-allocating PHIs when we first
3029 add the fallthru edge from new_bb to dest but we also
3030 want to avoid changing PHI argument order when
3031 first redirecting edge_in away from dest.  The former
3032 avoids changing PHI argument order by adding them
3033 last and then the redirection swapping it back into
3034 place by means of unordered remove.
3035 So hack around things by temporarily removing all PHIs
3036 from the destination during the edge redirection and then
3037 making sure the edges stay in order.  */
3038 gimple_seq saved_phis = phi_nodes (dest);
3039 unsigned old_dest_idx = edge_in->dest_idx;
3040 set_phi_nodes (dest, NULL);
3041 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3042 e = redirect_edge_and_branch (edge_in, new_bb);
     /* The redirection must not have created a new edge, and the fallthru
        edge must have taken over EDGE_IN's slot so PHI arguments still
        line up with edge indices.  */
3043 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3044 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here.  */
3045 dest->il.gimple.phi_nodes = saved_phis;
3047 return new_bb;
3051 /* Verify properties of the address expression T whose base should be
3052 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3054 static bool
3055 verify_address (tree t, bool verify_addressable)
3057 bool old_constant;
3058 bool old_side_effects;
3059 bool new_constant;
3060 bool new_side_effects;
3062 old_constant = TREE_CONSTANT (t);
3063 old_side_effects = TREE_SIDE_EFFECTS (t);
3065 recompute_tree_invariant_for_addr_expr (t);
3066 new_side_effects = TREE_SIDE_EFFECTS (t);
3067 new_constant = TREE_CONSTANT (t);
3069 if (old_constant != new_constant)
3071 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3072 return true;
3074 if (old_side_effects != new_side_effects)
3076 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3077 return true;
3080 tree base = TREE_OPERAND (t, 0);
3081 while (handled_component_p (base))
3082 base = TREE_OPERAND (base, 0);
3084 if (!(VAR_P (base)
3085 || TREE_CODE (base) == PARM_DECL
3086 || TREE_CODE (base) == RESULT_DECL))
3087 return false;
3089 if (verify_addressable && !TREE_ADDRESSABLE (base))
3091 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3092 return true;
3095 return false;
3099 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3100 REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3101 if there is an error, otherwise false.  */
3103 static bool
3104 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3106 const char *code_name = get_tree_code_name (TREE_CODE (expr));
     /* These four codes are only valid at the outermost position of a
        reference tree; check them first and then descend to the operand.  */
3108 if (TREE_CODE (expr) == REALPART_EXPR
3109 || TREE_CODE (expr) == IMAGPART_EXPR
3110 || TREE_CODE (expr) == BIT_FIELD_REF
3111 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3113 tree op = TREE_OPERAND (expr, 0);
3114 if (TREE_CODE (expr) != VIEW_CONVERT_EXPR
3115 && !is_gimple_reg_type (TREE_TYPE (expr)))
3117 error ("non-scalar %qs", code_name);
3118 return true;
3121 if (TREE_CODE (expr) == BIT_FIELD_REF)
3123 tree t1 = TREE_OPERAND (expr, 1);
3124 tree t2 = TREE_OPERAND (expr, 2);
3125 poly_uint64 size, bitpos;
     /* Operands 1 and 2 are the bit size and bit position; both must be
        bitsizetype poly-int constants.  */
3126 if (!poly_int_tree_p (t1, &size)
3127 || !poly_int_tree_p (t2, &bitpos)
3128 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3129 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3131 error ("invalid position or size operand to %qs", code_name);
3132 return true;
3134 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3135 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3137 error ("integral result type precision does not match "
3138 "field size of %qs", code_name);
3139 return true;
3141 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3142 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3143 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3144 size))
3146 error ("mode size of non-integral result does not "
3147 "match field size of %qs",
3148 code_name);
3149 return true;
3151 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3152 && !type_has_mode_precision_p (TREE_TYPE (op)))
3154 error ("%qs of non-mode-precision operand", code_name);
3155 return true;
     /* For non-aggregate operands the selected bits must lie entirely
        within the operand's size.  */
3157 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3158 && maybe_gt (size + bitpos,
3159 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3161 error ("position plus size exceeds size of referenced object in "
3162 "%qs", code_name);
3163 return true;
3167 if ((TREE_CODE (expr) == REALPART_EXPR
3168 || TREE_CODE (expr) == IMAGPART_EXPR)
3169 && !useless_type_conversion_p (TREE_TYPE (expr),
3170 TREE_TYPE (TREE_TYPE (op))))
3172 error ("type mismatch in %qs reference", code_name);
3173 debug_generic_stmt (TREE_TYPE (expr));
3174 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3175 return true;
3178 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3180 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3181 that their operand is not a register an invariant when
3182 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3183 bug).  Otherwise there is nothing to verify, gross mismatches at
3184 most invoke undefined behavior.  */
3185 if (require_lvalue
3186 && (is_gimple_reg (op) || is_gimple_min_invariant (op)))
3188 error ("conversion of %qs on the left hand side of %qs",
3189 get_tree_code_name (TREE_CODE (op)), code_name);
3190 debug_generic_stmt (expr);
3191 return true;
3193 else if (is_gimple_reg (op)
3194 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3196 error ("conversion of register to a different size in %qs",
3197 code_name);
3198 debug_generic_stmt (expr);
3199 return true;
     /* Continue verification with the operand of the top-level code.  */
3203 expr = op;
     /* Walk down the chain of handled components; once we are inside one,
        the ultimate base must be a memory object, not a register.  */
3206 bool require_non_reg = false;
3207 while (handled_component_p (expr))
3209 require_non_reg = true;
3210 code_name = get_tree_code_name (TREE_CODE (expr));
3212 if (TREE_CODE (expr) == REALPART_EXPR
3213 || TREE_CODE (expr) == IMAGPART_EXPR
3214 || TREE_CODE (expr) == BIT_FIELD_REF
3216 error ("non-top-level %qs", code_name);
3217 return true;
3220 tree op = TREE_OPERAND (expr, 0);
3222 if (TREE_CODE (expr) == ARRAY_REF
3223 || TREE_CODE (expr) == ARRAY_RANGE_REF)
     /* Operand 1 is the index; operands 2 and 3 are the optional element
        type minimum and size, and all must be gimple values.  */
3225 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3226 || (TREE_OPERAND (expr, 2)
3227 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3228 || (TREE_OPERAND (expr, 3)
3229 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3231 error ("invalid operands to %qs", code_name);
3232 debug_generic_stmt (expr);
3233 return true;
3237 /* Verify if the reference array element types are compatible.  */
3238 if (TREE_CODE (expr) == ARRAY_REF
3239 && !useless_type_conversion_p (TREE_TYPE (expr),
3240 TREE_TYPE (TREE_TYPE (op))))
3242 error ("type mismatch in %qs", code_name);
3243 debug_generic_stmt (TREE_TYPE (expr));
3244 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3245 return true;
3247 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3248 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3249 TREE_TYPE (TREE_TYPE (op))))
3251 error ("type mismatch in %qs", code_name);
3252 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3253 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3254 return true;
3257 if (TREE_CODE (expr) == COMPONENT_REF)
     /* Operand 2, if present, is the field offset and must be a gimple
        value; the reference type must match the field's type.  */
3259 if (TREE_OPERAND (expr, 2)
3260 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3262 error ("invalid %qs offset operator", code_name);
3263 return true;
3265 if (!useless_type_conversion_p (TREE_TYPE (expr),
3266 TREE_TYPE (TREE_OPERAND (expr, 1))))
3268 error ("type mismatch in %qs", code_name);
3269 debug_generic_stmt (TREE_TYPE (expr));
3270 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3271 return true;
3275 expr = op;
     /* EXPR is now the base of the reference.  */
3278 code_name = get_tree_code_name (TREE_CODE (expr));
3280 if (TREE_CODE (expr) == MEM_REF)
3282 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3283 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3284 && verify_address (TREE_OPERAND (expr, 0), false)))
3286 error ("invalid address operand in %qs", code_name);
3287 debug_generic_stmt (expr);
3288 return true;
3290 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3291 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3293 error ("invalid offset operand in %qs", code_name);
3294 debug_generic_stmt (expr);
3295 return true;
3297 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3298 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3300 error ("invalid clique in %qs", code_name);
3301 debug_generic_stmt (expr);
3302 return true;
3305 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3307 if (!TMR_BASE (expr)
3308 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3309 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3310 && verify_address (TMR_BASE (expr), false)))
3312 error ("invalid address operand in %qs", code_name);
3313 return true;
3315 if (!TMR_OFFSET (expr)
3316 || !poly_int_tree_p (TMR_OFFSET (expr))
3317 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3319 error ("invalid offset operand in %qs", code_name);
3320 debug_generic_stmt (expr);
3321 return true;
3323 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3324 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3326 error ("invalid clique in %qs", code_name);
3327 debug_generic_stmt (expr);
3328 return true;
3331 else if (INDIRECT_REF_P (expr))
     /* INDIRECT_REFs are lowered to MEM_REFs before gimplification ends
        and must not survive into gimple.  */
3333 error ("%qs in gimple IL", code_name);
3334 debug_generic_stmt (expr);
3335 return true;
3337 else if (require_non_reg
3338 && (is_gimple_reg (expr)
3339 || (is_gimple_min_invariant (expr)
3340 /* STRING_CSTs are representatives of the string table
3341 entry which lives in memory.  */
3342 && TREE_CODE (expr) != STRING_CST)))
3344 error ("%qs as base where non-register is required", code_name);
3345 debug_generic_stmt (expr);
3346 return true;
3349 if (!require_lvalue
3350 && (is_gimple_reg (expr) || is_gimple_min_invariant (expr)))
3351 return false;
3353 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3354 return false;
3356 if (TREE_CODE (expr) != TARGET_MEM_REF
3357 && TREE_CODE (expr) != MEM_REF)
3359 error ("invalid expression for min lvalue");
3360 return true;
3363 return false;
3366 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3367 list of pointer-to types that is trivially convertible to DEST. */
3369 static bool
3370 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3372 tree src;
3374 if (!TYPE_POINTER_TO (src_obj))
3375 return true;
3377 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3378 if (useless_type_conversion_p (dest, src))
3379 return true;
3381 return false;
3384 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3385 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3387 static bool
3388 valid_fixed_convert_types_p (tree type1, tree type2)
3390 return (FIXED_POINT_TYPE_P (type1)
3391 && (INTEGRAL_TYPE_P (type2)
3392 || SCALAR_FLOAT_TYPE_P (type2)
3393 || FIXED_POINT_TYPE_P (type2)));
3396 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3397 is a problem, otherwise false. */
3399 static bool
3400 verify_gimple_call (gcall *stmt)
3402 tree fn = gimple_call_fn (stmt);
3403 tree fntype, fndecl;
3404 unsigned i;
3406 if (gimple_call_internal_p (stmt))
3408 if (fn)
3410 error ("gimple call has two targets");
3411 debug_generic_stmt (fn);
3412 return true;
3415 else
3417 if (!fn)
3419 error ("gimple call has no target");
3420 return true;
3424 if (fn && !is_gimple_call_addr (fn))
3426 error ("invalid function in gimple call");
3427 debug_generic_stmt (fn);
3428 return true;
3431 if (fn
3432 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3433 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3434 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3436 error ("non-function in gimple call");
3437 return true;
3440 fndecl = gimple_call_fndecl (stmt);
3441 if (fndecl
3442 && TREE_CODE (fndecl) == FUNCTION_DECL
3443 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3444 && !DECL_PURE_P (fndecl)
3445 && !TREE_READONLY (fndecl))
3447 error ("invalid pure const state for function");
3448 return true;
3451 tree lhs = gimple_call_lhs (stmt);
3452 if (lhs
3453 && (!is_gimple_reg (lhs)
3454 && (!is_gimple_lvalue (lhs)
3455 || verify_types_in_gimple_reference
3456 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3457 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3459 error ("invalid LHS in gimple call");
3460 return true;
3463 if (gimple_call_ctrl_altering_p (stmt)
3464 && gimple_call_noreturn_p (stmt)
3465 && should_remove_lhs_p (lhs))
3467 error ("LHS in %<noreturn%> call");
3468 return true;
3471 fntype = gimple_call_fntype (stmt);
3472 if (fntype
3473 && lhs
3474 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3475 /* ??? At least C++ misses conversions at assignments from
3476 void * call results.
3477 For now simply allow arbitrary pointer type conversions. */
3478 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3479 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3481 error ("invalid conversion in gimple call");
3482 debug_generic_stmt (TREE_TYPE (lhs));
3483 debug_generic_stmt (TREE_TYPE (fntype));
3484 return true;
3487 if (gimple_call_chain (stmt)
3488 && !is_gimple_val (gimple_call_chain (stmt)))
3490 error ("invalid static chain in gimple call");
3491 debug_generic_stmt (gimple_call_chain (stmt));
3492 return true;
3495 /* If there is a static chain argument, the call should either be
3496 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3497 if (gimple_call_chain (stmt)
3498 && fndecl
3499 && !DECL_STATIC_CHAIN (fndecl))
3501 error ("static chain with function that doesn%'t use one");
3502 return true;
3505 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3507 switch (DECL_FUNCTION_CODE (fndecl))
3509 case BUILT_IN_UNREACHABLE:
3510 case BUILT_IN_UNREACHABLE_TRAP:
3511 case BUILT_IN_TRAP:
3512 if (gimple_call_num_args (stmt) > 0)
3514 /* Built-in unreachable with parameters might not be caught by
3515 undefined behavior sanitizer. Front-ends do check users do not
3516 call them that way but we also produce calls to
3517 __builtin_unreachable internally, for example when IPA figures
3518 out a call cannot happen in a legal program. In such cases,
3519 we must make sure arguments are stripped off. */
3520 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3521 "with arguments");
3522 return true;
3524 break;
3525 default:
3526 break;
3530 /* For a call to .DEFERRED_INIT,
3531 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3532 we should guarantee that when the 1st argument is a constant, it should
3533 be the same as the size of the LHS. */
3535 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3537 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3538 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3540 if (TREE_CODE (lhs) == SSA_NAME)
3541 lhs = SSA_NAME_VAR (lhs);
3543 poly_uint64 size_from_arg0, size_from_lhs;
3544 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3545 &size_from_arg0);
3546 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3547 &size_from_lhs);
3548 if (is_constant_size_arg0 && is_constant_size_lhs)
3549 if (maybe_ne (size_from_arg0, size_from_lhs))
3551 error ("%<DEFERRED_INIT%> calls should have same "
3552 "constant size for the first argument and LHS");
3553 return true;
3557 /* ??? The C frontend passes unpromoted arguments in case it
3558 didn't see a function declaration before the call. So for now
3559 leave the call arguments mostly unverified. Once we gimplify
3560 unit-at-a-time we have a chance to fix this. */
3561 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3563 tree arg = gimple_call_arg (stmt, i);
3564 if ((is_gimple_reg_type (TREE_TYPE (arg))
3565 && !is_gimple_val (arg))
3566 || (!is_gimple_reg_type (TREE_TYPE (arg))
3567 && !is_gimple_lvalue (arg)))
3569 error ("invalid argument to gimple call");
3570 debug_generic_expr (arg);
3571 return true;
3573 if (!is_gimple_reg (arg))
3575 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3576 arg = TREE_OPERAND (arg, 0);
3577 if (verify_types_in_gimple_reference (arg, false))
3578 return true;
3582 return false;
3585 /* Verifies the gimple comparison with the result type TYPE and
3586 the operands OP0 and OP1, comparison code is CODE. */
3588 static bool
3589 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3591 tree op0_type = TREE_TYPE (op0);
3592 tree op1_type = TREE_TYPE (op1);
3594 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3596 error ("invalid operands in gimple comparison");
3597 return true;
3600 /* For comparisons we do not have the operations type as the
3601 effective type the comparison is carried out in. Instead
3602 we require that either the first operand is trivially
3603 convertible into the second, or the other way around. */
3604 if (!useless_type_conversion_p (op0_type, op1_type)
3605 && !useless_type_conversion_p (op1_type, op0_type))
3607 error ("mismatching comparison operand types");
3608 debug_generic_expr (op0_type);
3609 debug_generic_expr (op1_type);
3610 return true;
3613 /* The resulting type of a comparison may be an effective boolean type. */
3614 if (INTEGRAL_TYPE_P (type)
3615 && (TREE_CODE (type) == BOOLEAN_TYPE
3616 || TYPE_PRECISION (type) == 1))
3618 if ((VECTOR_TYPE_P (op0_type)
3619 || VECTOR_TYPE_P (op1_type))
3620 && code != EQ_EXPR && code != NE_EXPR
3621 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3622 && !VECTOR_INTEGER_TYPE_P (op0_type))
3624 error ("unsupported operation or type for vector comparison"
3625 " returning a boolean");
3626 debug_generic_expr (op0_type);
3627 debug_generic_expr (op1_type);
3628 return true;
3631 /* Or a boolean vector type with the same element count
3632 as the comparison operand types. */
3633 else if (VECTOR_TYPE_P (type)
3634 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3636 if (TREE_CODE (op0_type) != VECTOR_TYPE
3637 || TREE_CODE (op1_type) != VECTOR_TYPE)
3639 error ("non-vector operands in vector comparison");
3640 debug_generic_expr (op0_type);
3641 debug_generic_expr (op1_type);
3642 return true;
3645 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3646 TYPE_VECTOR_SUBPARTS (op0_type)))
3648 error ("invalid vector comparison resulting type");
3649 debug_generic_expr (type);
3650 return true;
3653 else
3655 error ("bogus comparison result type");
3656 debug_generic_expr (type);
3657 return true;
3660 return false;
3663 /* Verify a gimple assignment statement STMT with an unary rhs.
3664 Returns true if anything is wrong. */
3666 static bool
3667 verify_gimple_assign_unary (gassign *stmt)
3669 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3670 tree lhs = gimple_assign_lhs (stmt);
3671 tree lhs_type = TREE_TYPE (lhs);
3672 tree rhs1 = gimple_assign_rhs1 (stmt);
3673 tree rhs1_type = TREE_TYPE (rhs1);
3675 if (!is_gimple_reg (lhs))
3677 error ("non-register as LHS of unary operation");
3678 return true;
3681 if (!is_gimple_val (rhs1))
3683 error ("invalid operand in unary operation");
3684 return true;
3687 const char* const code_name = get_tree_code_name (rhs_code);
3689 /* First handle conversions. */
3690 switch (rhs_code)
3692 CASE_CONVERT:
3694 /* Allow conversions between vectors with the same number of elements,
3695 provided that the conversion is OK for the element types too. */
3696 if (VECTOR_TYPE_P (lhs_type)
3697 && VECTOR_TYPE_P (rhs1_type)
3698 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3699 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3701 lhs_type = TREE_TYPE (lhs_type);
3702 rhs1_type = TREE_TYPE (rhs1_type);
3704 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3706 error ("invalid vector types in nop conversion");
3707 debug_generic_expr (lhs_type);
3708 debug_generic_expr (rhs1_type);
3709 return true;
3712 /* Allow conversions from pointer type to integral type only if
3713 there is no sign or zero extension involved.
3714 For targets were the precision of ptrofftype doesn't match that
3715 of pointers we allow conversions to types where
3716 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3717 if ((POINTER_TYPE_P (lhs_type)
3718 && INTEGRAL_TYPE_P (rhs1_type))
3719 || (POINTER_TYPE_P (rhs1_type)
3720 && INTEGRAL_TYPE_P (lhs_type)
3721 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3722 #if defined(POINTERS_EXTEND_UNSIGNED)
3723 || (TYPE_MODE (rhs1_type) == ptr_mode
3724 && (TYPE_PRECISION (lhs_type)
3725 == BITS_PER_WORD /* word_mode */
3726 || (TYPE_PRECISION (lhs_type)
3727 == GET_MODE_PRECISION (Pmode))))
3728 #endif
3730 return false;
3732 /* Allow conversion from integral to offset type and vice versa. */
3733 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3734 && INTEGRAL_TYPE_P (rhs1_type))
3735 || (INTEGRAL_TYPE_P (lhs_type)
3736 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3737 return false;
3739 /* Otherwise assert we are converting between types of the
3740 same kind. */
3741 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3743 error ("invalid types in nop conversion");
3744 debug_generic_expr (lhs_type);
3745 debug_generic_expr (rhs1_type);
3746 return true;
3749 return false;
3752 case ADDR_SPACE_CONVERT_EXPR:
3754 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3755 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3756 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3758 error ("invalid types in address space conversion");
3759 debug_generic_expr (lhs_type);
3760 debug_generic_expr (rhs1_type);
3761 return true;
3764 return false;
3767 case FIXED_CONVERT_EXPR:
3769 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3770 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3772 error ("invalid types in fixed-point conversion");
3773 debug_generic_expr (lhs_type);
3774 debug_generic_expr (rhs1_type);
3775 return true;
3778 return false;
3781 case FLOAT_EXPR:
3783 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3784 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3785 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3787 error ("invalid types in conversion to floating-point");
3788 debug_generic_expr (lhs_type);
3789 debug_generic_expr (rhs1_type);
3790 return true;
3793 return false;
3796 case FIX_TRUNC_EXPR:
3798 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3799 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3800 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3802 error ("invalid types in conversion to integer");
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 return true;
3808 return false;
3811 case VEC_UNPACK_HI_EXPR:
3812 case VEC_UNPACK_LO_EXPR:
3813 case VEC_UNPACK_FLOAT_HI_EXPR:
3814 case VEC_UNPACK_FLOAT_LO_EXPR:
3815 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3816 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3817 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3818 || TREE_CODE (lhs_type) != VECTOR_TYPE
3819 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3820 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3821 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3822 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3823 || ((rhs_code == VEC_UNPACK_HI_EXPR
3824 || rhs_code == VEC_UNPACK_LO_EXPR)
3825 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3826 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3827 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3828 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3829 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3830 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3831 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3832 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3833 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3834 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3835 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3836 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3837 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3838 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3839 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3840 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3842 error ("type mismatch in %qs expression", code_name);
3843 debug_generic_expr (lhs_type);
3844 debug_generic_expr (rhs1_type);
3845 return true;
3848 return false;
3850 case NEGATE_EXPR:
3851 case ABS_EXPR:
3852 case BIT_NOT_EXPR:
3853 case PAREN_EXPR:
3854 case CONJ_EXPR:
3855 /* Disallow pointer and offset types for many of the unary gimple. */
3856 if (POINTER_TYPE_P (lhs_type)
3857 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3859 error ("invalid types for %qs", code_name);
3860 debug_generic_expr (lhs_type);
3861 debug_generic_expr (rhs1_type);
3862 return true;
3864 break;
3866 case ABSU_EXPR:
3867 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3868 || !TYPE_UNSIGNED (lhs_type)
3869 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3870 || TYPE_UNSIGNED (rhs1_type)
3871 || element_precision (lhs_type) != element_precision (rhs1_type))
3873 error ("invalid types for %qs", code_name);
3874 debug_generic_expr (lhs_type);
3875 debug_generic_expr (rhs1_type);
3876 return true;
3878 return false;
3880 case VEC_DUPLICATE_EXPR:
3881 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3882 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3884 error ("%qs should be from a scalar to a like vector", code_name);
3885 debug_generic_expr (lhs_type);
3886 debug_generic_expr (rhs1_type);
3887 return true;
3889 return false;
3891 default:
3892 gcc_unreachable ();
3895 /* For the remaining codes assert there is no conversion involved. */
3896 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3898 error ("non-trivial conversion in unary operation");
3899 debug_generic_expr (lhs_type);
3900 debug_generic_expr (rhs1_type);
3901 return true;
3904 return false;
3907 /* Verify a gimple assignment statement STMT with a binary rhs.
3908 Returns true if anything is wrong. */
3910 static bool
3911 verify_gimple_assign_binary (gassign *stmt)
3913 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3914 tree lhs = gimple_assign_lhs (stmt);
3915 tree lhs_type = TREE_TYPE (lhs);
3916 tree rhs1 = gimple_assign_rhs1 (stmt);
3917 tree rhs1_type = TREE_TYPE (rhs1);
3918 tree rhs2 = gimple_assign_rhs2 (stmt);
3919 tree rhs2_type = TREE_TYPE (rhs2);
3921 if (!is_gimple_reg (lhs))
3923 error ("non-register as LHS of binary operation");
3924 return true;
3927 if (!is_gimple_val (rhs1)
3928 || !is_gimple_val (rhs2))
3930 error ("invalid operands in binary operation");
3931 return true;
3934 const char* const code_name = get_tree_code_name (rhs_code);
3936 /* First handle operations that involve different types. */
3937 switch (rhs_code)
3939 case COMPLEX_EXPR:
3941 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3942 || !(INTEGRAL_TYPE_P (rhs1_type)
3943 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3944 || !(INTEGRAL_TYPE_P (rhs2_type)
3945 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3947 error ("type mismatch in %qs", code_name);
3948 debug_generic_expr (lhs_type);
3949 debug_generic_expr (rhs1_type);
3950 debug_generic_expr (rhs2_type);
3951 return true;
3954 return false;
3957 case LSHIFT_EXPR:
3958 case RSHIFT_EXPR:
3959 case LROTATE_EXPR:
3960 case RROTATE_EXPR:
3962 /* Shifts and rotates are ok on integral types, fixed point
3963 types and integer vector types. */
3964 if ((!INTEGRAL_TYPE_P (rhs1_type)
3965 && !FIXED_POINT_TYPE_P (rhs1_type)
3966 && ! (VECTOR_TYPE_P (rhs1_type)
3967 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3968 || (!INTEGRAL_TYPE_P (rhs2_type)
3969 /* Vector shifts of vectors are also ok. */
3970 && ! (VECTOR_TYPE_P (rhs1_type)
3971 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3972 && VECTOR_TYPE_P (rhs2_type)
3973 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3974 || !useless_type_conversion_p (lhs_type, rhs1_type))
3976 error ("type mismatch in %qs", code_name);
3977 debug_generic_expr (lhs_type);
3978 debug_generic_expr (rhs1_type);
3979 debug_generic_expr (rhs2_type);
3980 return true;
3983 return false;
3986 case WIDEN_LSHIFT_EXPR:
3988 if (!INTEGRAL_TYPE_P (lhs_type)
3989 || !INTEGRAL_TYPE_P (rhs1_type)
3990 || TREE_CODE (rhs2) != INTEGER_CST
3991 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3993 error ("type mismatch in %qs", code_name);
3994 debug_generic_expr (lhs_type);
3995 debug_generic_expr (rhs1_type);
3996 debug_generic_expr (rhs2_type);
3997 return true;
4000 return false;
4003 case VEC_WIDEN_LSHIFT_HI_EXPR:
4004 case VEC_WIDEN_LSHIFT_LO_EXPR:
4006 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4007 || TREE_CODE (lhs_type) != VECTOR_TYPE
4008 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4009 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4010 || TREE_CODE (rhs2) != INTEGER_CST
4011 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4012 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4014 error ("type mismatch in %qs", code_name);
4015 debug_generic_expr (lhs_type);
4016 debug_generic_expr (rhs1_type);
4017 debug_generic_expr (rhs2_type);
4018 return true;
4021 return false;
4024 case WIDEN_PLUS_EXPR:
4025 case WIDEN_MINUS_EXPR:
4026 case PLUS_EXPR:
4027 case MINUS_EXPR:
4029 tree lhs_etype = lhs_type;
4030 tree rhs1_etype = rhs1_type;
4031 tree rhs2_etype = rhs2_type;
4032 if (VECTOR_TYPE_P (lhs_type))
4034 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4035 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4037 error ("invalid non-vector operands to %qs", code_name);
4038 return true;
4040 lhs_etype = TREE_TYPE (lhs_type);
4041 rhs1_etype = TREE_TYPE (rhs1_type);
4042 rhs2_etype = TREE_TYPE (rhs2_type);
4044 if (POINTER_TYPE_P (lhs_etype)
4045 || POINTER_TYPE_P (rhs1_etype)
4046 || POINTER_TYPE_P (rhs2_etype))
4048 error ("invalid (pointer) operands %qs", code_name);
4049 return true;
4052 /* Continue with generic binary expression handling. */
4053 break;
4056 case POINTER_PLUS_EXPR:
4058 if (!POINTER_TYPE_P (rhs1_type)
4059 || !useless_type_conversion_p (lhs_type, rhs1_type)
4060 || !ptrofftype_p (rhs2_type))
4062 error ("type mismatch in %qs", code_name);
4063 debug_generic_stmt (lhs_type);
4064 debug_generic_stmt (rhs1_type);
4065 debug_generic_stmt (rhs2_type);
4066 return true;
4069 return false;
4072 case POINTER_DIFF_EXPR:
4074 if (!POINTER_TYPE_P (rhs1_type)
4075 || !POINTER_TYPE_P (rhs2_type)
4076 /* Because we special-case pointers to void we allow difference
4077 of arbitrary pointers with the same mode. */
4078 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4079 || !INTEGRAL_TYPE_P (lhs_type)
4080 || TYPE_UNSIGNED (lhs_type)
4081 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4083 error ("type mismatch in %qs", code_name);
4084 debug_generic_stmt (lhs_type);
4085 debug_generic_stmt (rhs1_type);
4086 debug_generic_stmt (rhs2_type);
4087 return true;
4090 return false;
4093 case TRUTH_ANDIF_EXPR:
4094 case TRUTH_ORIF_EXPR:
4095 case TRUTH_AND_EXPR:
4096 case TRUTH_OR_EXPR:
4097 case TRUTH_XOR_EXPR:
4099 gcc_unreachable ();
4101 case LT_EXPR:
4102 case LE_EXPR:
4103 case GT_EXPR:
4104 case GE_EXPR:
4105 case EQ_EXPR:
4106 case NE_EXPR:
4107 case UNORDERED_EXPR:
4108 case ORDERED_EXPR:
4109 case UNLT_EXPR:
4110 case UNLE_EXPR:
4111 case UNGT_EXPR:
4112 case UNGE_EXPR:
4113 case UNEQ_EXPR:
4114 case LTGT_EXPR:
4115 /* Comparisons are also binary, but the result type is not
4116 connected to the operand types. */
4117 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4119 case WIDEN_MULT_EXPR:
4120 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4121 return true;
4122 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4123 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4125 case WIDEN_SUM_EXPR:
4127 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4128 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4129 && ((!INTEGRAL_TYPE_P (rhs1_type)
4130 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4131 || (!INTEGRAL_TYPE_P (lhs_type)
4132 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4133 || !useless_type_conversion_p (lhs_type, rhs2_type)
4134 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4135 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4137 error ("type mismatch in %qs", code_name);
4138 debug_generic_expr (lhs_type);
4139 debug_generic_expr (rhs1_type);
4140 debug_generic_expr (rhs2_type);
4141 return true;
4143 return false;
4146 case VEC_WIDEN_MINUS_HI_EXPR:
4147 case VEC_WIDEN_MINUS_LO_EXPR:
4148 case VEC_WIDEN_PLUS_HI_EXPR:
4149 case VEC_WIDEN_PLUS_LO_EXPR:
4150 case VEC_WIDEN_MULT_HI_EXPR:
4151 case VEC_WIDEN_MULT_LO_EXPR:
4152 case VEC_WIDEN_MULT_EVEN_EXPR:
4153 case VEC_WIDEN_MULT_ODD_EXPR:
4155 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4156 || TREE_CODE (lhs_type) != VECTOR_TYPE
4157 || !types_compatible_p (rhs1_type, rhs2_type)
4158 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4159 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4161 error ("type mismatch in %qs", code_name);
4162 debug_generic_expr (lhs_type);
4163 debug_generic_expr (rhs1_type);
4164 debug_generic_expr (rhs2_type);
4165 return true;
4167 return false;
4170 case VEC_PACK_TRUNC_EXPR:
4171 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4172 vector boolean types. */
4173 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4174 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4175 && types_compatible_p (rhs1_type, rhs2_type)
4176 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4177 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4178 return false;
4180 /* Fallthru. */
4181 case VEC_PACK_SAT_EXPR:
4182 case VEC_PACK_FIX_TRUNC_EXPR:
4184 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4185 || TREE_CODE (lhs_type) != VECTOR_TYPE
4186 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4187 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4188 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4189 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4190 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4191 || !types_compatible_p (rhs1_type, rhs2_type)
4192 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4193 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4194 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4195 TYPE_VECTOR_SUBPARTS (lhs_type)))
4197 error ("type mismatch in %qs", code_name);
4198 debug_generic_expr (lhs_type);
4199 debug_generic_expr (rhs1_type);
4200 debug_generic_expr (rhs2_type);
4201 return true;
4204 return false;
4207 case VEC_PACK_FLOAT_EXPR:
4208 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4209 || TREE_CODE (lhs_type) != VECTOR_TYPE
4210 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4211 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4212 || !types_compatible_p (rhs1_type, rhs2_type)
4213 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4214 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4215 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4216 TYPE_VECTOR_SUBPARTS (lhs_type)))
4218 error ("type mismatch in %qs", code_name);
4219 debug_generic_expr (lhs_type);
4220 debug_generic_expr (rhs1_type);
4221 debug_generic_expr (rhs2_type);
4222 return true;
4225 return false;
4227 case MULT_EXPR:
4228 case MULT_HIGHPART_EXPR:
4229 case TRUNC_DIV_EXPR:
4230 case CEIL_DIV_EXPR:
4231 case FLOOR_DIV_EXPR:
4232 case ROUND_DIV_EXPR:
4233 case TRUNC_MOD_EXPR:
4234 case CEIL_MOD_EXPR:
4235 case FLOOR_MOD_EXPR:
4236 case ROUND_MOD_EXPR:
4237 case RDIV_EXPR:
4238 case EXACT_DIV_EXPR:
4239 case BIT_IOR_EXPR:
4240 case BIT_XOR_EXPR:
4241 /* Disallow pointer and offset types for many of the binary gimple. */
4242 if (POINTER_TYPE_P (lhs_type)
4243 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4245 error ("invalid types for %qs", code_name);
4246 debug_generic_expr (lhs_type);
4247 debug_generic_expr (rhs1_type);
4248 debug_generic_expr (rhs2_type);
4249 return true;
4251 /* Continue with generic binary expression handling. */
4252 break;
4254 case MIN_EXPR:
4255 case MAX_EXPR:
4256 /* Continue with generic binary expression handling. */
4257 break;
4259 case BIT_AND_EXPR:
4260 if (POINTER_TYPE_P (lhs_type)
4261 && TREE_CODE (rhs2) == INTEGER_CST)
4262 break;
4263 /* Disallow pointer and offset types for many of the binary gimple. */
4264 if (POINTER_TYPE_P (lhs_type)
4265 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4267 error ("invalid types for %qs", code_name);
4268 debug_generic_expr (lhs_type);
4269 debug_generic_expr (rhs1_type);
4270 debug_generic_expr (rhs2_type);
4271 return true;
4273 /* Continue with generic binary expression handling. */
4274 break;
4276 case VEC_SERIES_EXPR:
4277 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4279 error ("type mismatch in %qs", code_name);
4280 debug_generic_expr (rhs1_type);
4281 debug_generic_expr (rhs2_type);
4282 return true;
4284 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4285 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4287 error ("vector type expected in %qs", code_name);
4288 debug_generic_expr (lhs_type);
4289 return true;
4291 return false;
4293 default:
4294 gcc_unreachable ();
4297 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4298 || !useless_type_conversion_p (lhs_type, rhs2_type))
4300 error ("type mismatch in binary expression");
4301 debug_generic_stmt (lhs_type);
4302 debug_generic_stmt (rhs1_type);
4303 debug_generic_stmt (rhs2_type);
4304 return true;
4307 return false;
4310 /* Verify a gimple assignment statement STMT with a ternary rhs.
4311 Returns true if anything is wrong. */
4313 static bool
4314 verify_gimple_assign_ternary (gassign *stmt)
4316 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4317 tree lhs = gimple_assign_lhs (stmt);
4318 tree lhs_type = TREE_TYPE (lhs);
4319 tree rhs1 = gimple_assign_rhs1 (stmt);
4320 tree rhs1_type = TREE_TYPE (rhs1);
4321 tree rhs2 = gimple_assign_rhs2 (stmt);
4322 tree rhs2_type = TREE_TYPE (rhs2);
4323 tree rhs3 = gimple_assign_rhs3 (stmt);
4324 tree rhs3_type = TREE_TYPE (rhs3);
4326 if (!is_gimple_reg (lhs))
4328 error ("non-register as LHS of ternary operation");
4329 return true;
4332 if (!is_gimple_val (rhs1)
4333 || !is_gimple_val (rhs2)
4334 || !is_gimple_val (rhs3))
4336 error ("invalid operands in ternary operation");
4337 return true;
4340 const char* const code_name = get_tree_code_name (rhs_code);
4342 /* First handle operations that involve different types. */
4343 switch (rhs_code)
4345 case WIDEN_MULT_PLUS_EXPR:
4346 case WIDEN_MULT_MINUS_EXPR:
4347 if ((!INTEGRAL_TYPE_P (rhs1_type)
4348 && !FIXED_POINT_TYPE_P (rhs1_type))
4349 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4350 || !useless_type_conversion_p (lhs_type, rhs3_type)
4351 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4352 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4354 error ("type mismatch in %qs", code_name);
4355 debug_generic_expr (lhs_type);
4356 debug_generic_expr (rhs1_type);
4357 debug_generic_expr (rhs2_type);
4358 debug_generic_expr (rhs3_type);
4359 return true;
4361 break;
4363 case VEC_COND_EXPR:
4364 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4365 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4366 TYPE_VECTOR_SUBPARTS (lhs_type)))
4368 error ("the first argument of a %qs must be of a "
4369 "boolean vector type of the same number of elements "
4370 "as the result", code_name);
4371 debug_generic_expr (lhs_type);
4372 debug_generic_expr (rhs1_type);
4373 return true;
4375 /* Fallthrough. */
4376 case COND_EXPR:
4377 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4378 || !useless_type_conversion_p (lhs_type, rhs3_type))
4380 error ("type mismatch in %qs", code_name);
4381 debug_generic_expr (lhs_type);
4382 debug_generic_expr (rhs2_type);
4383 debug_generic_expr (rhs3_type);
4384 return true;
4386 break;
4388 case VEC_PERM_EXPR:
4389 /* If permute is constant, then we allow for lhs and rhs
4390 to have different vector types, provided:
4391 (1) lhs, rhs1, rhs2 have same element type.
4392 (2) rhs3 vector is constant and has integer element type.
4393 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4395 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4396 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4397 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4398 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4400 error ("vector types expected in %qs", code_name);
4401 debug_generic_expr (lhs_type);
4402 debug_generic_expr (rhs1_type);
4403 debug_generic_expr (rhs2_type);
4404 debug_generic_expr (rhs3_type);
4405 return true;
4408 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4409 as long as lhs, rhs1 and rhs2 have same element type. */
4410 if (TREE_CONSTANT (rhs3)
4411 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4412 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4413 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4414 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4416 error ("type mismatch in %qs", code_name);
4417 debug_generic_expr (lhs_type);
4418 debug_generic_expr (rhs1_type);
4419 debug_generic_expr (rhs2_type);
4420 debug_generic_expr (rhs3_type);
4421 return true;
4424 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4425 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4426 TYPE_VECTOR_SUBPARTS (rhs2_type))
4427 || (!TREE_CONSTANT(rhs3)
4428 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4429 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4430 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4431 TYPE_VECTOR_SUBPARTS (lhs_type)))
4433 error ("vectors with different element number found in %qs",
4434 code_name);
4435 debug_generic_expr (lhs_type);
4436 debug_generic_expr (rhs1_type);
4437 debug_generic_expr (rhs2_type);
4438 debug_generic_expr (rhs3_type);
4439 return true;
4442 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4443 || (TREE_CODE (rhs3) != VECTOR_CST
4444 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4445 (TREE_TYPE (rhs3_type)))
4446 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4447 (TREE_TYPE (rhs1_type))))))
4449 error ("invalid mask type in %qs", code_name);
4450 debug_generic_expr (lhs_type);
4451 debug_generic_expr (rhs1_type);
4452 debug_generic_expr (rhs2_type);
4453 debug_generic_expr (rhs3_type);
4454 return true;
4457 return false;
4459 case SAD_EXPR:
4460 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4461 || !useless_type_conversion_p (lhs_type, rhs3_type)
4462 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4463 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4465 error ("type mismatch in %qs", code_name);
4466 debug_generic_expr (lhs_type);
4467 debug_generic_expr (rhs1_type);
4468 debug_generic_expr (rhs2_type);
4469 debug_generic_expr (rhs3_type);
4470 return true;
4473 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4474 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4475 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4477 error ("vector types expected in %qs", code_name);
4478 debug_generic_expr (lhs_type);
4479 debug_generic_expr (rhs1_type);
4480 debug_generic_expr (rhs2_type);
4481 debug_generic_expr (rhs3_type);
4482 return true;
4485 return false;
4487 case BIT_INSERT_EXPR:
4488 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4490 error ("type mismatch in %qs", code_name);
4491 debug_generic_expr (lhs_type);
4492 debug_generic_expr (rhs1_type);
4493 return true;
4495 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4496 && INTEGRAL_TYPE_P (rhs2_type))
4497 /* Vector element insert. */
4498 || (VECTOR_TYPE_P (rhs1_type)
4499 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4500 /* Aligned sub-vector insert. */
4501 || (VECTOR_TYPE_P (rhs1_type)
4502 && VECTOR_TYPE_P (rhs2_type)
4503 && types_compatible_p (TREE_TYPE (rhs1_type),
4504 TREE_TYPE (rhs2_type))
4505 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4506 TYPE_VECTOR_SUBPARTS (rhs2_type))
4507 && multiple_p (wi::to_poly_offset (rhs3),
4508 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4510 error ("not allowed type combination in %qs", code_name);
4511 debug_generic_expr (rhs1_type);
4512 debug_generic_expr (rhs2_type);
4513 return true;
4515 if (! tree_fits_uhwi_p (rhs3)
4516 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4517 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4519 error ("invalid position or size in %qs", code_name);
4520 return true;
4522 if (INTEGRAL_TYPE_P (rhs1_type)
4523 && !type_has_mode_precision_p (rhs1_type))
4525 error ("%qs into non-mode-precision operand", code_name);
4526 return true;
4528 if (INTEGRAL_TYPE_P (rhs1_type))
4530 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4531 if (bitpos >= TYPE_PRECISION (rhs1_type)
4532 || (bitpos + TYPE_PRECISION (rhs2_type)
4533 > TYPE_PRECISION (rhs1_type)))
4535 error ("insertion out of range in %qs", code_name);
4536 return true;
4539 else if (VECTOR_TYPE_P (rhs1_type))
4541 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4542 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4543 if (bitpos % bitsize != 0)
4545 error ("%qs not at element boundary", code_name);
4546 return true;
4549 return false;
4551 case DOT_PROD_EXPR:
4553 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4554 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4555 && ((!INTEGRAL_TYPE_P (rhs1_type)
4556 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4557 || (!INTEGRAL_TYPE_P (lhs_type)
4558 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4559 /* rhs1_type and rhs2_type may differ in sign. */
4560 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4561 || !useless_type_conversion_p (lhs_type, rhs3_type)
4562 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4563 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4565 error ("type mismatch in %qs", code_name);
4566 debug_generic_expr (lhs_type);
4567 debug_generic_expr (rhs1_type);
4568 debug_generic_expr (rhs2_type);
4569 return true;
4571 return false;
4574 case REALIGN_LOAD_EXPR:
4575 /* FIXME. */
4576 return false;
4578 default:
4579 gcc_unreachable ();
4581 return false;
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  /* Accumulates non-fatal verification failures from reference checks;
     returned when no hard error short-circuits first.  */
  bool res = false;

  const char* const code_name = get_tree_code_name (rhs_code);

  /* A single-rhs assignment is a plain copy; types must agree.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in %qs", code_name);
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* Clobbers may only target declarations or MEM_REFs.  */
  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("%qs LHS in clobber statement",
	     get_tree_code_name (TREE_CODE (lhs)));
      debug_generic_expr (lhs);
      return true;
    }

  if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
    {
      error ("%qs LHS in assignment statement",
	     get_tree_code_name (TREE_CODE (lhs)));
      debug_generic_expr (lhs);
      return true;
    }

  /* A memory LHS gets its reference tree verified; failures are
     accumulated in RES rather than returned immediately.  */
  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in %qs", code_name);
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return (verify_address (rhs1, true)
		|| verify_types_in_gimple_reference (op, true));
      }

    /* tcc_reference */
    case INDIRECT_REF:
      /* INDIRECT_REF was lowered to MEM_REF; it must not survive
	 into gimple.  */
      error ("%qs in gimple IL", code_name);
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* A memory store of register type must have a register RHS,
	 not another memory reference.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* Stores of register type must come from a register RHS, i.e.
	 at most one side of the assignment can touch memory.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case CONSTRUCTOR:
      if (VECTOR_TYPE_P (rhs1_type))
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (then the element count must be correct to cover the whole
	     outer vector and index must be NULL on all elements, or it is
	     a CONSTRUCTOR of scalar elements, where we as an exception allow
	     smaller number of elements (assuming zero filling) and
	     consecutive indexes as compared to NULL indexes (such
	     CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      /* The first element fixes the expected type of the rest.  */
	      if (elt_t == NULL_TREE)
		{
		  elt_t = TREE_TYPE (elt_v);
		  if (VECTOR_TYPE_P (elt_t))
		    {
		      /* NOTE(review): this inner ELT_T deliberately shadows
			 the outer one with the same value; the checks below
			 use the shadowed copy.  */
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
					 * TYPE_VECTOR_SUBPARTS (elt_t),
					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
			{
			  error ("incorrect number of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
		    {
		      error ("incorrect number of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      /* Indexes must be NULL for sub-vector elements, and either
		 NULL or the consecutive constant I for scalars.  */
	      if (elt_i != NULL_TREE
		  && (VECTOR_TYPE_P (elt_t)
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector %qs with non-NULL element index",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (!is_gimple_val (elt_v))
		{
		  error ("vector %qs element is not a GIMPLE value",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
	{
	  /* Non-vector CONSTRUCTORs on the RHS must be empty
	     (aggregate zero-initialization).  */
	  error ("non-vector %qs with elements", code_name);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case WITH_SIZE_EXPR:
      error ("%qs RHS in assignment statement",
	     get_tree_code_name (rhs_code));
      debug_generic_expr (rhs1);
      return true;

    case OBJ_TYPE_REF:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
4819 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4820 is a problem, otherwise false. */
4822 static bool
4823 verify_gimple_assign (gassign *stmt)
4825 switch (gimple_assign_rhs_class (stmt))
4827 case GIMPLE_SINGLE_RHS:
4828 return verify_gimple_assign_single (stmt);
4830 case GIMPLE_UNARY_RHS:
4831 return verify_gimple_assign_unary (stmt);
4833 case GIMPLE_BINARY_RHS:
4834 return verify_gimple_assign_binary (stmt);
4836 case GIMPLE_TERNARY_RHS:
4837 return verify_gimple_assign_ternary (stmt);
4839 default:
4840 gcc_unreachable ();
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (greturn *stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  /* For a by-reference return, OP is (or is based on) a RESULT_DECL
     whose gimple type is a pointer to the returned object.  Replace OP
     by its type here so the TREE_TYPE below yields the pointed-to type,
     which is what must match RESTYPE.  */
  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4886 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4887 is a problem, otherwise false. */
4889 static bool
4890 verify_gimple_goto (ggoto *stmt)
4892 tree dest = gimple_goto_dest (stmt);
4894 /* ??? We have two canonical forms of direct goto destinations, a
4895 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4896 if (TREE_CODE (dest) != LABEL_DECL
4897 && (!is_gimple_val (dest)
4898 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4900 error ("goto destination is neither a label nor a pointer");
4901 return true;
4904 return false;
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is the default case: it must have no bounds and no chain.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE
      || CASE_HIGH (elt) != NULL_TREE
      || CASE_CHAIN (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      /* CASE_CHAIN is only meaningful pre-gimplification.  */
      if (CASE_CHAIN (elt))
	{
	  error ("invalid %<CASE_CHAIN%>");
	  debug_generic_expr (elt);
	  return true;
	}
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range case needs CASE_LOW strictly below CASE_HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      /* The type of the first non-default label fixes the expected
	 type of all remaining labels.  */
      if (! elt_type)
	{
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}
      if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	  || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	{
	  error ("type mismatch for case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      /* Case labels must be sorted and non-overlapping: each label's low
	 bound must lie strictly above the previous label's upper bound.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
5001 /* Verify a gimple debug statement STMT.
5002 Returns true if anything is wrong. */
5004 static bool
5005 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5007 /* There isn't much that could be wrong in a gimple debug stmt. A
5008 gimple debug bind stmt, for example, maps a tree, that's usually
5009 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5010 component or member of an aggregate type, to another tree, that
5011 can be an arbitrary expression. These stmts expand into debug
5012 insns, and are converted to debug notes by var-tracking.cc. */
5013 return false;
/* Verify a gimple label statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_label (glabel *stmt)
{
  tree decl = gimple_label_label (stmt);
  int uid;
  bool err = false;

  if (TREE_CODE (decl) != LABEL_DECL)
    return true;

  /* Nonlocal and forced labels may legitimately belong to a different
     function context; everything else must belong to the current one.  */
  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
      && DECL_CONTEXT (decl) != current_function_decl)
    {
      error ("label context is not the current function declaration");
      err |= true;
    }

  /* With a CFG present, the label's UID must map back to the block that
     contains this statement.  */
  uid = LABEL_DECL_UID (decl);
  if (cfun->cfg
      && (uid == -1
	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
    {
      error ("incorrect entry in %<label_to_block_map%>");
      err |= true;
    }

  /* If the label is an EH landing pad, it must be the pad's recorded
     post-landing-pad label.  */
  uid = EH_LANDING_PAD_NR (decl);
  if (uid)
    {
      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
      if (decl != lp->post_landing_pad)
	{
	  error ("incorrect setting of landing pad number");
	  err |= true;
	}
    }

  return err;
}
5058 /* Verify a gimple cond statement STMT.
5059 Returns true if anything is wrong. */
5061 static bool
5062 verify_gimple_cond (gcond *stmt)
5064 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5066 error ("invalid comparison code in gimple cond");
5067 return true;
5069 if (!(!gimple_cond_true_label (stmt)
5070 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5071 || !(!gimple_cond_false_label (stmt)
5072 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5074 error ("invalid labels in gimple cond");
5075 return true;
5078 return verify_gimple_comparison (boolean_type_node,
5079 gimple_cond_lhs (stmt),
5080 gimple_cond_rhs (stmt),
5081 gimple_cond_code (stmt));
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  Dispatches to the per-code verifier.  */

static bool
verify_gimple_stmt (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      /* Inline asm operands are not verified here.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_ASSUME:
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gphi *phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid %<PHI%> result");
      return true;
    }

  /* The result must be an SSA name; a virtual PHI must additionally be
     rooted at the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid %<PHI%> result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing %<PHI%> def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      /* Arguments must match the result in virtual-ness: a virtual PHI
	 takes only virtual SSA names; a non-virtual PHI takes gimple
	 values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid %<PHI%> argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Optional stricter checking: every argument's type must convert
	 trivially to the result's type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in %<PHI%> argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
/* Verify the GIMPLE statements inside the sequence STMTS.
   Recurses into statements that carry nested sequences; everything else
   goes through verify_gimple_stmt.  Returns true on any error.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple *stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
		   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_ASSUME:
	  err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  /* Transactions verify their labels as well as their body.  */
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
5273 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5274 is a problem, otherwise false. */
5276 static bool
5277 verify_gimple_transaction (gtransaction *stmt)
5279 tree lab;
5281 lab = gimple_transaction_label_norm (stmt);
5282 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5283 return true;
5284 lab = gimple_transaction_label_uninst (stmt);
5285 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5286 return true;
5287 lab = gimple_transaction_label_over (stmt);
5288 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5289 return true;
5291 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5295 /* Verify the GIMPLE statements inside the statement list STMTS. */
5297 DEBUG_FUNCTION bool
5298 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5300 timevar_push (TV_TREE_STMT_VERIFY);
5301 bool res = verify_gimple_in_seq_2 (stmts);
5302 if (res && ice)
5303 internal_error ("%<verify_gimple%> failed");
5304 timevar_pop (TV_TREE_STMT_VERIFY);
5305 return res;
5308 /* Return true when the T can be shared. */
5310 static bool
5311 tree_node_can_be_shared (tree t)
5313 if (IS_TYPE_OR_DECL_P (t)
5314 || TREE_CODE (t) == SSA_NAME
5315 || TREE_CODE (t) == IDENTIFIER_NODE
5316 || TREE_CODE (t) == CASE_LABEL_EXPR
5317 || is_gimple_min_invariant (t))
5318 return true;
5320 if (t == error_mark_node)
5321 return true;
5323 return false;
5326 /* Called via walk_tree. Verify tree sharing. */
5328 static tree
5329 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5331 hash_set<void *> *visited = (hash_set<void *> *) data;
5333 if (tree_node_can_be_shared (*tp))
5335 *walk_subtrees = false;
5336 return NULL;
5339 if (visited->add (*tp))
5340 return *tp;
5342 return NULL;
5345 /* Called via walk_gimple_stmt. Verify tree sharing. */
5347 static tree
5348 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5350 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5351 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5354 static bool eh_error_found;
5355 bool
5356 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5357 hash_set<gimple *> *visited)
5359 if (!visited->contains (stmt))
5361 error ("dead statement in EH table");
5362 debug_gimple_stmt (stmt);
5363 eh_error_found = true;
5365 return true;
5368 /* Verify if the location LOCs block is in BLOCKS. */
5370 static bool
5371 verify_location (hash_set<tree> *blocks, location_t loc)
5373 tree block = LOCATION_BLOCK (loc);
5374 if (block != NULL_TREE
5375 && !blocks->contains (block))
5377 error ("location references block not in block tree");
5378 return true;
5380 if (block != NULL_TREE)
5381 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5382 return false;
5385 /* Called via walk_tree. Verify that expressions have no blocks. */
5387 static tree
5388 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5390 if (!EXPR_P (*tp))
5392 *walk_subtrees = false;
5393 return NULL;
5396 location_t loc = EXPR_LOCATION (*tp);
5397 if (LOCATION_BLOCK (loc) != NULL)
5398 return *tp;
5400 return NULL;
/* Called via walk_tree.  Verify locations of expressions.  DATA is the
   set of known BLOCKs; returns the offending tree on failure.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;
  tree t = *tp;

  /* ??? This doesn't really belong here but there's no good place to
     stick this remainder of old verify_expr.  */
  /* ??? This barfs on debug stmts which contain binds to vars with
     different function context.  */
#if 0
  if (VAR_P (t)
      || TREE_CODE (t) == PARM_DECL
      || TREE_CODE (t) == RESULT_DECL)
    {
      tree context = decl_function_context (t);
      if (context != cfun->decl
	  && !SCOPE_FILE_SCOPE_P (context)
	  && !TREE_STATIC (t)
	  && !DECL_EXTERNAL (t))
	{
	  error ("local declaration from a different function");
	  return t;
	}
    }
#endif

  /* Expressions hanging off a decl's DEBUG_EXPR or VALUE_EXPR must not
     reference any block at all.  */
  if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
    {
      tree x = DECL_DEBUG_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  if ((VAR_P (t)
       || TREE_CODE (t) == PARM_DECL
       || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t))
    {
      tree x = DECL_VALUE_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  /* Only expressions carry locations worth checking.  */
  if (!EXPR_P (t))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (t);
  if (verify_location (blocks, loc))
    return t;

  return NULL;
}
5463 /* Called via walk_gimple_op. Verify locations of expressions. */
5465 static tree
5466 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5468 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5469 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5472 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5474 static void
5475 collect_subblocks (hash_set<tree> *blocks, tree block)
5477 tree t;
5478 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5480 blocks->add (t);
5481 collect_subblocks (blocks, t);
5485 /* Disable warnings about missing quoting in GCC diagnostics for
5486 the verification errors. Their format strings don't follow
5487 GCC diagnostic conventions and trigger an ICE in the end. */
5488 #if __GNUC__ >= 10
5489 # pragma GCC diagnostic push
5490 # pragma GCC diagnostic ignored "-Wformat-diag"
5491 #endif
/* Verify the GIMPLE statements in the CFG of FN.  VERIFY_NOTHROW
   additionally requires statements in EH regions to actually be able to
   throw; ICE turns any failure into an internal compiler error.
   Returns true when anything is wrong.  */

DEBUG_FUNCTION bool
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  hash_set<void *> visited;
  hash_set<gimple *> visited_throwing_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;
      edge_iterator ei;
      edge e;

      /* First pass: the PHI nodes of the block.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      /* Check tree sharing of the argument against all trees
		 seen so far in this function.  */
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Second pass: the ordinary statements of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    visited_throwing_stmts.add (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (cfun, stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  /* A throwing statement must end its basic block.  */
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}

      /* Edge goto_locus locations must reference known blocks, too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus != UNKNOWN_LOCATION)
	  err |= verify_location (&blocks, e->goto_locus);
    }

  /* Cross-check: every statement in the EH table must have been seen in
     the walk above; anything else is a dead EH table entry.  */
  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  eh_error_found = false;
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_throwing_stmts);

  if (ice && (err || eh_error_found))
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);

  return (err || eh_error_found);
}
5661 /* Verifies that the flow information is OK.  */
/* Returns nonzero if any CFG invariant is violated; each violation is
   reported via error ().  Checks: ENTRY/EXIT carry no IL, no fallthru
   into EXIT, label placement, no control flow mid-block, returns_twice
   call placement, and per-last-statement outgoing edge flags.  */
5663 static int
5664 gimple_verify_flow_info (void)
5666   int err = 0;
5667   basic_block bb;
5668   gimple_stmt_iterator gsi;
5669   gimple *stmt;
5670   edge e;
5671   edge_iterator ei;
/* The artificial ENTRY and EXIT blocks must never hold statements
   or PHI nodes.  */
5673   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5674       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5676       error ("ENTRY_BLOCK has IL associated with it");
5677       err = 1;
5680   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5681       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5683       error ("EXIT_BLOCK has IL associated with it");
5684       err = 1;
5687   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5688     if (e->flags & EDGE_FALLTHRU)
5690 	error ("fallthru to exit from bb %d", e->src->index);
5691 	err = 1;
5694   FOR_EACH_BB_FN (bb, cfun)
5696       bool found_ctrl_stmt = false;
5698       stmt = NULL;
5700       /* Skip labels on the start of basic block.  */
/* STMT trails behind GSI so PREV_STMT below tells whether a label is
   the first statement of the block.  */
5701       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5703 	  tree label;
5704 	  gimple *prev_stmt = stmt;
5706 	  stmt = gsi_stmt (gsi);
5708 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5709 	    break;
5711 	  label = gimple_label_label (as_a <glabel *> (stmt));
5712 	  if (prev_stmt && DECL_NONLOCAL (label))
5714 	      error ("nonlocal label %qD is not first in a sequence "
5715 		     "of labels in bb %d", label, bb->index);
5716 	      err = 1;
5719 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5721 	      error ("EH landing pad label %qD is not first in a sequence "
5722 		     "of labels in bb %d", label, bb->index);
5723 	      err = 1;
5726 	  if (label_to_block (cfun, label) != bb)
5728 	      error ("label %qD to block does not match in bb %d",
5729 		     label, bb->index);
5730 	      err = 1;
5733 	  if (decl_function_context (label) != current_function_decl)
5735 	      error ("label %qD has incorrect context in bb %d",
5736 		     label, bb->index);
5737 	      err = 1;
5741       /* Verify that body of basic block BB is free of control flow.  */
5742       bool seen_nondebug_stmt = false;
/* GSI continues from where the label scan stopped.  */
5743       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5745 	  gimple *stmt = gsi_stmt (gsi);
/* A block-ending statement seen on a previous iteration means any
   further statement is control flow in the middle of the block.  */
5747 	  if (found_ctrl_stmt)
5749 	      error ("control flow in the middle of basic block %d",
5750 		     bb->index);
5751 	      err = 1;
5754 	  if (stmt_ends_bb_p (stmt))
5755 	    found_ctrl_stmt = true;
5757 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5759 	      error ("label %qD in the middle of basic block %d",
5760 		     gimple_label_label (label_stmt), bb->index);
5761 	      err = 1;
5764 	  /* Check that no statements appear between a returns_twice call
5765 	     and its associated abnormal edge.  */
5766 	  if (gimple_code (stmt) == GIMPLE_CALL
5767 	      && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5769 	      const char *misplaced = NULL;
5770 	      /* TM is an exception: it points abnormal edges just after the
5771 		 call that starts a transaction, i.e. it must end the BB.  */
5772 	      if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5774 		  if (single_succ_p (bb)
5775 		      && bb_has_abnormal_pred (single_succ (bb))
5776 		      && !gsi_one_nondebug_before_end_p (gsi))
5777 		    misplaced = "not last";
5779 	      else
5781 		  if (seen_nondebug_stmt
5782 		      && bb_has_abnormal_pred (bb))
5783 		    misplaced = "not first";
5785 	      if (misplaced)
5787 		  error ("returns_twice call is %s in basic block %d",
5788 			 misplaced, bb->index);
5789 		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5790 		  err = 1;
5793 	  if (!is_gimple_debug (stmt))
5794 	    seen_nondebug_stmt = true;
/* Now validate the outgoing edges against the last real statement.  */
5797       gsi = gsi_last_nondebug_bb (bb);
5798       if (gsi_end_p (gsi))
5799 	continue;
5801       stmt = gsi_stmt (gsi);
5803       if (gimple_code (stmt) == GIMPLE_LABEL)
5804 	continue;
5806       err |= verify_eh_edges (stmt);
5808       if (is_ctrl_stmt (stmt))
5810 	  FOR_EACH_EDGE (e, ei, bb->succs)
5811 	    if (e->flags & EDGE_FALLTHRU)
5813 		error ("fallthru edge after a control statement in bb %d",
5814 		       bb->index);
5815 		err = 1;
5819       if (gimple_code (stmt) != GIMPLE_COND)
5821 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5822 	     after anything else but if statement.  */
5823 	  FOR_EACH_EDGE (e, ei, bb->succs)
5824 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5826 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5827 		       bb->index);
5828 		err = 1;
/* Per-statement-kind edge-flag checks.  */
5832       switch (gimple_code (stmt))
5834 	case GIMPLE_COND:
5836 	    edge true_edge;
5837 	    edge false_edge;
5839 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5841 	    if (!true_edge
5842 		|| !false_edge
5843 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5844 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5845 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5846 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5847 		|| EDGE_COUNT (bb->succs) >= 3)
5849 		error ("wrong outgoing edge flags at end of bb %d",
5850 		       bb->index);
5851 		err = 1;
5854 	  break;
5856 	case GIMPLE_GOTO:
/* Simple gotos are represented implicitly as fallthru edges, so an
   explicit one surviving to this point is an error.  */
5857 	  if (simple_goto_p (stmt))
5859 	      error ("explicit goto at end of bb %d", bb->index);
5860 	      err = 1;
5862 	  else
5864 	      /* FIXME.  We should double check that the labels in the
5865 		 destination blocks have their address taken.  */
5866 	      FOR_EACH_EDGE (e, ei, bb->succs)
5867 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5868 				 | EDGE_FALSE_VALUE))
5869 		    || !(e->flags & EDGE_ABNORMAL))
5871 		    error ("wrong outgoing edge flags at end of bb %d",
5872 			   bb->index);
5873 		    err = 1;
5876 	  break;
5878 	case GIMPLE_CALL:
5879 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5880 	    break;
5881 	  /* fallthru */
5882 	case GIMPLE_RETURN:
5883 	  if (!single_succ_p (bb)
5884 	      || (single_succ_edge (bb)->flags
5885 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5886 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5888 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5889 	      err = 1;
5891 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5893 	      error ("return edge does not point to exit in bb %d",
5894 		     bb->index);
5895 	      err = 1;
5897 	  break;
5899 	case GIMPLE_SWITCH:
5901 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5902 	    tree prev;
5903 	    edge e;
5904 	    size_t i, n;
5906 	    n = gimple_switch_num_labels (switch_stmt);
5908 	    /* Mark all the destination basic blocks.  */
/* bb->aux protocol: 1 = named by a case label, 2 = also reached by
   an edge; cleared to 0 again before leaving the case.  */
5909 	    for (i = 0; i < n; ++i)
5911 		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5912 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5913 		label_bb->aux = (void *)1;
5916 	    /* Verify that the case labels are sorted.  */
5917 	    prev = gimple_switch_label (switch_stmt, 0);
5918 	    for (i = 1; i < n; ++i)
5920 		tree c = gimple_switch_label (switch_stmt, i);
5921 		if (!CASE_LOW (c))
5923 		    error ("found default case not at the start of "
5924 			   "case vector");
5925 		    err = 1;
5926 		    continue;
5928 		if (CASE_LOW (prev)
5929 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5931 		    error ("case labels not sorted: ");
5932 		    print_generic_expr (stderr, prev);
5933 		    fprintf (stderr," is greater than ");
5934 		    print_generic_expr (stderr, c);
5935 		    fprintf (stderr," but comes before it.\n");
5936 		    err = 1;
5938 		prev = c;
5940 	    /* VRP will remove the default case if it can prove it will
5941 	       never be executed.  So do not verify there always exists
5942 	       a default case here.  */
5944 	    FOR_EACH_EDGE (e, ei, bb->succs)
5946 		if (!e->dest->aux)
5948 		    error ("extra outgoing edge %d->%d",
5949 			   bb->index, e->dest->index);
5950 		    err = 1;
5953 		e->dest->aux = (void *)2;
5954 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5955 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5957 		    error ("wrong outgoing edge flags at end of bb %d",
5958 			   bb->index);
5959 		    err = 1;
5963 	    /* Check that we have all of them.  */
5964 	    for (i = 0; i < n; ++i)
5966 		basic_block label_bb = gimple_switch_label_bb (cfun,
5967 							       switch_stmt, i);
5969 		if (label_bb->aux != (void *)2)
5971 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5972 		    err = 1;
5976 	    FOR_EACH_EDGE (e, ei, bb->succs)
5977 	      e->dest->aux = (void *)0;
5979 	  break;
5981 	case GIMPLE_EH_DISPATCH:
5982 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5983 	  break;
5985 	default:
5986 	  break;
5990   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5991     verify_dominators (CDI_DOMINATORS);
5993   return err;
5996 #if __GNUC__ >= 10
5997 # pragma GCC diagnostic pop
5998 #endif
6000 /* Updates phi nodes after creating a forwarder block joined
6001    by edge FALLTHRU.  */
/* DUMMY (fallthru->src) keeps the PHIs that were in the original
   block; BB (fallthru->dest) receives fresh PHIs whose argument on
   FALLTHRU is the (renamed) result of the corresponding old PHI.
   Arguments for the redirected predecessor edges are pulled from the
   pending-statement lists afterwards.  */
6003 static void
6004 gimple_make_forwarder_block (edge fallthru)
6006   edge e;
6007   edge_iterator ei;
6008   basic_block dummy, bb;
6009   tree var;
6010   gphi_iterator gsi;
6011   bool forward_location_p;
6013   dummy = fallthru->src;
6014   bb = fallthru->dest;
/* With a single predecessor no new PHIs are needed.  */
6016   if (single_pred_p (bb))
6017     return;
6019   /* We can forward location info if we have only one predecessor.  */
6020   forward_location_p = single_pred_p (dummy);
6022   /* If we redirected a branch we must create new PHI nodes at the
6023      start of BB.  */
6024   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6026       gphi *phi, *new_phi;
6028       phi = gsi.phi ();
6029       var = gimple_phi_result (phi);
6030       new_phi = create_phi_node (var, bb);
/* The old PHI gets a fresh SSA name; its former result VAR is now
   defined by NEW_PHI in BB.  */
6031       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6032       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6033 		   forward_location_p
6034 		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6037   /* Add the arguments we have stored on edges.  */
6038   FOR_EACH_EDGE (e, ei, bb->preds)
6040       if (e == fallthru)
6041 	continue;
6043       flush_pending_stmts (e);
6048 /* Return a non-special label in the head of basic block BLOCK.
6049    Create one if it doesn't exist.  */
/* "Non-special" here means not DECL_NONLOCAL.  Side effect: a found
   label that is not already first among the block's labels is moved to
   the front; if none exists, an artificial label is created and
   inserted at the head.  */
6051 tree
6052 gimple_block_label (basic_block bb)
6054   gimple_stmt_iterator i, s = gsi_start_bb (bb);
6055   bool first = true;
6056   tree label;
6057   glabel *stmt;
6059   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6061       stmt = dyn_cast <glabel *> (gsi_stmt (i));
6062       if (!stmt)
6063 	break;
6064       label = gimple_label_label (stmt);
6065       if (!DECL_NONLOCAL (label))
6067 	  if (!first)
6068 	    gsi_move_before (&i, &s);
6069 	  return label;
6073   label = create_artificial_label (UNKNOWN_LOCATION);
6074   stmt = gimple_build_label (label);
6075   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6076   return label;
6080 /* Attempt to perform edge redirection by replacing a possibly complex
6081    jump instruction by a goto or by removing the jump completely.
6082    This can apply only if all edges now point to the same block.  The
6083    parameters and return values are equivalent to
6084    redirect_edge_and_branch.  */
/* Returns the redirected (now fallthru) edge on success, NULL if the
   transformation does not apply.  Only GIMPLE_COND/GIMPLE_SWITCH
   terminators are removed here.  */
6086 static edge
6087 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6089   basic_block src = e->src;
6090   gimple_stmt_iterator i;
6091   gimple *stmt;
6093   /* We can replace or remove a complex jump only when we have exactly
6094      two edges.  */
6095   if (EDGE_COUNT (src->succs) != 2
6096       /* Verify that all targets will be TARGET.  Specifically, the
6097 	 edge that is not E must also go to TARGET.  */
/* EDGE_SUCC (src, 0) == e picks index 1 when E is edge 0 and vice
   versa, i.e. "the other" successor edge.  */
6098       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6099     return NULL;
6101   i = gsi_last_bb (src);
6102   if (gsi_end_p (i))
6103     return NULL;
6105   stmt = gsi_stmt (i);
6107   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6109       gsi_remove (&i, true);
6110       e = ssa_redirect_edge (e, target);
6111       e->flags = EDGE_FALLTHRU;
6112       return e;
6115   return NULL;
6119 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
6120    edge representing the redirected branch.  */
/* Abnormal edges cannot be redirected; EH edges are handled by
   redirect_eh_edge.  Otherwise the last statement of E->src is fixed
   up per its kind before the CFG edge itself is redirected.  */
6122 static edge
6123 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6125   basic_block bb = e->src;
6126   gimple_stmt_iterator gsi;
6127   edge ret;
6128   gimple *stmt;
6130   if (e->flags & EDGE_ABNORMAL)
6131     return NULL;
6133   if (e->dest == dest)
6134     return NULL;
6136   if (e->flags & EDGE_EH)
6137     return redirect_eh_edge (e, dest);
6139   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6141       ret = gimple_try_redirect_by_replacing_jump (e, dest);
6142       if (ret)
6143 	return ret;
6146   gsi = gsi_last_nondebug_bb (bb);
6147   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
/* GIMPLE_ERROR_MARK is used as a sentinel for "empty block" so the
   default (fallthru) case handles it.  */
6149   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6151     case GIMPLE_COND:
6152       /* For COND_EXPR, we only need to redirect the edge.  */
6153       break;
6155     case GIMPLE_GOTO:
6156       /* No non-abnormal edges should lead from a non-simple goto, and
6157 	 simple ones should be represented implicitly.  */
6158       gcc_unreachable ();
6160     case GIMPLE_SWITCH:
6162 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
6163 	tree label = gimple_block_label (dest);
6164 	tree cases = get_cases_for_edge (e, switch_stmt);
6166 	/* If we have a list of cases associated with E, then use it
6167 	   as it's a lot faster than walking the entire case vector.  */
6168 	if (cases)
6170 	    edge e2 = find_edge (e->src, dest);
6171 	    tree last, first;
6173 	    first = cases;
/* Retarget every case on E's list to the (possibly new) label of
   DEST; LAST ends up pointing at the list's tail.  */
6174 	    while (cases)
6176 		last = cases;
6177 		CASE_LABEL (cases) = label;
6178 		cases = CASE_CHAIN (cases);
6181 	    /* If there was already an edge in the CFG, then we need
6182 	       to move all the cases associated with E to E2.  */
6183 	    if (e2)
6185 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
/* Splice E's case list into E2's list.  */
6187 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
6188 		CASE_CHAIN (cases2) = first;
6190 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6192 	else
6194 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
6196 	    for (i = 0; i < n; i++)
6198 		tree elt = gimple_switch_label (switch_stmt, i);
6199 		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6200 		  CASE_LABEL (elt) = label;
6204       break;
6206     case GIMPLE_ASM:
6208 	gasm *asm_stmt = as_a <gasm *> (stmt);
6209 	int i, n = gimple_asm_nlabels (asm_stmt);
6210 	tree label = NULL;
6212 	for (i = 0; i < n; ++i)
6214 	    tree cons = gimple_asm_label_op (asm_stmt, i);
6215 	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6217 		if (!label)
6218 		  label = gimple_block_label (dest);
6219 		TREE_VALUE (cons) = label;
6223 	/* If we didn't find any label matching the former edge in the
6224 	   asm labels, we must be redirecting the fallthrough
6225 	   edge.  */
6226 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6228       break;
6230     case GIMPLE_RETURN:
6231       gsi_remove (&gsi, true);
6232       e->flags |= EDGE_FALLTHRU;
6233       break;
6235     case GIMPLE_OMP_RETURN:
6236     case GIMPLE_OMP_CONTINUE:
6237     case GIMPLE_OMP_SECTIONS_SWITCH:
6238     case GIMPLE_OMP_FOR:
6239       /* The edges from OMP constructs can be simply redirected.  */
6240       break;
6242     case GIMPLE_EH_DISPATCH:
6243       if (!(e->flags & EDGE_FALLTHRU))
6244 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6245       break;
6247     case GIMPLE_TRANSACTION:
6248       if (e->flags & EDGE_TM_ABORT)
6249 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6250 					   gimple_block_label (dest));
6251       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6252 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6253 					     gimple_block_label (dest));
6254       else
6255 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6256 					   gimple_block_label (dest));
6257       break;
6259     default:
6260       /* Otherwise it must be a fallthru edge, and we don't need to
6261 	 do anything besides redirecting it.  */
6262       gcc_assert (e->flags & EDGE_FALLTHRU);
6263       break;
6266   /* Update/insert PHI nodes as necessary.  */
6268   /* Now update the edges in the CFG.  */
6269   e = ssa_redirect_edge (e, dest);
6271   return e;
6274 /* Returns true if it is possible to remove edge E by redirecting
6275 it to the destination of the other edge from E->src. */
6277 static bool
6278 gimple_can_remove_branch_p (const_edge e)
6280 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6281 return false;
6283 return true;
6286 /* Simple wrapper, as we can always redirect fallthru edges. */
6288 static basic_block
6289 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6291 e = gimple_redirect_edge_and_branch (e, dest);
6292 gcc_assert (e);
6294 return NULL;
6298 /* Splits basic block BB after statement STMT (but at least after the
6299    labels).  If STMT is NULL, BB is split just after the labels.  */
/* Returns the newly created block, which takes over BB's outgoing
   edges and all statements after the split point.  */
6301 static basic_block
6302 gimple_split_block (basic_block bb, void *stmt)
6304   gimple_stmt_iterator gsi;
6305   gimple_stmt_iterator gsi_tgt;
6306   gimple_seq list;
6307   basic_block new_bb;
6308   edge e;
6309   edge_iterator ei;
6311   new_bb = create_empty_bb (bb);
6313   /* Redirect the outgoing edges.  */
6314   new_bb->succs = bb->succs;
6315   bb->succs = NULL;
6316   FOR_EACH_EDGE (e, ei, new_bb->succs)
6317     e->src = new_bb;
6319   /* Get a stmt iterator pointing to the first stmt to move.  */
/* A GIMPLE_LABEL split point is treated like NULL: labels always stay
   with BB.  */
6320   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6321     gsi = gsi_after_labels (bb);
6322   else
6324       gsi = gsi_for_stmt ((gimple *) stmt);
6325       gsi_next (&gsi);
6328   /* Move everything from GSI to the new basic block.  */
6329   if (gsi_end_p (gsi))
6330     return new_bb;
6332   /* Split the statement list - avoid re-creating new containers as this
6333      brings ugly quadratic memory consumption in the inliner.
6334      (We are still quadratic since we need to update stmt BB pointers,
6335      sadly.)  */
6336   gsi_split_seq_before (&gsi, &list);
6337   set_bb_seq (new_bb, list);
6338   for (gsi_tgt = gsi_start (list);
6339        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6340     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6342   return new_bb;
6346 /* Moves basic block BB after block AFTER. */
6348 static bool
6349 gimple_move_block_after (basic_block bb, basic_block after)
6351 if (bb->prev_bb == after)
6352 return true;
6354 unlink_block (bb);
6355 link_block (bb, after);
6357 return true;
6361 /* Return TRUE if block BB has no executable statements, otherwise return
6362 FALSE. */
6364 static bool
6365 gimple_empty_block_p (basic_block bb)
6367 /* BB must have no executable statements. */
6368 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6369 if (phi_nodes (bb))
6370 return false;
6371 while (!gsi_end_p (gsi))
6373 gimple *stmt = gsi_stmt (gsi);
6374 if (is_gimple_debug (stmt))
6376 else if (gimple_code (stmt) == GIMPLE_NOP
6377 || gimple_code (stmt) == GIMPLE_PREDICT)
6379 else
6380 return false;
6381 gsi_next (&gsi);
6383 return true;
6387 /* Split a basic block if it ends with a conditional branch and if the
6388    other part of the block is not empty.  */
/* Returns the new block holding the branch, or NULL if BB does not end
   in a GIMPLE_COND/GIMPLE_SWITCH (or is empty of real statements).  */
6390 static basic_block
6391 gimple_split_block_before_cond_jump (basic_block bb)
6393   gimple *last, *split_point;
6394   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6395   if (gsi_end_p (gsi))
6396     return NULL;
6397   last = gsi_stmt (gsi);
6398   if (gimple_code (last) != GIMPLE_COND
6399       && gimple_code (last) != GIMPLE_SWITCH)
6400     return NULL;
/* Split after the statement preceding the branch, so the branch alone
   starts the new block.  */
6401   gsi_prev (&gsi);
6402   split_point = gsi_stmt (gsi);
6403   return split_block (bb, split_point)->dest;
6407 /* Return true if basic_block can be duplicated.  */
/* A block must not be duplicated when it ends a transaction, ends with
   an IFN_UNIQUE call, or contains returns_twice / SIMT-group internal
   calls whose abnormal edges or group structure cannot be replicated.  */
6409 static bool
6410 gimple_can_duplicate_bb_p (const_basic_block bb)
6412   gimple *last = last_nondebug_stmt (CONST_CAST_BB (bb));
6414   /* Do checks that can only fail for the last stmt, to minimize the work in the
6415      stmt loop.  */
6416   if (last) {
6417     /* A transaction is a single entry multiple exit region.  It
6418        must be duplicated in its entirety or not at all.  */
6419     if (gimple_code (last) == GIMPLE_TRANSACTION)
6420       return false;
6422     /* An IFN_UNIQUE call must be duplicated as part of its group,
6423        or not at all.  */
6424     if (is_gimple_call (last)
6425 	&& gimple_call_internal_p (last)
6426 	&& gimple_call_internal_unique_p (last))
6427       return false;
6430   for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6431        !gsi_end_p (gsi); gsi_next (&gsi))
6433       gimple *g = gsi_stmt (gsi);
6435       /* Prohibit duplication of returns_twice calls, otherwise associated
6436 	 abnormal edges also need to be duplicated properly.
6437 	 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6438 	 duplicated as part of its group, or not at all.
6439 	 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6440 	 group, so the same holds there.  */
6441       if (is_gimple_call (g)
6442 	  && (gimple_call_flags (g) & ECF_RETURNS_TWICE
6443 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6444 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6445 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6446 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6447 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6448 	return false;
6451   return true;
6454 /* Create a duplicate of the basic block BB.  NOTE: This does not
6455    preserve SSA form.  */
/* PHIs are copied without arguments (incoming edges do not exist yet);
   EH info and histograms are duplicated per statement; if ID is
   non-NULL, MR_DEPENDENCE_CLIQUE values brought in via inlining are
   remapped to fresh cliques.  Returns the new block, placed before the
   EXIT block.  */
6457 static basic_block
6458 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6460   basic_block new_bb;
6461   gimple_stmt_iterator gsi_tgt;
6463   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6465   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6466      the incoming edges have not been setup yet.  */
6467   for (gphi_iterator gpi = gsi_start_phis (bb);
6468        !gsi_end_p (gpi);
6469        gsi_next (&gpi))
6471       gphi *phi, *copy;
6472       phi = gpi.phi ();
6473       copy = create_phi_node (NULL_TREE, new_bb);
6474       create_new_def_for (gimple_phi_result (phi), copy,
6475 			  gimple_phi_result_ptr (copy));
6476       gimple_set_uid (copy, gimple_uid (phi));
6479   gsi_tgt = gsi_start_bb (new_bb);
6480   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6481        !gsi_end_p (gsi);
6482        gsi_next (&gsi))
6484       def_operand_p def_p;
6485       ssa_op_iter op_iter;
6486       tree lhs;
6487       gimple *stmt, *copy;
6489       stmt = gsi_stmt (gsi);
/* Labels are per-block and must not be duplicated.  */
6490       if (gimple_code (stmt) == GIMPLE_LABEL)
6491 	continue;
6493       /* Don't duplicate label debug stmts.  */
6494       if (gimple_debug_bind_p (stmt)
6495 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6496 	     == LABEL_DECL)
6497 	continue;
6499       /* Create a new copy of STMT and duplicate STMT's virtual
6500 	 operands.  */
6501       copy = gimple_copy (stmt);
6502       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6504       maybe_duplicate_eh_stmt (copy, stmt);
6505       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6507       /* When copying around a stmt writing into a local non-user
6508 	 aggregate, make sure it won't share stack slot with other
6509 	 vars.  */
6510       lhs = gimple_get_lhs (stmt);
6511       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6513 	  tree base = get_base_address (lhs);
6514 	  if (base
6515 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6516 	      && DECL_IGNORED_P (base)
6517 	      && !TREE_STATIC (base)
6518 	      && !DECL_EXTERNAL (base)
6519 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6520 	    DECL_NONSHAREABLE (base) = 1;
6523       /* If requested remap dependence info of cliques brought in
6524 	 via inlining.  */
6525       if (id)
6526 	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6528 	    tree op = gimple_op (copy, i);
6529 	    if (!op)
6530 	      continue;
6531 	    if (TREE_CODE (op) == ADDR_EXPR
6532 		|| TREE_CODE (op) == WITH_SIZE_EXPR)
6533 	      op = TREE_OPERAND (op, 0);
/* Strip component refs to reach the base memory reference.  */
6534 	    while (handled_component_p (op))
6535 	      op = TREE_OPERAND (op, 0);
6536 	    if ((TREE_CODE (op) == MEM_REF
6537 		 || TREE_CODE (op) == TARGET_MEM_REF)
6538 		&& MR_DEPENDENCE_CLIQUE (op) > 1
6539 		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6541 		if (!id->dependence_map)
6542 		  id->dependence_map = new hash_map<dependence_hash,
6543 						    unsigned short>;
6544 		bool existed;
/* Each old clique maps to exactly one fresh clique; reuse it for
   subsequent refs with the same clique.  */
6545 		unsigned short &newc = id->dependence_map->get_or_insert
6546 					 (MR_DEPENDENCE_CLIQUE (op), &existed);
6547 		if (!existed)
6549 		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6550 		    newc = ++cfun->last_clique;
6552 		MR_DEPENDENCE_CLIQUE (op) = newc;
6556       /* Create new names for all the definitions created by COPY and
6557 	 add replacement mappings for each new name.  */
6558       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6559 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6562   return new_bb;
6565 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
/* Finds the original edge E corresponding to E_COPY (mapping copied
   blocks back through get_bb_original when BB_DUPLICATED is set) and
   copies each PHI argument from E to E_COPY.  */
6567 static void
6568 add_phi_args_after_copy_edge (edge e_copy)
6570   basic_block bb, bb_copy = e_copy->src, dest;
6571   edge e;
6572   edge_iterator ei;
6573   gphi *phi, *phi_copy;
6574   tree def;
6575   gphi_iterator psi, psi_copy;
6577   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6578     return;
6580   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6582   if (e_copy->dest->flags & BB_DUPLICATED)
6583     dest = get_bb_original (e_copy->dest);
6584   else
6585     dest = e_copy->dest;
6587   e = find_edge (bb, dest);
6588   if (!e)
6590       /* During loop unrolling the target of the latch edge is copied.
6591 	 In this case we are not looking for edge to dest, but to
6592 	 duplicated block whose original was dest.  */
6593       FOR_EACH_EDGE (e, ei, bb->succs)
6595 	  if ((e->dest->flags & BB_DUPLICATED)
6596 	      && get_bb_original (e->dest) == dest)
6597 	    break;
6600       gcc_assert (e != NULL);
/* Walk original and copied PHI chains in lockstep; they were created
   in the same order during duplication.  */
6603   for (psi = gsi_start_phis (e->dest),
6604        psi_copy = gsi_start_phis (e_copy->dest);
6605        !gsi_end_p (psi);
6606        gsi_next (&psi), gsi_next (&psi_copy))
6608       phi = psi.phi ();
6609       phi_copy = psi_copy.phi ();
6610       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6611       add_phi_arg (phi_copy, def, e_copy,
6612 		   gimple_phi_arg_location_from_edge (phi, e));
6617 /* Basic block BB_COPY was created by code duplication. Add phi node
6618 arguments for edges going out of BB_COPY. The blocks that were
6619 duplicated have BB_DUPLICATED set. */
6621 void
6622 add_phi_args_after_copy_bb (basic_block bb_copy)
6624 edge e_copy;
6625 edge_iterator ei;
6627 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6629 add_phi_args_after_copy_edge (e_copy);
6633 /* Blocks in REGION_COPY array of length N_REGION were created by
6634 duplication of basic blocks. Add phi node arguments for edges
6635 going from these blocks. If E_COPY is not NULL, also add
6636 phi node arguments for its destination.*/
6638 void
6639 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6640 edge e_copy)
6642 unsigned i;
6644 for (i = 0; i < n_region; i++)
6645 region_copy[i]->flags |= BB_DUPLICATED;
6647 for (i = 0; i < n_region; i++)
6648 add_phi_args_after_copy_bb (region_copy[i]);
6649 if (e_copy)
6650 add_phi_args_after_copy_edge (e_copy);
6652 for (i = 0; i < n_region; i++)
6653 region_copy[i]->flags &= ~BB_DUPLICATED;
6656 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6657    important exit edge EXIT.  By important we mean that no SSA name defined
6658    inside region is live over the other exit edges of the region.  All entry
6659    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6660    to the duplicate of the region.  Dominance and loop information is
6661    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6662    UPDATE_DOMINANCE is false then we assume that the caller will update the
6663    dominance information after calling this function.  The new basic
6664    blocks are stored to REGION_COPY in the same order as they had in REGION,
6665    provided that REGION_COPY is not NULL.
6666    The function returns false if it is unable to copy the region,
6667    true otherwise.  */
6669 bool
6670 gimple_duplicate_sese_region (edge entry, edge exit,
6671 			      basic_block *region, unsigned n_region,
6672 			      basic_block *region_copy,
6673 			      bool update_dominance)
6675   unsigned i;
6676   bool free_region_copy = false, copying_header = false;
6677   class loop *loop = entry->dest->loop_father;
6678   edge exit_copy;
6679   edge redirected;
6680   profile_count total_count = profile_count::uninitialized ();
6681   profile_count entry_count = profile_count::uninitialized ();
6683   if (!can_copy_bbs_p (region, n_region))
6684     return false;
6686   /* Some sanity checking.  Note that we do not check for all possible
6687      missuses of the functions.  I.e. if you ask to copy something weird,
6688      it will work, but the state of structures probably will not be
6689      correct.  */
6690   for (i = 0; i < n_region; i++)
6692       /* We do not handle subloops, i.e. all the blocks must belong to the
6693 	 same loop.  */
6694       if (region[i]->loop_father != loop)
6695 	return false;
6697       if (region[i] != entry->dest
6698 	  && region[i] == loop->header)
6699 	return false;
6702   /* In case the function is used for loop header copying (which is the primary
6703      use), ensure that EXIT and its copy will be new latch and entry edges.  */
6704   if (loop->header == entry->dest)
6706       copying_header = true;
6708       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6709 	return false;
6711       for (i = 0; i < n_region; i++)
6712 	if (region[i] != exit->src
6713 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6714 	  return false;
6717   initialize_original_copy_tables ();
/* When the header is copied the duplicate belongs to the outer loop;
   otherwise it stays in LOOP itself.  */
6719   if (copying_header)
6720     set_loop_copy (loop, loop_outer (loop));
6721   else
6722     set_loop_copy (loop, loop);
6724   if (!region_copy)
6726       region_copy = XNEWVEC (basic_block, n_region);
6727       free_region_copy = true;
6730   /* Record blocks outside the region that are dominated by something
6731      inside.  */
6732   auto_vec<basic_block> doms;
6733   if (update_dominance)
6735       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6738   if (entry->dest->count.initialized_p ())
6740       total_count = entry->dest->count;
6741       entry_count = entry->count ();
6742       /* Fix up corner cases, to avoid division by zero or creation of negative
6743 	 frequencies.  */
6744       if (entry_count > total_count)
6745 	entry_count = total_count;
6748   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6749 	    split_edge_bb_loc (entry), update_dominance);
/* Split the count: the original region keeps the non-entry share,
   the copy receives the entry share.  */
6750   if (total_count.initialized_p () && entry_count.initialized_p ())
6752       scale_bbs_frequencies_profile_count (region, n_region,
6753 					   total_count - entry_count,
6754 					   total_count);
6755       scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6756 					   total_count);
6759   if (copying_header)
6761       loop->header = exit->dest;
6762       loop->latch = exit->src;
6765   /* Redirect the entry and add the phi node arguments.  */
6766   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6767   gcc_assert (redirected != NULL);
6768   flush_pending_stmts (entry);
6770   /* Concerning updating of dominators: We must recount dominators
6771      for entry block and its copy.  Anything that is outside of the
6772      region, but was dominated by something inside needs recounting as
6773      well.  */
6774   if (update_dominance)
6776       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6777       doms.safe_push (get_bb_original (entry->dest));
6778       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6781   /* Add the other PHI node arguments.  */
6782   add_phi_args_after_copy (region_copy, n_region, NULL);
6784   if (free_region_copy)
6785     free (region_copy);
6787   free_original_copy_tables ();
6788   return true;
6791 /* Checks if BB is part of the region defined by N_REGION BBS. */
6792 static bool
6793 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6795 unsigned int n;
6797 for (n = 0; n < n_region; n++)
6799 if (bb == bbs[n])
6800 return true;
6802 return false;
6806 /* For each PHI in BB, copy the argument associated with SRC_E to TGT_E.
6807 Assuming the argument exists, just does not have a value. */
6809 void
6810 copy_phi_arg_into_existing_phi (edge src_e, edge tgt_e)
6812 int src_idx = src_e->dest_idx;
6813 int tgt_idx = tgt_e->dest_idx;
6815 /* Iterate over each PHI in e->dest. */
6816 for (gphi_iterator gsi = gsi_start_phis (src_e->dest),
6817 gsi2 = gsi_start_phis (tgt_e->dest);
6818 !gsi_end_p (gsi);
6819 gsi_next (&gsi), gsi_next (&gsi2))
6821 gphi *src_phi = gsi.phi ();
6822 gphi *dest_phi = gsi2.phi ();
6823 tree val = gimple_phi_arg_def (src_phi, src_idx);
6824 location_t locus = gimple_phi_arg_location (src_phi, src_idx);
6826 SET_PHI_ARG_DEF (dest_phi, tgt_idx, val);
6827 gimple_phi_arg_set_location (dest_phi, tgt_idx, locus);
6831 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6832 are stored to REGION_COPY in the same order in that they appear
6833 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6834 the region, EXIT an exit from it. The condition guarding EXIT
6835 is moved to ENTRY. Returns true if duplication succeeds, false
6836 otherwise.
6838 For example,
6840 some_code;
6841 if (cond)
6843 else
6846 is transformed to
6848 if (cond)
6850 some_code;
6853 else
6855 some_code;
6860 bool
6861 gimple_duplicate_sese_tail (edge entry, edge exit,
6862 basic_block *region, unsigned n_region,
6863 basic_block *region_copy)
6865 unsigned i;
6866 bool free_region_copy = false;
6867 class loop *loop = exit->dest->loop_father;
6868 class loop *orig_loop = entry->dest->loop_father;
6869 basic_block switch_bb, entry_bb, nentry_bb;
6870 profile_count total_count = profile_count::uninitialized (),
6871 exit_count = profile_count::uninitialized ();
6872 edge exits[2], nexits[2], e;
6873 gimple_stmt_iterator gsi;
6874 edge sorig, snew;
6875 basic_block exit_bb;
6876 class loop *target, *aloop, *cloop;
6878 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6879 exits[0] = exit;
6880 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6882 if (!can_copy_bbs_p (region, n_region))
6883 return false;
6885 initialize_original_copy_tables ();
6886 set_loop_copy (orig_loop, loop);
6888 target= loop;
6889 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6891 if (bb_part_of_region_p (aloop->header, region, n_region))
6893 cloop = duplicate_loop (aloop, target);
6894 duplicate_subloops (aloop, cloop);
6898 if (!region_copy)
6900 region_copy = XNEWVEC (basic_block, n_region);
6901 free_region_copy = true;
6904 gcc_assert (!need_ssa_update_p (cfun));
6906 /* Record blocks outside the region that are dominated by something
6907 inside. */
6908 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6909 n_region);
6911 total_count = exit->src->count;
6912 exit_count = exit->count ();
6913 /* Fix up corner cases, to avoid division by zero or creation of negative
6914 frequencies. */
6915 if (exit_count > total_count)
6916 exit_count = total_count;
6918 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6919 split_edge_bb_loc (exit), true);
6920 if (total_count.initialized_p () && exit_count.initialized_p ())
6922 scale_bbs_frequencies_profile_count (region, n_region,
6923 total_count - exit_count,
6924 total_count);
6925 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6926 total_count);
6929 /* Create the switch block, and put the exit condition to it. */
6930 entry_bb = entry->dest;
6931 nentry_bb = get_bb_copy (entry_bb);
6932 if (!*gsi_last_bb (entry->src)
6933 || !stmt_ends_bb_p (*gsi_last_bb (entry->src)))
6934 switch_bb = entry->src;
6935 else
6936 switch_bb = split_edge (entry);
6937 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6939 gcond *cond_stmt = as_a <gcond *> (*gsi_last_bb (exit->src));
6940 cond_stmt = as_a <gcond *> (gimple_copy (cond_stmt));
6942 gsi = gsi_last_bb (switch_bb);
6943 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6945 sorig = single_succ_edge (switch_bb);
6946 sorig->flags = exits[1]->flags;
6947 sorig->probability = exits[1]->probability;
6948 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6949 snew->probability = exits[0]->probability;
6952 /* Register the new edge from SWITCH_BB in loop exit lists. */
6953 rescan_loop_exit (snew, true, false);
6955 /* Add the PHI node arguments. */
6956 add_phi_args_after_copy (region_copy, n_region, snew);
6958 /* Get rid of now superfluous conditions and associated edges (and phi node
6959 arguments). */
6960 exit_bb = exit->dest;
6962 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6963 PENDING_STMT (e) = NULL;
6965 /* The latch of ORIG_LOOP was copied, and so was the backedge
6966 to the original header. We redirect this backedge to EXIT_BB. */
6967 for (i = 0; i < n_region; i++)
6968 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6970 gcc_assert (single_succ_edge (region_copy[i]));
6971 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6972 PENDING_STMT (e) = NULL;
6973 copy_phi_arg_into_existing_phi (nexits[0], e);
6975 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6976 PENDING_STMT (e) = NULL;
6978 /* Anything that is outside of the region, but was dominated by something
6979 inside needs to update dominance info. */
6980 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6982 if (free_region_copy)
6983 free (region_copy);
6985 free_original_copy_tables ();
6986 return true;
6989 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6990 adding blocks when the dominator traversal reaches EXIT. This
6991 function silently assumes that ENTRY strictly dominates EXIT. */
6993 void
6994 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6995 vec<basic_block> *bbs_p)
6997 basic_block son;
6999 for (son = first_dom_son (CDI_DOMINATORS, entry);
7000 son;
7001 son = next_dom_son (CDI_DOMINATORS, son))
7003 bbs_p->safe_push (son);
7004 if (son != exit)
7005 gather_blocks_in_sese_region (son, exit, bbs_p);
7009 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7010 The duplicates are recorded in VARS_MAP. */
7012 static void
7013 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
7014 tree to_context)
7016 tree t = *tp, new_t;
7017 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7019 if (DECL_CONTEXT (t) == to_context)
7020 return;
7022 bool existed;
7023 tree &loc = vars_map->get_or_insert (t, &existed);
7025 if (!existed)
7027 if (SSA_VAR_P (t))
7029 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7030 add_local_decl (f, new_t);
7032 else
7034 gcc_assert (TREE_CODE (t) == CONST_DECL);
7035 new_t = copy_node (t);
7037 DECL_CONTEXT (new_t) = to_context;
7039 loc = new_t;
7041 else
7042 new_t = loc;
7044 *tp = new_t;
7048 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7049 VARS_MAP maps old ssa names and var_decls to the new ones. */
7051 static tree
7052 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
7053 tree to_context)
7055 tree new_name;
7057 gcc_assert (!virtual_operand_p (name));
7059 tree *loc = vars_map->get (name);
7061 if (!loc)
7063 tree decl = SSA_NAME_VAR (name);
7064 if (decl)
7066 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
7067 replace_by_duplicate_decl (&decl, vars_map, to_context);
7068 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7069 decl, SSA_NAME_DEF_STMT (name));
7071 else
7072 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7073 name, SSA_NAME_DEF_STMT (name));
7075 /* Now that we've used the def stmt to define new_name, make sure it
7076 doesn't define name anymore. */
7077 SSA_NAME_DEF_STMT (name) = NULL;
7079 vars_map->put (name, new_name);
7081 else
7082 new_name = *loc;
7084 return new_name;
7087 struct move_stmt_d
7089 tree orig_block;
7090 tree new_block;
7091 tree from_context;
7092 tree to_context;
7093 hash_map<tree, tree> *vars_map;
7094 htab_t new_label_map;
7095 hash_map<void *, void *> *eh_map;
7096 bool remap_decls_p;
7099 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7100 contained in *TP if it has been ORIG_BLOCK previously and change the
7101 DECL_CONTEXT of every local variable referenced in *TP. */
7103 static tree
7104 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
7106 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7107 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7108 tree t = *tp;
7110 if (EXPR_P (t))
7112 tree block = TREE_BLOCK (t);
7113 if (block == NULL_TREE)
7115 else if (block == p->orig_block
7116 || p->orig_block == NULL_TREE)
7118 /* tree_node_can_be_shared says we can share invariant
7119 addresses but unshare_expr copies them anyways. Make sure
7120 to unshare before adjusting the block in place - we do not
7121 always see a copy here. */
7122 if (TREE_CODE (t) == ADDR_EXPR
7123 && is_gimple_min_invariant (t))
7124 *tp = t = unshare_expr (t);
7125 TREE_SET_BLOCK (t, p->new_block);
7127 else if (flag_checking)
7129 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7130 block = BLOCK_SUPERCONTEXT (block);
7131 gcc_assert (block == p->orig_block);
7134 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7136 if (TREE_CODE (t) == SSA_NAME)
7137 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7138 else if (TREE_CODE (t) == PARM_DECL
7139 && gimple_in_ssa_p (cfun))
7140 *tp = *(p->vars_map->get (t));
7141 else if (TREE_CODE (t) == LABEL_DECL)
7143 if (p->new_label_map)
7145 struct tree_map in, *out;
7146 in.base.from = t;
7147 out = (struct tree_map *)
7148 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7149 if (out)
7150 *tp = t = out->to;
7153 /* For FORCED_LABELs we can end up with references from other
7154 functions if some SESE regions are outlined. It is UB to
7155 jump in between them, but they could be used just for printing
7156 addresses etc. In that case, DECL_CONTEXT on the label should
7157 be the function containing the glabel stmt with that LABEL_DECL,
7158 rather than whatever function a reference to the label was seen
7159 last time. */
7160 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7161 DECL_CONTEXT (t) = p->to_context;
7163 else if (p->remap_decls_p)
7165 /* Replace T with its duplicate. T should no longer appear in the
7166 parent function, so this looks wasteful; however, it may appear
7167 in referenced_vars, and more importantly, as virtual operands of
7168 statements, and in alias lists of other variables. It would be
7169 quite difficult to expunge it from all those places. ??? It might
7170 suffice to do this for addressable variables. */
7171 if ((VAR_P (t) && !is_global_var (t))
7172 || TREE_CODE (t) == CONST_DECL)
7173 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7175 *walk_subtrees = 0;
7177 else if (TYPE_P (t))
7178 *walk_subtrees = 0;
7180 return NULL_TREE;
7183 /* Helper for move_stmt_r. Given an EH region number for the source
7184 function, map that to the duplicate EH regio number in the dest. */
7186 static int
7187 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7189 eh_region old_r, new_r;
7191 old_r = get_eh_region_from_number (old_nr);
7192 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7194 return new_r->index;
7197 /* Similar, but operate on INTEGER_CSTs. */
7199 static tree
7200 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7202 int old_nr, new_nr;
7204 old_nr = tree_to_shwi (old_t_nr);
7205 new_nr = move_stmt_eh_region_nr (old_nr, p);
7207 return build_int_cst (integer_type_node, new_nr);
7210 /* Like move_stmt_op, but for gimple statements.
7212 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7213 contained in the current statement in *GSI_P and change the
7214 DECL_CONTEXT of every local variable referenced in the current
7215 statement. */
7217 static tree
7218 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7219 struct walk_stmt_info *wi)
7221 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7222 gimple *stmt = gsi_stmt (*gsi_p);
7223 tree block = gimple_block (stmt);
7225 if (block == p->orig_block
7226 || (p->orig_block == NULL_TREE
7227 && block != NULL_TREE))
7228 gimple_set_block (stmt, p->new_block);
7230 switch (gimple_code (stmt))
7232 case GIMPLE_CALL:
7233 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7235 tree r, fndecl = gimple_call_fndecl (stmt);
7236 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7237 switch (DECL_FUNCTION_CODE (fndecl))
7239 case BUILT_IN_EH_COPY_VALUES:
7240 r = gimple_call_arg (stmt, 1);
7241 r = move_stmt_eh_region_tree_nr (r, p);
7242 gimple_call_set_arg (stmt, 1, r);
7243 /* FALLTHRU */
7245 case BUILT_IN_EH_POINTER:
7246 case BUILT_IN_EH_FILTER:
7247 r = gimple_call_arg (stmt, 0);
7248 r = move_stmt_eh_region_tree_nr (r, p);
7249 gimple_call_set_arg (stmt, 0, r);
7250 break;
7252 default:
7253 break;
7256 break;
7258 case GIMPLE_RESX:
7260 gresx *resx_stmt = as_a <gresx *> (stmt);
7261 int r = gimple_resx_region (resx_stmt);
7262 r = move_stmt_eh_region_nr (r, p);
7263 gimple_resx_set_region (resx_stmt, r);
7265 break;
7267 case GIMPLE_EH_DISPATCH:
7269 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7270 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7271 r = move_stmt_eh_region_nr (r, p);
7272 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7274 break;
7276 case GIMPLE_OMP_RETURN:
7277 case GIMPLE_OMP_CONTINUE:
7278 break;
7280 case GIMPLE_LABEL:
7282 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7283 so that such labels can be referenced from other regions.
7284 Make sure to update it when seeing a GIMPLE_LABEL though,
7285 that is the owner of the label. */
7286 walk_gimple_op (stmt, move_stmt_op, wi);
7287 *handled_ops_p = true;
7288 tree label = gimple_label_label (as_a <glabel *> (stmt));
7289 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7290 DECL_CONTEXT (label) = p->to_context;
7292 break;
7294 default:
7295 if (is_gimple_omp (stmt))
7297 /* Do not remap variables inside OMP directives. Variables
7298 referenced in clauses and directive header belong to the
7299 parent function and should not be moved into the child
7300 function. */
7301 bool save_remap_decls_p = p->remap_decls_p;
7302 p->remap_decls_p = false;
7303 *handled_ops_p = true;
7305 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7306 move_stmt_op, wi);
7308 p->remap_decls_p = save_remap_decls_p;
7310 break;
7313 return NULL_TREE;
7316 /* Move basic block BB from function CFUN to function DEST_FN. The
7317 block is moved out of the original linked list and placed after
7318 block AFTER in the new list. Also, the block is removed from the
7319 original array of blocks and placed in DEST_FN's array of blocks.
7320 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7321 updated to reflect the moved edges.
7323 The local variables are remapped to new instances, VARS_MAP is used
7324 to record the mapping. */
7326 static void
7327 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7328 basic_block after, bool update_edge_count_p,
7329 struct move_stmt_d *d)
7331 struct control_flow_graph *cfg;
7332 edge_iterator ei;
7333 edge e;
7334 gimple_stmt_iterator si;
7335 unsigned old_len;
7337 /* Remove BB from dominance structures. */
7338 delete_from_dominance_info (CDI_DOMINATORS, bb);
7340 /* Move BB from its current loop to the copy in the new function. */
7341 if (current_loops)
7343 class loop *new_loop = (class loop *)bb->loop_father->aux;
7344 if (new_loop)
7345 bb->loop_father = new_loop;
7348 /* Link BB to the new linked list. */
7349 move_block_after (bb, after);
7351 /* Update the edge count in the corresponding flowgraphs. */
7352 if (update_edge_count_p)
7353 FOR_EACH_EDGE (e, ei, bb->succs)
7355 cfun->cfg->x_n_edges--;
7356 dest_cfun->cfg->x_n_edges++;
7359 /* Remove BB from the original basic block array. */
7360 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7361 cfun->cfg->x_n_basic_blocks--;
7363 /* Grow DEST_CFUN's basic block array if needed. */
7364 cfg = dest_cfun->cfg;
7365 cfg->x_n_basic_blocks++;
7366 if (bb->index >= cfg->x_last_basic_block)
7367 cfg->x_last_basic_block = bb->index + 1;
7369 old_len = vec_safe_length (cfg->x_basic_block_info);
7370 if ((unsigned) cfg->x_last_basic_block >= old_len)
7371 vec_safe_grow_cleared (cfg->x_basic_block_info,
7372 cfg->x_last_basic_block + 1);
7374 (*cfg->x_basic_block_info)[bb->index] = bb;
7376 /* Remap the variables in phi nodes. */
7377 for (gphi_iterator psi = gsi_start_phis (bb);
7378 !gsi_end_p (psi); )
7380 gphi *phi = psi.phi ();
7381 use_operand_p use;
7382 tree op = PHI_RESULT (phi);
7383 ssa_op_iter oi;
7384 unsigned i;
7386 if (virtual_operand_p (op))
7388 /* Remove the phi nodes for virtual operands (alias analysis will be
7389 run for the new function, anyway). But replace all uses that
7390 might be outside of the region we move. */
7391 use_operand_p use_p;
7392 imm_use_iterator iter;
7393 gimple *use_stmt;
7394 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7395 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7396 SET_USE (use_p, SSA_NAME_VAR (op));
7397 remove_phi_node (&psi, true);
7398 continue;
7401 SET_PHI_RESULT (phi,
7402 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7403 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7405 op = USE_FROM_PTR (use);
7406 if (TREE_CODE (op) == SSA_NAME)
7407 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7410 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7412 location_t locus = gimple_phi_arg_location (phi, i);
7413 tree block = LOCATION_BLOCK (locus);
7415 if (locus == UNKNOWN_LOCATION)
7416 continue;
7417 if (d->orig_block == NULL_TREE || block == d->orig_block)
7419 locus = set_block (locus, d->new_block);
7420 gimple_phi_arg_set_location (phi, i, locus);
7424 gsi_next (&psi);
7427 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7429 gimple *stmt = gsi_stmt (si);
7430 struct walk_stmt_info wi;
7432 memset (&wi, 0, sizeof (wi));
7433 wi.info = d;
7434 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7436 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7438 tree label = gimple_label_label (label_stmt);
7439 int uid = LABEL_DECL_UID (label);
7441 gcc_assert (uid > -1);
7443 old_len = vec_safe_length (cfg->x_label_to_block_map);
7444 if (old_len <= (unsigned) uid)
7445 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7447 (*cfg->x_label_to_block_map)[uid] = bb;
7448 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7450 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7452 if (uid >= dest_cfun->cfg->last_label_uid)
7453 dest_cfun->cfg->last_label_uid = uid + 1;
7456 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7457 remove_stmt_from_eh_lp_fn (cfun, stmt);
7459 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7460 gimple_remove_stmt_histograms (cfun, stmt);
7462 /* We cannot leave any operands allocated from the operand caches of
7463 the current function. */
7464 free_stmt_operands (cfun, stmt);
7465 push_cfun (dest_cfun);
7466 update_stmt (stmt);
7467 if (is_gimple_call (stmt))
7468 notice_special_calls (as_a <gcall *> (stmt));
7469 pop_cfun ();
7472 FOR_EACH_EDGE (e, ei, bb->succs)
7473 if (e->goto_locus != UNKNOWN_LOCATION)
7475 tree block = LOCATION_BLOCK (e->goto_locus);
7476 if (d->orig_block == NULL_TREE
7477 || block == d->orig_block)
7478 e->goto_locus = set_block (e->goto_locus, d->new_block);
7482 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7483 the outermost EH region. Use REGION as the incoming base EH region.
7484 If there is no single outermost region, return NULL and set *ALL to
7485 true. */
7487 static eh_region
7488 find_outermost_region_in_block (struct function *src_cfun,
7489 basic_block bb, eh_region region,
7490 bool *all)
7492 gimple_stmt_iterator si;
7494 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7496 gimple *stmt = gsi_stmt (si);
7497 eh_region stmt_region;
7498 int lp_nr;
7500 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7501 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7502 if (stmt_region)
7504 if (region == NULL)
7505 region = stmt_region;
7506 else if (stmt_region != region)
7508 region = eh_region_outermost (src_cfun, stmt_region, region);
7509 if (region == NULL)
7511 *all = true;
7512 return NULL;
7518 return region;
7521 static tree
7522 new_label_mapper (tree decl, void *data)
7524 htab_t hash = (htab_t) data;
7525 struct tree_map *m;
7526 void **slot;
7528 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7530 m = XNEW (struct tree_map);
7531 m->hash = DECL_UID (decl);
7532 m->base.from = decl;
7533 m->to = create_artificial_label (UNKNOWN_LOCATION);
7534 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7535 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7536 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7538 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7539 gcc_assert (*slot == NULL);
7541 *slot = m;
7543 return m->to;
7546 /* Tree walker to replace the decls used inside value expressions by
7547 duplicates. */
7549 static tree
7550 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7552 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7554 switch (TREE_CODE (*tp))
7556 case VAR_DECL:
7557 case PARM_DECL:
7558 case RESULT_DECL:
7559 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7560 break;
7561 default:
7562 break;
7565 if (IS_TYPE_OR_DECL_P (*tp))
7566 *walk_subtrees = false;
7568 return NULL;
7571 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7572 subblocks. */
7574 static void
7575 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7576 tree to_context)
7578 tree *tp, t;
7580 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7582 t = *tp;
7583 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7584 continue;
7585 replace_by_duplicate_decl (&t, vars_map, to_context);
7586 if (t != *tp)
7588 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7590 tree x = DECL_VALUE_EXPR (*tp);
7591 struct replace_decls_d rd = { vars_map, to_context };
7592 unshare_expr (x);
7593 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7594 SET_DECL_VALUE_EXPR (t, x);
7595 DECL_HAS_VALUE_EXPR_P (t) = 1;
7597 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7598 *tp = t;
7602 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7603 replace_block_vars_by_duplicates (block, vars_map, to_context);
7606 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7607 from FN1 to FN2. */
7609 static void
7610 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7611 class loop *loop)
7613 /* Discard it from the old loop array. */
7614 (*get_loops (fn1))[loop->num] = NULL;
7616 /* Place it in the new loop array, assigning it a new number. */
7617 loop->num = number_of_loops (fn2);
7618 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7620 /* Recurse to children. */
7621 for (loop = loop->inner; loop; loop = loop->next)
7622 fixup_loop_arrays_after_move (fn1, fn2, loop);
7625 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7626 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7628 DEBUG_FUNCTION void
7629 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7631 basic_block bb;
7632 edge_iterator ei;
7633 edge e;
7634 bitmap bbs = BITMAP_ALLOC (NULL);
7635 int i;
7637 gcc_assert (entry != NULL);
7638 gcc_assert (entry != exit);
7639 gcc_assert (bbs_p != NULL);
7641 gcc_assert (bbs_p->length () > 0);
7643 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7644 bitmap_set_bit (bbs, bb->index);
7646 gcc_assert (bitmap_bit_p (bbs, entry->index));
7647 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7649 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7651 if (bb == entry)
7653 gcc_assert (single_pred_p (entry));
7654 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7656 else
7657 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7659 e = ei_edge (ei);
7660 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7663 if (bb == exit)
7665 gcc_assert (single_succ_p (exit));
7666 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7668 else
7669 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7671 e = ei_edge (ei);
7672 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7676 BITMAP_FREE (bbs);
7679 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7681 bool
7682 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7684 bitmap release_names = (bitmap)data;
7686 if (TREE_CODE (from) != SSA_NAME)
7687 return true;
7689 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7690 return true;
7693 /* Return LOOP_DIST_ALIAS call if present in BB. */
7695 static gimple *
7696 find_loop_dist_alias (basic_block bb)
7698 gimple_stmt_iterator gsi = gsi_last_bb (bb);
7699 if (!safe_is_a <gcond *> (*gsi))
7700 return NULL;
7702 gsi_prev (&gsi);
7703 if (gsi_end_p (gsi))
7704 return NULL;
7706 gimple *g = gsi_stmt (gsi);
7707 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7708 return g;
7709 return NULL;
7712 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7713 to VALUE and update any immediate uses of it's LHS. */
7715 void
7716 fold_loop_internal_call (gimple *g, tree value)
7718 tree lhs = gimple_call_lhs (g);
7719 use_operand_p use_p;
7720 imm_use_iterator iter;
7721 gimple *use_stmt;
7722 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7724 replace_call_with_value (&gsi, value);
7725 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7727 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7728 SET_USE (use_p, value);
7729 update_stmt (use_stmt);
7733 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7734 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7735 single basic block in the original CFG and the new basic block is
7736 returned. DEST_CFUN must not have a CFG yet.
7738 Note that the region need not be a pure SESE region. Blocks inside
7739 the region may contain calls to abort/exit. The only restriction
7740 is that ENTRY_BB should be the only entry point and it must
7741 dominate EXIT_BB.
7743 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7744 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7745 to the new function.
7747 All local variables referenced in the region are assumed to be in
7748 the corresponding BLOCK_VARS and unexpanded variable lists
7749 associated with DEST_CFUN.
7751 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7752 reimplement move_sese_region_to_fn by duplicating the region rather than
7753 moving it. */
7755 basic_block
7756 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7757 basic_block exit_bb, tree orig_block)
7759 vec<basic_block> bbs;
7760 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7761 basic_block after, bb, *entry_pred, *exit_succ, abb;
7762 struct function *saved_cfun = cfun;
7763 int *entry_flag, *exit_flag;
7764 profile_probability *entry_prob, *exit_prob;
7765 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7766 edge e;
7767 edge_iterator ei;
7768 htab_t new_label_map;
7769 hash_map<void *, void *> *eh_map;
7770 class loop *loop = entry_bb->loop_father;
7771 class loop *loop0 = get_loop (saved_cfun, 0);
7772 struct move_stmt_d d;
7774 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7775 region. */
7776 gcc_assert (entry_bb != exit_bb
7777 && (!exit_bb
7778 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7780 /* Collect all the blocks in the region. Manually add ENTRY_BB
7781 because it won't be added by dfs_enumerate_from. */
7782 bbs.create (0);
7783 bbs.safe_push (entry_bb);
7784 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7786 if (flag_checking)
7787 verify_sese (entry_bb, exit_bb, &bbs);
7789 /* The blocks that used to be dominated by something in BBS will now be
7790 dominated by the new block. */
7791 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7792 bbs.address (),
7793 bbs.length ());
7795 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7796 the predecessor edges to ENTRY_BB and the successor edges to
7797 EXIT_BB so that we can re-attach them to the new basic block that
7798 will replace the region. */
7799 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7800 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7801 entry_flag = XNEWVEC (int, num_entry_edges);
7802 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7803 i = 0;
7804 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7806 entry_prob[i] = e->probability;
7807 entry_flag[i] = e->flags;
7808 entry_pred[i++] = e->src;
7809 remove_edge (e);
7812 if (exit_bb)
7814 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7815 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7816 exit_flag = XNEWVEC (int, num_exit_edges);
7817 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7818 i = 0;
7819 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7821 exit_prob[i] = e->probability;
7822 exit_flag[i] = e->flags;
7823 exit_succ[i++] = e->dest;
7824 remove_edge (e);
7827 else
7829 num_exit_edges = 0;
7830 exit_succ = NULL;
7831 exit_flag = NULL;
7832 exit_prob = NULL;
7835 /* Switch context to the child function to initialize DEST_FN's CFG. */
7836 gcc_assert (dest_cfun->cfg == NULL);
7837 push_cfun (dest_cfun);
7839 init_empty_tree_cfg ();
7841 /* Initialize EH information for the new function. */
7842 eh_map = NULL;
7843 new_label_map = NULL;
7844 if (saved_cfun->eh)
7846 eh_region region = NULL;
7847 bool all = false;
7849 FOR_EACH_VEC_ELT (bbs, i, bb)
7851 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7852 if (all)
7853 break;
7856 init_eh_for_function ();
7857 if (region != NULL || all)
7859 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7860 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7861 new_label_mapper, new_label_map);
7865 /* Initialize an empty loop tree. */
7866 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7867 init_loops_structure (dest_cfun, loops, 1);
7868 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7869 set_loops_for_fn (dest_cfun, loops);
7871 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7873 /* Move the outlined loop tree part. */
7874 num_nodes = bbs.length ();
7875 FOR_EACH_VEC_ELT (bbs, i, bb)
7877 if (bb->loop_father->header == bb)
7879 class loop *this_loop = bb->loop_father;
7880 /* Avoid the need to remap SSA names used in nb_iterations. */
7881 free_numbers_of_iterations_estimates (this_loop);
7882 class loop *outer = loop_outer (this_loop);
7883 if (outer == loop
7884 /* If the SESE region contains some bbs ending with
7885 a noreturn call, those are considered to belong
7886 to the outermost loop in saved_cfun, rather than
7887 the entry_bb's loop_father. */
7888 || outer == loop0)
7890 if (outer != loop)
7891 num_nodes -= this_loop->num_nodes;
7892 flow_loop_tree_node_remove (bb->loop_father);
7893 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7894 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7897 else if (bb->loop_father == loop0 && loop0 != loop)
7898 num_nodes--;
7900 /* Remove loop exits from the outlined region. */
7901 if (loops_for_fn (saved_cfun)->exits)
7902 FOR_EACH_EDGE (e, ei, bb->succs)
7904 struct loops *l = loops_for_fn (saved_cfun);
7905 loop_exit **slot
7906 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7907 NO_INSERT);
7908 if (slot)
7909 l->exits->clear_slot (slot);
7913 /* Adjust the number of blocks in the tree root of the outlined part. */
7914 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7916 /* Setup a mapping to be used by move_block_to_fn. */
7917 loop->aux = current_loops->tree_root;
7918 loop0->aux = current_loops->tree_root;
7920 /* Fix up orig_loop_num. If the block referenced in it has been moved
7921 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7922 signed char *moved_orig_loop_num = NULL;
7923 for (auto dloop : loops_list (dest_cfun, 0))
7924 if (dloop->orig_loop_num)
7926 if (moved_orig_loop_num == NULL)
7927 moved_orig_loop_num
7928 = XCNEWVEC (signed char, vec_safe_length (larray));
7929 if ((*larray)[dloop->orig_loop_num] != NULL
7930 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7932 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7933 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7934 moved_orig_loop_num[dloop->orig_loop_num]++;
7935 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7937 else
7939 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7940 dloop->orig_loop_num = 0;
7943 pop_cfun ();
7945 if (moved_orig_loop_num)
7947 FOR_EACH_VEC_ELT (bbs, i, bb)
7949 gimple *g = find_loop_dist_alias (bb);
7950 if (g == NULL)
7951 continue;
7953 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7954 gcc_assert (orig_loop_num
7955 && (unsigned) orig_loop_num < vec_safe_length (larray));
7956 if (moved_orig_loop_num[orig_loop_num] == 2)
7958 /* If we have moved both loops with this orig_loop_num into
7959 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7960 too, update the first argument. */
7961 gcc_assert ((*larray)[orig_loop_num] != NULL
7962 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7963 tree t = build_int_cst (integer_type_node,
7964 (*larray)[orig_loop_num]->num);
7965 gimple_call_set_arg (g, 0, t);
7966 update_stmt (g);
7967 /* Make sure the following loop will not update it. */
7968 moved_orig_loop_num[orig_loop_num] = 0;
7970 else
7971 /* Otherwise at least one of the loops stayed in saved_cfun.
7972 Remove the LOOP_DIST_ALIAS call. */
7973 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7975 FOR_EACH_BB_FN (bb, saved_cfun)
7977 gimple *g = find_loop_dist_alias (bb);
7978 if (g == NULL)
7979 continue;
7980 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7981 gcc_assert (orig_loop_num
7982 && (unsigned) orig_loop_num < vec_safe_length (larray));
7983 if (moved_orig_loop_num[orig_loop_num])
7984 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7985 of the corresponding loops was moved, remove it. */
7986 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7988 XDELETEVEC (moved_orig_loop_num);
7990 ggc_free (larray);
7992 /* Move blocks from BBS into DEST_CFUN. */
7993 gcc_assert (bbs.length () >= 2);
7994 after = dest_cfun->cfg->x_entry_block_ptr;
7995 hash_map<tree, tree> vars_map;
7997 memset (&d, 0, sizeof (d));
7998 d.orig_block = orig_block;
7999 d.new_block = DECL_INITIAL (dest_cfun->decl);
8000 d.from_context = cfun->decl;
8001 d.to_context = dest_cfun->decl;
8002 d.vars_map = &vars_map;
8003 d.new_label_map = new_label_map;
8004 d.eh_map = eh_map;
8005 d.remap_decls_p = true;
8007 if (gimple_in_ssa_p (cfun))
8008 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
8010 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
8011 set_ssa_default_def (dest_cfun, arg, narg);
8012 vars_map.put (arg, narg);
8015 FOR_EACH_VEC_ELT (bbs, i, bb)
8017 /* No need to update edge counts on the last block. It has
8018 already been updated earlier when we detached the region from
8019 the original CFG. */
8020 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8021 after = bb;
8024 /* Adjust the maximum clique used. */
8025 dest_cfun->last_clique = saved_cfun->last_clique;
8027 loop->aux = NULL;
8028 loop0->aux = NULL;
8029 /* Loop sizes are no longer correct, fix them up. */
8030 loop->num_nodes -= num_nodes;
8031 for (class loop *outer = loop_outer (loop);
8032 outer; outer = loop_outer (outer))
8033 outer->num_nodes -= num_nodes;
8034 loop0->num_nodes -= bbs.length () - num_nodes;
8036 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8038 class loop *aloop;
8039 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8040 if (aloop != NULL)
8042 if (aloop->simduid)
8044 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8045 d.to_context);
8046 dest_cfun->has_simduid_loops = true;
8048 if (aloop->force_vectorize)
8049 dest_cfun->has_force_vectorize_loops = true;
8053 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8054 if (orig_block)
8056 tree block;
8057 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8058 == NULL_TREE);
8059 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8060 = BLOCK_SUBBLOCKS (orig_block);
8061 for (block = BLOCK_SUBBLOCKS (orig_block);
8062 block; block = BLOCK_CHAIN (block))
8063 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8064 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8067 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8068 &vars_map, dest_cfun->decl);
8070 if (new_label_map)
8071 htab_delete (new_label_map);
8072 if (eh_map)
8073 delete eh_map;
8075 /* We need to release ssa-names in a defined order, so first find them,
8076 and then iterate in ascending version order. */
8077 bitmap release_names = BITMAP_ALLOC (NULL);
8078 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8079 bitmap_iterator bi;
8080 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8081 release_ssa_name (ssa_name (i));
8082 BITMAP_FREE (release_names);
8084 /* Rewire the entry and exit blocks. The successor to the entry
8085 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8086 the child function. Similarly, the predecessor of DEST_FN's
8087 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8088 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8089 various CFG manipulation function get to the right CFG.
8091 FIXME, this is silly. The CFG ought to become a parameter to
8092 these helpers. */
8093 push_cfun (dest_cfun);
8094 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8095 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8096 if (exit_bb)
8098 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8099 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8101 else
8102 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8103 pop_cfun ();
8105 /* Back in the original function, the SESE region has disappeared,
8106 create a new basic block in its place. */
8107 bb = create_empty_bb (entry_pred[0]);
8108 if (current_loops)
8109 add_bb_to_loop (bb, loop);
8110 for (i = 0; i < num_entry_edges; i++)
8112 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8113 e->probability = entry_prob[i];
8116 for (i = 0; i < num_exit_edges; i++)
8118 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8119 e->probability = exit_prob[i];
8122 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8123 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8124 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8126 if (exit_bb)
8128 free (exit_prob);
8129 free (exit_flag);
8130 free (exit_succ);
8132 free (entry_prob);
8133 free (entry_flag);
8134 free (entry_pred);
8135 bbs.release ();
8137 return bb;
8140 /* Dump default def DEF to file FILE using FLAGS and indentation
8141 SPC. */
8143 static void
8144 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8146 for (int i = 0; i < spc; ++i)
8147 fprintf (file, " ");
8148 dump_ssaname_info_to_file (file, def, spc);
8150 print_generic_expr (file, TREE_TYPE (def), flags);
8151 fprintf (file, " ");
8152 print_generic_expr (file, def, flags);
8153 fprintf (file, " = ");
8154 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8155 fprintf (file, ";\n");
8158 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8160 static void
8161 print_no_sanitize_attr_value (FILE *file, tree value)
8163 unsigned int flags = tree_to_uhwi (value);
8164 bool first = true;
8165 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8167 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8169 if (!first)
8170 fprintf (file, " | ");
8171 fprintf (file, "%s", sanitizer_opts[i].name);
8172 first = false;
8177 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
/* NOTE(review): this chunk is a web dump that elides blank and
   brace-only lines; all code tokens below are kept verbatim.  */
8180 void
8181 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8183 tree arg, var, old_current_fndecl = current_function_decl;
8184 struct function *dsf;
8185 bool ignore_topmost_bind = false, any_var = false;
8186 basic_block bb;
8187 tree chain;
8188 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8189 && decl_is_tm_clone (fndecl));
8190 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8192 tree fntype = TREE_TYPE (fndecl);
8193 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
/* Emit the decl's attributes (i == 0) and the type's attributes
   (i == 1) as a single __attribute__((...)) list each.  */
8195 for (int i = 0; i != 2; ++i)
8197 if (!attrs[i])
8198 continue;
8200 fprintf (file, "__attribute__((");
8202 bool first = true;
/* NOTE(review): this inner CHAIN shadows the function-scope CHAIN
   declared above; harmless here but easy to misread.  */
8203 tree chain;
8204 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8206 if (!first)
8207 fprintf (file, ", ");
8209 tree name = get_attribute_name (chain);
8210 print_generic_expr (file, name, dump_flags);
8211 if (TREE_VALUE (chain) != NULL_TREE)
8213 fprintf (file, " (");
/* no_sanitize carries a bitmask, print it symbolically.  */
8215 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8216 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8217 else
8218 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8219 fprintf (file, ")");
8223 fprintf (file, "))\n");
8226 current_function_decl = fndecl;
/* For TDF_GIMPLE, emit a header the GIMPLE frontend can parse back.  */
8227 if (flags & TDF_GIMPLE)
8229 static bool hotness_bb_param_printed = false;
/* Emit the hot-bb threshold param once per dump file.  */
8230 if (profile_info != NULL
8231 && !hotness_bb_param_printed)
8233 hotness_bb_param_printed = true;
8234 fprintf (file,
8235 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8236 " */\n", get_hot_bb_threshold ());
8239 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8240 dump_flags | TDF_SLIM);
8241 fprintf (file, " __GIMPLE (%s",
8242 (fun->curr_properties & PROP_ssa) ? "ssa"
8243 : (fun->curr_properties & PROP_cfg) ? "cfg"
8244 : "");
8246 if (fun && fun->cfg)
8248 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8249 if (bb->count.initialized_p ())
8250 fprintf (file, ",%s(%" PRIu64 ")",
8251 profile_quality_as_string (bb->count.quality ()),
8252 bb->count.value ());
8253 if (dump_flags & TDF_UID)
8254 fprintf (file, ")\n%sD_%u (", function_name (fun),
8255 DECL_UID (fndecl));
8256 else
8257 fprintf (file, ")\n%s (", function_name (fun));
8260 else
8262 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8263 if (dump_flags & TDF_UID)
8264 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8265 tmclone ? "[tm-clone] " : "");
8266 else
8267 fprintf (file, " %s %s(", function_name (fun),
8268 tmclone ? "[tm-clone] " : "");
/* Print the parameter list.  */
8271 arg = DECL_ARGUMENTS (fndecl);
8272 while (arg)
8274 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8275 fprintf (file, " ");
8276 print_generic_expr (file, arg, dump_flags);
8277 if (DECL_CHAIN (arg))
8278 fprintf (file, ", ");
8279 arg = DECL_CHAIN (arg);
8281 fprintf (file, ")\n");
8283 dsf = DECL_STRUCT_FUNCTION (fndecl);
8284 if (dsf && (flags & TDF_EH))
8285 dump_eh_tree (file, dsf);
/* With TDF_RAW and no gimple body, dump the raw tree and bail out.  */
8287 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8289 dump_node (fndecl, TDF_SLIM | flags, file);
8290 current_function_decl = old_current_fndecl;
8291 return;
8294 /* When GIMPLE is lowered, the variables are no longer available in
8295 BIND_EXPRs, so display them separately. */
8296 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8298 unsigned ix;
8299 ignore_topmost_bind = true;
8301 fprintf (file, "{\n");
/* With TDF_ALIAS in SSA form, show the default defs of the
   arguments, the by-reference result and the static chain.  */
8302 if (gimple_in_ssa_p (fun)
8303 && (flags & TDF_ALIAS))
8305 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8306 arg = DECL_CHAIN (arg))
8308 tree def = ssa_default_def (fun, arg);
8309 if (def)
8310 dump_default_def (file, def, 2, flags);
8313 tree res = DECL_RESULT (fun->decl);
8314 if (res != NULL_TREE
8315 && DECL_BY_REFERENCE (res))
8317 tree def = ssa_default_def (fun, res);
8318 if (def)
8319 dump_default_def (file, def, 2, flags);
8322 tree static_chain = fun->static_chain_decl;
8323 if (static_chain != NULL_TREE)
8325 tree def = ssa_default_def (fun, static_chain);
8326 if (def)
8327 dump_default_def (file, def, 2, flags);
8331 if (!vec_safe_is_empty (fun->local_decls))
8332 FOR_EACH_LOCAL_DECL (fun, ix, var)
8334 print_generic_decl (file, var, flags);
8335 fprintf (file, "\n");
8337 any_var = true;
8340 tree name;
/* List SSA names that have no underlying decl (or an unnamed one).  */
8342 if (gimple_in_ssa_p (fun))
8343 FOR_EACH_SSA_NAME (ix, name, fun)
8345 if (!SSA_NAME_VAR (name)
8346 /* SSA name with decls without a name still get
8347 dumped as _N, list those explicitly as well even
8348 though we've dumped the decl declaration as D.xxx
8349 above. */
8350 || !SSA_NAME_IDENTIFIER (name))
8352 fprintf (file, " ");
8353 print_generic_expr (file, TREE_TYPE (name), flags);
8354 fprintf (file, " ");
8355 print_generic_expr (file, name, flags);
8356 fprintf (file, ";\n");
8358 any_var = true;
/* If the CFG has been built, emit a CFG-based dump; otherwise emit
   either the flat GIMPLE sequence or a tree-based dump below.  */
8363 if (fun && fun->decl == fndecl
8364 && fun->cfg
8365 && basic_block_info_for_fn (fun))
8367 /* If the CFG has been built, emit a CFG-based dump. */
8368 if (!ignore_topmost_bind)
8369 fprintf (file, "{\n");
8371 if (any_var && n_basic_blocks_for_fn (fun))
8372 fprintf (file, "\n");
8374 FOR_EACH_BB_FN (bb, fun)
8375 dump_bb (file, bb, 2, flags);
8377 fprintf (file, "}\n");
8379 else if (fun && (fun->curr_properties & PROP_gimple_any))
8381 /* The function is now in GIMPLE form but the CFG has not been
8382 built yet. Emit the single sequence of GIMPLE statements
8383 that make up its body. */
8384 gimple_seq body = gimple_body (fndecl);
/* A body that is a lone GIMPLE_BIND already brackets itself.  */
8386 if (gimple_seq_first_stmt (body)
8387 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8388 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8389 print_gimple_seq (file, body, 0, flags);
8390 else
8392 if (!ignore_topmost_bind)
8393 fprintf (file, "{\n");
8395 if (any_var)
8396 fprintf (file, "\n");
8398 print_gimple_seq (file, body, 2, flags);
8399 fprintf (file, "}\n");
8402 else
8404 int indent;
8406 /* Make a tree based dump. */
8407 chain = DECL_SAVED_TREE (fndecl);
8408 if (chain && TREE_CODE (chain) == BIND_EXPR)
8410 if (ignore_topmost_bind)
8412 chain = BIND_EXPR_BODY (chain);
8413 indent = 2;
8415 else
8416 indent = 0;
8418 else
8420 if (!ignore_topmost_bind)
8422 fprintf (file, "{\n");
8423 /* No topmost bind, pretend it's ignored for later. */
8424 ignore_topmost_bind = true;
8426 indent = 2;
8429 if (any_var)
8430 fprintf (file, "\n");
8432 print_generic_stmt_indented (file, chain, flags, indent);
8433 if (ignore_topmost_bind)
8434 fprintf (file, "}\n");
8437 if (flags & TDF_ENUMERATE_LOCALS)
8438 dump_enumerated_decls (file, flags);
8439 fprintf (file, "\n\n");
/* Restore the caller's notion of the current function.  */
8441 current_function_decl = old_current_fndecl;
8444 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8446 DEBUG_FUNCTION void
8447 debug_function (tree fn, dump_flags_t flags)
8449 dump_function_to_file (fn, stderr, flags);
8453 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8455 static void
8456 print_pred_bbs (FILE *file, basic_block bb)
8458 edge e;
8459 edge_iterator ei;
8461 FOR_EACH_EDGE (e, ei, bb->preds)
8462 fprintf (file, "bb_%d ", e->src->index);
8466 /* Print on FILE the indexes for the successors of basic_block BB. */
8468 static void
8469 print_succ_bbs (FILE *file, basic_block bb)
8471 edge e;
8472 edge_iterator ei;
8474 FOR_EACH_EDGE (e, ei, bb->succs)
8475 fprintf (file, "bb_%d ", e->dest->index);
8478 /* Print to FILE the basic block BB following the VERBOSITY level. */
8480 void
8481 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8483 char *s_indent = (char *) alloca ((size_t) indent + 1);
8484 memset ((void *) s_indent, ' ', (size_t) indent);
8485 s_indent[indent] = '\0';
8487 /* Print basic_block's header. */
8488 if (verbosity >= 2)
8490 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8491 print_pred_bbs (file, bb);
8492 fprintf (file, "}, succs = {");
8493 print_succ_bbs (file, bb);
8494 fprintf (file, "})\n");
8497 /* Print basic_block's body. */
8498 if (verbosity >= 3)
8500 fprintf (file, "%s {\n", s_indent);
8501 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8502 fprintf (file, "%s }\n", s_indent);
8506 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8508 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8509 VERBOSITY level this outputs the contents of the loop, or just its
8510 structure. */
8512 static void
8513 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8515 char *s_indent;
8516 basic_block bb;
8518 if (loop == NULL)
8519 return;
8521 s_indent = (char *) alloca ((size_t) indent + 1);
8522 memset ((void *) s_indent, ' ', (size_t) indent);
8523 s_indent[indent] = '\0';
8525 /* Print loop's header. */
8526 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8527 if (loop->header)
8528 fprintf (file, "header = %d", loop->header->index);
8529 else
8531 fprintf (file, "deleted)\n");
8532 return;
8534 if (loop->latch)
8535 fprintf (file, ", latch = %d", loop->latch->index);
8536 else
8537 fprintf (file, ", multiple latches");
8538 fprintf (file, ", niter = ");
8539 print_generic_expr (file, loop->nb_iterations);
8541 if (loop->any_upper_bound)
8543 fprintf (file, ", upper_bound = ");
8544 print_decu (loop->nb_iterations_upper_bound, file);
8546 if (loop->any_likely_upper_bound)
8548 fprintf (file, ", likely_upper_bound = ");
8549 print_decu (loop->nb_iterations_likely_upper_bound, file);
8552 if (loop->any_estimate)
8554 fprintf (file, ", estimate = ");
8555 print_decu (loop->nb_iterations_estimate, file);
8557 if (loop->unroll)
8558 fprintf (file, ", unroll = %d", loop->unroll);
8559 fprintf (file, ")\n");
8561 /* Print loop's body. */
8562 if (verbosity >= 1)
8564 fprintf (file, "%s{\n", s_indent);
8565 FOR_EACH_BB_FN (bb, cfun)
8566 if (bb->loop_father == loop)
8567 print_loops_bb (file, bb, indent, verbosity);
8569 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8570 fprintf (file, "%s}\n", s_indent);
8574 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8575 spaces. Following VERBOSITY level this outputs the contents of the
8576 loop, or just its structure. */
8578 static void
8579 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8580 int verbosity)
8582 if (loop == NULL)
8583 return;
8585 print_loop (file, loop, indent, verbosity);
8586 print_loop_and_siblings (file, loop->next, indent, verbosity);
8589 /* Follow a CFG edge from the entry point of the program, and on entry
8590 of a loop, pretty print the loop structure on FILE. */
8592 void
8593 print_loops (FILE *file, int verbosity)
8595 basic_block bb;
8597 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8598 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8599 if (bb && bb->loop_father)
8600 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8603 /* Dump a loop. */
8605 DEBUG_FUNCTION void
8606 debug (class loop &ref)
8608 print_loop (stderr, &ref, 0, /*verbosity*/0);
8611 DEBUG_FUNCTION void
8612 debug (class loop *ptr)
8614 if (ptr)
8615 debug (*ptr);
8616 else
8617 fprintf (stderr, "<nil>\n");
8620 /* Dump a loop verbosely. */
8622 DEBUG_FUNCTION void
8623 debug_verbose (class loop &ref)
8625 print_loop (stderr, &ref, 0, /*verbosity*/3);
8628 DEBUG_FUNCTION void
8629 debug_verbose (class loop *ptr)
8631 if (ptr)
8632 debug (*ptr);
8633 else
8634 fprintf (stderr, "<nil>\n");
8638 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8640 DEBUG_FUNCTION void
8641 debug_loops (int verbosity)
8643 print_loops (stderr, verbosity);
8646 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8648 DEBUG_FUNCTION void
8649 debug_loop (class loop *loop, int verbosity)
8651 print_loop (stderr, loop, 0, verbosity);
8654 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8655 level. */
8657 DEBUG_FUNCTION void
8658 debug_loop_num (unsigned num, int verbosity)
8660 debug_loop (get_loop (cfun, num), verbosity);
8663 /* Return true if BB ends with a call, possibly followed by some
8664 instructions that must stay with the call. Return false,
8665 otherwise. */
8667 static bool
8668 gimple_block_ends_with_call_p (basic_block bb)
8670 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8671 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8675 /* Return true if BB ends with a conditional branch. Return false,
8676 otherwise. */
8678 static bool
8679 gimple_block_ends_with_condjump_p (const_basic_block bb)
8681 return safe_is_a <gcond *> (*gsi_last_bb (const_cast <basic_block> (bb)));
8685 /* Return true if statement T may terminate execution of BB in ways not
8686 explicitly represented in the CFG. */
8688 bool
8689 stmt_can_terminate_bb_p (gimple *t)
8691 tree fndecl = NULL_TREE;
8692 int call_flags = 0;
8694 /* Eh exception not handled internally terminates execution of the whole
8695 function. */
8696 if (stmt_can_throw_external (cfun, t))
8697 return true;
8699 /* NORETURN and LONGJMP calls already have an edge to exit.
8700 CONST and PURE calls do not need one.
8701 We don't currently check for CONST and PURE here, although
8702 it would be a good idea, because those attributes are
8703 figured out from the RTL in mark_constant_function, and
8704 the counter incrementation code from -fprofile-arcs
8705 leads to different results from -fbranch-probabilities. */
8706 if (is_gimple_call (t))
8708 fndecl = gimple_call_fndecl (t);
8709 call_flags = gimple_call_flags (t);
/* A known nothrow builtin that cannot return twice is considered
   unable to terminate the block in unmodelled ways.  */
8712 if (is_gimple_call (t)
8713 && fndecl
8714 && fndecl_built_in_p (fndecl)
8715 && (call_flags & ECF_NOTHROW)
8716 && !(call_flags & ECF_RETURNS_TWICE)
8717 /* fork() doesn't really return twice, but the effect of
8718 wrapping it in __gcov_fork() which calls __gcov_dump() and
8719 __gcov_reset() and clears the counters before forking has the same
8720 effect as returning twice. Force a fake edge. */
8721 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8722 return false;
8724 if (is_gimple_call (t))
8726 edge_iterator ei;
8727 edge e;
8728 basic_block bb;
8730 if (call_flags & (ECF_PURE | ECF_CONST)
8731 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8732 return false;
8734 /* Function call may do longjmp, terminate program or do other things.
8735 Special case noreturn that have non-abnormal edges out as in this case
8736 the fact is sufficiently represented by lack of edges out of T. */
8737 if (!(call_flags & ECF_NORETURN))
8738 return true;
/* For a NORETURN call, only report termination if every outgoing
   edge is fake, i.e. nothing real models the fall-through.  */
8740 bb = gimple_bb (t);
8741 FOR_EACH_EDGE (e, ei, bb->succs)
8742 if ((e->flags & EDGE_FAKE) == 0)
8743 return true;
/* Volatile and input asms are conservatively assumed to be able to
   terminate the block.  */
8746 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8747 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8748 return true;
8750 return false;
8754 /* Add fake edges to the function exit for any non constant and non
8755 noreturn calls (or noreturn calls with EH/abnormal edges),
8756 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8757 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8758 that were split.
8760 The goal is to expose cases in which entering a basic block does
8761 not imply that all subsequent instructions must be executed. */
8763 static int
8764 gimple_flow_call_edges_add (sbitmap blocks)
8766 int i;
8767 int blocks_split = 0;
8768 int last_bb = last_basic_block_for_fn (cfun);
8769 bool check_last_block = false;
8771 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8772 return 0;
8774 if (! blocks)
8775 check_last_block = true;
8776 else
8777 check_last_block = bitmap_bit_p (blocks,
8778 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8780 /* In the last basic block, before epilogue generation, there will be
8781 a fallthru edge to EXIT. Special care is required if the last insn
8782 of the last basic block is a call because make_edge folds duplicate
8783 edges, which would result in the fallthru edge also being marked
8784 fake, which would result in the fallthru edge being removed by
8785 remove_fake_edges, which would result in an invalid CFG.
8787 Moreover, we can't elide the outgoing fake edge, since the block
8788 profiler needs to take this into account in order to solve the minimal
8789 spanning tree in the case that the call doesn't return.
8791 Handle this by adding a dummy instruction in a new last basic block. */
8792 if (check_last_block)
8794 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8795 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8796 gimple *t = NULL;
8798 if (!gsi_end_p (gsi))
8799 t = gsi_stmt (gsi);
/* Insert a nop on the exit fallthru edge so the call is no longer
   last in its block.  */
8801 if (t && stmt_can_terminate_bb_p (t))
8803 edge e;
8805 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8806 if (e)
8808 gsi_insert_on_edge (e, gimple_build_nop ());
8809 gsi_commit_edge_inserts ();
8814 /* Now add fake edges to the function exit for any non constant
8815 calls since there is no way that we can determine if they will
8816 return or not... */
8817 for (i = 0; i < last_bb; i++)
8819 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8820 gimple_stmt_iterator gsi;
8821 gimple *stmt, *last_stmt;
8823 if (!bb)
8824 continue;
8826 if (blocks && !bitmap_bit_p (blocks, i))
8827 continue;
8829 gsi = gsi_last_nondebug_bb (bb);
8830 if (!gsi_end_p (gsi))
8832 last_stmt = gsi_stmt (gsi);
/* NOTE(review): the enclosing do { ... } while loop's opening lines
   were elided by this dump; the scan below walks statements backwards
   from the end of the block (see the gsi_prev/while pair further
   down).  */
8835 stmt = gsi_stmt (gsi);
8836 if (stmt_can_terminate_bb_p (stmt))
8838 edge e;
8840 /* The handling above of the final block before the
8841 epilogue should be enough to verify that there is
8842 no edge to the exit block in CFG already.
8843 Calling make_edge in such case would cause us to
8844 mark that edge as fake and remove it later. */
8845 if (flag_checking && stmt == last_stmt)
8847 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8848 gcc_assert (e == NULL);
8851 /* Note that the following may create a new basic block
8852 and renumber the existing basic blocks. */
8853 if (stmt != last_stmt)
8855 e = split_block (bb, stmt);
8856 if (e)
8857 blocks_split++;
8859 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8860 e->probability = profile_probability::guessed_never ();
8862 gsi_prev (&gsi);
8864 while (!gsi_end_p (gsi));
8868 if (blocks_split)
8869 checking_verify_flow_info ();
8871 return blocks_split;
8874 /* Removes edge E and all the blocks dominated by it, and updates dominance
8875 information. The IL in E->src needs to be updated separately.
8876 If dominance info is not available, only the edge E is removed.*/
8878 void
8879 remove_edge_and_dominated_blocks (edge e)
8881 vec<basic_block> bbs_to_fix_dom = vNULL;
8882 edge f;
8883 edge_iterator ei;
8884 bool none_removed = false;
8885 unsigned i;
8886 basic_block bb, dbb;
8887 bitmap_iterator bi;
8889 /* If we are removing a path inside a non-root loop that may change
8890 loop ownership of blocks or remove loops. Mark loops for fixup. */
8891 if (current_loops
8892 && loop_outer (e->src->loop_father) != NULL
8893 && e->src->loop_father == e->dest->loop_father)
8894 loops_state_set (LOOPS_NEED_FIXUP);
/* Without dominator info we can only remove the edge itself.  */
8896 if (!dom_info_available_p (CDI_DOMINATORS))
8898 remove_edge (e);
8899 return;
8902 /* No updating is needed for edges to exit. */
8903 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8905 if (cfgcleanup_altered_bbs)
8906 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8907 remove_edge (e);
8908 return;
8911 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8912 that is not dominated by E->dest, then this set is empty. Otherwise,
8913 all the basic blocks dominated by E->dest are removed.
8915 Also, to DF_IDOM we store the immediate dominators of the blocks in
8916 the dominance frontier of E (i.e., of the successors of the
8917 removed blocks, if there are any, and of E->dest otherwise). */
8918 FOR_EACH_EDGE (f, ei, e->dest->preds)
8920 if (f == e)
8921 continue;
8923 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8925 none_removed = true;
8926 break;
8930 auto_bitmap df, df_idom;
8931 auto_vec<basic_block> bbs_to_remove;
8932 if (none_removed)
8933 bitmap_set_bit (df_idom,
8934 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8935 else
/* Collect all blocks dominated by E->dest, compute their dominance
   frontier DF, then record the idoms of the frontier in DF_IDOM.  */
8937 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8938 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8940 FOR_EACH_EDGE (f, ei, bb->succs)
8942 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8943 bitmap_set_bit (df, f->dest->index);
8946 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8947 bitmap_clear_bit (df, bb->index);
8949 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8951 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8952 bitmap_set_bit (df_idom,
8953 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8957 if (cfgcleanup_altered_bbs)
8959 /* Record the set of the altered basic blocks. */
8960 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8961 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8964 /* Remove E and the cancelled blocks. */
8965 if (none_removed)
8966 remove_edge (e);
8967 else
8969 /* Walk backwards so as to get a chance to substitute all
8970 released DEFs into debug stmts. See
8971 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
8972 details. */
8973 for (i = bbs_to_remove.length (); i-- > 0; )
8974 delete_basic_block (bbs_to_remove[i]);
8977 /* Update the dominance information. The immediate dominator may change only
8978 for blocks whose immediate dominator belongs to DF_IDOM:
8980 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8981 removal. Let Z the arbitrary block such that idom(Z) = Y and
8982 Z dominates X after the removal. Before removal, there exists a path P
8983 from Y to X that avoids Z. Let F be the last edge on P that is
8984 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8985 dominates W, and because of P, Z does not dominate W), and W belongs to
8986 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8987 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8989 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8990 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8991 dbb;
8992 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8993 bbs_to_fix_dom.safe_push (dbb);
8996 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8998 bbs_to_fix_dom.release ();
9001 /* Purge dead EH edges from basic block BB. */
9003 bool
9004 gimple_purge_dead_eh_edges (basic_block bb)
9006 bool changed = false;
9007 edge e;
9008 edge_iterator ei;
9009 gimple *stmt = *gsi_last_bb (bb);
9011 if (stmt && stmt_can_throw_internal (cfun, stmt))
9012 return false;
9014 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9016 if (e->flags & EDGE_EH)
9018 remove_edge_and_dominated_blocks (e);
9019 changed = true;
9021 else
9022 ei_next (&ei);
9025 return changed;
9028 /* Purge dead EH edges from basic block listed in BLOCKS. */
9030 bool
9031 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9033 bool changed = false;
9034 unsigned i;
9035 bitmap_iterator bi;
9037 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9039 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9041 /* Earlier gimple_purge_dead_eh_edges could have removed
9042 this basic block already. */
9043 gcc_assert (bb || changed);
9044 if (bb != NULL)
9045 changed |= gimple_purge_dead_eh_edges (bb);
9048 return changed;
9051 /* Purge dead abnormal call edges from basic block BB. */
9053 bool
9054 gimple_purge_dead_abnormal_call_edges (basic_block bb)
9056 bool changed = false;
9057 edge e;
9058 edge_iterator ei;
9059 gimple *stmt = *gsi_last_bb (bb);
9061 if (stmt && stmt_can_make_abnormal_goto (stmt))
9062 return false;
9064 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9066 if (e->flags & EDGE_ABNORMAL)
9068 if (e->flags & EDGE_FALLTHRU)
9069 e->flags &= ~EDGE_ABNORMAL;
9070 else
9071 remove_edge_and_dominated_blocks (e);
9072 changed = true;
9074 else
9075 ei_next (&ei);
9078 return changed;
9081 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
9083 bool
9084 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
9086 bool changed = false;
9087 unsigned i;
9088 bitmap_iterator bi;
9090 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9092 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9094 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9095 this basic block already. */
9096 gcc_assert (bb || changed);
9097 if (bb != NULL)
9098 changed |= gimple_purge_dead_abnormal_call_edges (bb);
9101 return changed;
9104 /* This function is called whenever a new edge is created or
9105 redirected. */
9107 static void
9108 gimple_execute_on_growing_pred (edge e)
9110 basic_block bb = e->dest;
9112 if (!gimple_seq_empty_p (phi_nodes (bb)))
9113 reserve_phi_args_for_new_edge (bb);
9116 /* This function is called immediately before edge E is removed from
9117 the edge vector E->dest->preds. */
9119 static void
9120 gimple_execute_on_shrinking_pred (edge e)
9122 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9123 remove_phi_args (e);
9126 /*---------------------------------------------------------------------------
9127 Helper functions for Loop versioning
9128 ---------------------------------------------------------------------------*/
9130 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9131 of 'first'. Both of them are dominated by 'new_head' basic block. When
9132 'new_head' was created by 'second's incoming edge it received phi arguments
9133 on the edge by split_edge(). Later, additional edge 'e' was created to
9134 connect 'new_head' and 'first'. Now this routine adds phi args on this
9135 additional edge 'e' that new_head to second edge received as part of edge
9136 splitting. */
9138 static void
9139 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9140 basic_block new_head, edge e)
9142 gphi *phi1, *phi2;
9143 gphi_iterator psi1, psi2;
9144 tree def;
9145 edge e2 = find_edge (new_head, second);
9147 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9148 edge, we should always have an edge from NEW_HEAD to SECOND. */
9149 gcc_assert (e2 != NULL);
9151 /* Browse all 'second' basic block phi nodes and add phi args to
9152 edge 'e' for 'first' head. PHI args are always in correct order. */
/* Walk the two PHI sequences in lockstep: copy SECOND's argument on
   E2 (with its location) onto FIRST's corresponding PHI for edge E.  */
9154 for (psi2 = gsi_start_phis (second),
9155 psi1 = gsi_start_phis (first);
9156 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9157 gsi_next (&psi2), gsi_next (&psi1))
9159 phi1 = psi1.phi ();
9160 phi2 = psi2.phi ();
9161 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9162 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9167 /* Adds a if else statement to COND_BB with condition COND_EXPR.
9168 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9169 the destination of the ELSE part. */
9171 static void
9172 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9173 basic_block second_head ATTRIBUTE_UNUSED,
9174 basic_block cond_bb, void *cond_e)
9176 gimple_stmt_iterator gsi;
9177 gimple *new_cond_expr;
9178 tree cond_expr = (tree) cond_e;
9179 edge e0;
9181 /* Build new conditional expr */
9182 gsi = gsi_last_bb (cond_bb);
/* Gimplify COND_EXPR into a form a GIMPLE_COND can use, inserting
   any needed setup statements at the end of COND_BB.  */
9184 cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
9185 is_gimple_condexpr_for_cond,
9186 NULL_TREE, false,
9187 GSI_CONTINUE_LINKING);
9188 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9189 NULL_TREE, NULL_TREE);
9191 /* Add new cond in cond_bb. */
9192 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9194 /* Adjust edges appropriately to connect new head with first head
9195 as well as second head. */
/* The former fallthru edge becomes the ELSE (false) edge of the new
   condition.  */
9196 e0 = single_succ_edge (cond_bb);
9197 e0->flags &= ~EDGE_FALLTHRU;
9198 e0->flags |= EDGE_FALSE_VALUE;
9202 /* Do book-keeping of basic block BB for the profile consistency checker.
9203 Store the counting in RECORD. */
9204 static void
9205 gimple_account_profile_record (basic_block bb,
9206 struct profile_record *record)
9208 gimple_stmt_iterator i;
9209 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9210 gsi_next_nondebug (&i))
9212 record->size
9213 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9214 if (profile_info)
9216 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9217 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9218 && bb->count.ipa ().initialized_p ())
9219 record->time
9220 += estimate_num_insns (gsi_stmt (i),
9221 &eni_time_weights)
9222 * bb->count.ipa ().to_gcov_type ();
9224 else if (bb->count.initialized_p ()
9225 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9226 record->time
9227 += estimate_num_insns
9228 (gsi_stmt (i),
9229 &eni_time_weights)
9230 * bb->count.to_sreal_scale
9231 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9232 else
9233 record->time
9234 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9238 struct cfg_hooks gimple_cfg_hooks = {
9239 "gimple",
9240 gimple_verify_flow_info,
9241 gimple_dump_bb, /* dump_bb */
9242 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9243 create_bb, /* create_basic_block */
9244 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9245 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9246 gimple_can_remove_branch_p, /* can_remove_branch_p */
9247 remove_bb, /* delete_basic_block */
9248 gimple_split_block, /* split_block */
9249 gimple_move_block_after, /* move_block_after */
9250 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9251 gimple_merge_blocks, /* merge_blocks */
9252 gimple_predict_edge, /* predict_edge */
9253 gimple_predicted_by_p, /* predicted_by_p */
9254 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9255 gimple_duplicate_bb, /* duplicate_block */
9256 gimple_split_edge, /* split_edge */
9257 gimple_make_forwarder_block, /* make_forward_block */
9258 NULL, /* tidy_fallthru_edge */
9259 NULL, /* force_nonfallthru */
9260 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9261 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9262 gimple_flow_call_edges_add, /* flow_call_edges_add */
9263 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9264 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9265 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9266 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9267 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9268 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9269 flush_pending_stmts, /* flush_pending_stmts */
9270 gimple_empty_block_p, /* block_empty_p */
9271 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9272 gimple_account_profile_record,
9276 /* Split all critical edges. Split some extra (not necessarily critical) edges
9277 if FOR_EDGE_INSERTION_P is true. */
9279 unsigned int
9280 split_critical_edges (bool for_edge_insertion_p /* = false */)
9282 basic_block bb;
9283 edge e;
9284 edge_iterator ei;
9286 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9287 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9288 mappings around the calls to split_edge. */
9289 start_recording_case_labels ();
9290 FOR_ALL_BB_FN (bb, cfun)
9292 FOR_EACH_EDGE (e, ei, bb->succs)
9294 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9295 split_edge (e);
9296 /* PRE inserts statements to edges and expects that
9297 since split_critical_edges was done beforehand, committing edge
9298 insertions will not split more edges. In addition to critical
9299 edges we must split edges that have multiple successors and
9300 end by control flow statements, such as RESX.
9301 Go ahead and split them too. This matches the logic in
9302 gimple_find_edge_insert_loc. */
9303 else if (for_edge_insertion_p
9304 && (!single_pred_p (e->dest)
9305 || !gimple_seq_empty_p (phi_nodes (e->dest))
9306 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9307 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9308 && !(e->flags & EDGE_ABNORMAL))
9310 gimple_stmt_iterator gsi;
9312 gsi = gsi_last_bb (e->src);
9313 if (!gsi_end_p (gsi)
9314 && stmt_ends_bb_p (gsi_stmt (gsi))
9315 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9316 && !gimple_call_builtin_p (gsi_stmt (gsi),
9317 BUILT_IN_RETURN)))
9318 split_edge (e);
9322 end_recording_case_labels ();
9323 return 0;
9326 namespace {
9328 const pass_data pass_data_split_crit_edges =
9330 GIMPLE_PASS, /* type */
9331 "crited", /* name */
9332 OPTGROUP_NONE, /* optinfo_flags */
9333 TV_TREE_SPLIT_EDGES, /* tv_id */
9334 PROP_cfg, /* properties_required */
9335 PROP_no_crit_edges, /* properties_provided */
9336 0, /* properties_destroyed */
9337 0, /* todo_flags_start */
9338 0, /* todo_flags_finish */
9341 class pass_split_crit_edges : public gimple_opt_pass
9343 public:
9344 pass_split_crit_edges (gcc::context *ctxt)
9345 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9348 /* opt_pass methods: */
9349 unsigned int execute (function *) final override
9351 return split_critical_edges ();
9354 opt_pass * clone () final override
9356 return new pass_split_crit_edges (m_ctxt);
9358 }; // class pass_split_crit_edges
9360 } // anon namespace
9362 gimple_opt_pass *
9363 make_pass_split_crit_edges (gcc::context *ctxt)
9365 return new pass_split_crit_edges (ctxt);
9369 /* Insert COND expression which is GIMPLE_COND after STMT
9370 in basic block BB with appropriate basic block split
9371 and creation of a new conditionally executed basic block.
9372 Update profile so the new bb is visited with probability PROB.
9373 Return created basic block. */
9374 basic_block
9375 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9376 profile_probability prob)
9378 edge fall = split_block (bb, stmt);
9379 gimple_stmt_iterator iter = gsi_last_bb (bb);
9380 basic_block new_bb;
9382 /* Insert cond statement. */
9383 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9384 if (gsi_end_p (iter))
9385 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9386 else
9387 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9389 /* Create conditionally executed block. */
9390 new_bb = create_empty_bb (bb);
9391 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9392 e->probability = prob;
9393 new_bb->count = e->count ();
9394 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9396 /* Fix edge for split bb. */
9397 fall->flags = EDGE_FALSE_VALUE;
9398 fall->probability -= e->probability;
9400 /* Update dominance info. */
9401 if (dom_info_available_p (CDI_DOMINATORS))
9403 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9404 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9407 /* Update loop info. */
9408 if (current_loops)
9409 add_bb_to_loop (new_bb, bb->loop_father);
9411 return new_bb;
9416 /* Given a basic block B which ends with a conditional and has
9417 precisely two successors, determine which of the edges is taken if
9418 the conditional is true and which is taken if the conditional is
9419 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9421 void
9422 extract_true_false_edges_from_block (basic_block b,
9423 edge *true_edge,
9424 edge *false_edge)
9426 edge e = EDGE_SUCC (b, 0);
9428 if (e->flags & EDGE_TRUE_VALUE)
9430 *true_edge = e;
9431 *false_edge = EDGE_SUCC (b, 1);
9433 else
9435 *false_edge = e;
9436 *true_edge = EDGE_SUCC (b, 1);
9441 /* From a controlling predicate in the immediate dominator DOM of
9442 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9443 predicate evaluates to true and false and store them to
9444 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9445 they are non-NULL. Returns true if the edges can be determined,
9446 else return false. */
9448 bool
9449 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9450 edge *true_controlled_edge,
9451 edge *false_controlled_edge)
9453 basic_block bb = phiblock;
9454 edge true_edge, false_edge, tem;
9455 edge e0 = NULL, e1 = NULL;
9457 /* We have to verify that one edge into the PHI node is dominated
9458 by the true edge of the predicate block and the other edge
9459 dominated by the false edge. This ensures that the PHI argument
9460 we are going to take is completely determined by the path we
9461 take from the predicate block.
9462 We can only use BB dominance checks below if the destination of
9463 the true/false edges are dominated by their edge, thus only
9464 have a single predecessor. */
9465 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9466 tem = EDGE_PRED (bb, 0);
9467 if (tem == true_edge
9468 || (single_pred_p (true_edge->dest)
9469 && (tem->src == true_edge->dest
9470 || dominated_by_p (CDI_DOMINATORS,
9471 tem->src, true_edge->dest))))
9472 e0 = tem;
9473 else if (tem == false_edge
9474 || (single_pred_p (false_edge->dest)
9475 && (tem->src == false_edge->dest
9476 || dominated_by_p (CDI_DOMINATORS,
9477 tem->src, false_edge->dest))))
9478 e1 = tem;
9479 else
9480 return false;
9481 tem = EDGE_PRED (bb, 1);
9482 if (tem == true_edge
9483 || (single_pred_p (true_edge->dest)
9484 && (tem->src == true_edge->dest
9485 || dominated_by_p (CDI_DOMINATORS,
9486 tem->src, true_edge->dest))))
9487 e0 = tem;
9488 else if (tem == false_edge
9489 || (single_pred_p (false_edge->dest)
9490 && (tem->src == false_edge->dest
9491 || dominated_by_p (CDI_DOMINATORS,
9492 tem->src, false_edge->dest))))
9493 e1 = tem;
9494 else
9495 return false;
9496 if (!e0 || !e1)
9497 return false;
9499 if (true_controlled_edge)
9500 *true_controlled_edge = e0;
9501 if (false_controlled_edge)
9502 *false_controlled_edge = e1;
9504 return true;
9507 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9508 range [low, high]. Place associated stmts before *GSI. */
9510 void
9511 generate_range_test (basic_block bb, tree index, tree low, tree high,
9512 tree *lhs, tree *rhs)
9514 tree type = TREE_TYPE (index);
9515 tree utype = range_check_type (type);
9517 low = fold_convert (utype, low);
9518 high = fold_convert (utype, high);
9520 gimple_seq seq = NULL;
9521 index = gimple_convert (&seq, utype, index);
9522 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9523 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9525 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9526 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9529 /* Return the basic block that belongs to label numbered INDEX
9530 of a switch statement. */
9532 basic_block
9533 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9535 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9538 /* Return the default basic block of a switch statement. */
9540 basic_block
9541 gimple_switch_default_bb (function *ifun, gswitch *gs)
9543 return gimple_switch_label_bb (ifun, gs, 0);
9546 /* Return the edge that belongs to label numbered INDEX
9547 of a switch statement. */
9549 edge
9550 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9552 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9555 /* Return the default edge of a switch statement. */
9557 edge
9558 gimple_switch_default_edge (function *ifun, gswitch *gs)
9560 return gimple_switch_edge (ifun, gs, 0);
9563 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9565 bool
9566 cond_only_block_p (basic_block bb)
9568 /* BB must have no executable statements. */
9569 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9570 if (phi_nodes (bb))
9571 return false;
9572 while (!gsi_end_p (gsi))
9574 gimple *stmt = gsi_stmt (gsi);
9575 if (is_gimple_debug (stmt))
9577 else if (gimple_code (stmt) == GIMPLE_NOP
9578 || gimple_code (stmt) == GIMPLE_PREDICT
9579 || gimple_code (stmt) == GIMPLE_COND)
9581 else
9582 return false;
9583 gsi_next (&gsi);
9585 return true;
9589 /* Emit return warnings. */
9591 namespace {
9593 const pass_data pass_data_warn_function_return =
9595 GIMPLE_PASS, /* type */
9596 "*warn_function_return", /* name */
9597 OPTGROUP_NONE, /* optinfo_flags */
9598 TV_NONE, /* tv_id */
9599 PROP_cfg, /* properties_required */
9600 0, /* properties_provided */
9601 0, /* properties_destroyed */
9602 0, /* todo_flags_start */
9603 0, /* todo_flags_finish */
9606 class pass_warn_function_return : public gimple_opt_pass
9608 public:
9609 pass_warn_function_return (gcc::context *ctxt)
9610 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9613 /* opt_pass methods: */
9614 unsigned int execute (function *) final override;
9616 }; // class pass_warn_function_return
9618 unsigned int
9619 pass_warn_function_return::execute (function *fun)
9621 location_t location;
9622 gimple *last;
9623 edge e;
9624 edge_iterator ei;
9626 if (!targetm.warn_func_return (fun->decl))
9627 return 0;
9629 /* If we have a path to EXIT, then we do return. */
9630 if (TREE_THIS_VOLATILE (fun->decl)
9631 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9633 location = UNKNOWN_LOCATION;
9634 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9635 (e = ei_safe_edge (ei)); )
9637 last = *gsi_last_bb (e->src);
9638 if ((gimple_code (last) == GIMPLE_RETURN
9639 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9640 && location == UNKNOWN_LOCATION
9641 && ((location = LOCATION_LOCUS (gimple_location (last)))
9642 != UNKNOWN_LOCATION)
9643 && !optimize)
9644 break;
9645 /* When optimizing, replace return stmts in noreturn functions
9646 with __builtin_unreachable () call. */
9647 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9649 location_t loc = gimple_location (last);
9650 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
9651 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9652 gsi_replace (&gsi, new_stmt, true);
9653 remove_edge (e);
9655 else
9656 ei_next (&ei);
9658 if (location == UNKNOWN_LOCATION)
9659 location = cfun->function_end_locus;
9660 warning_at (location, 0, "%<noreturn%> function does return");
9663 /* If we see "return;" in some basic block, then we do reach the end
9664 without returning a value. */
9665 else if (warn_return_type > 0
9666 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9667 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9669 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9671 greturn *return_stmt = dyn_cast <greturn *> (*gsi_last_bb (e->src));
9672 if (return_stmt
9673 && gimple_return_retval (return_stmt) == NULL
9674 && !warning_suppressed_p (return_stmt, OPT_Wreturn_type))
9676 location = gimple_location (return_stmt);
9677 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9678 location = fun->function_end_locus;
9679 if (warning_at (location, OPT_Wreturn_type,
9680 "control reaches end of non-void function"))
9681 suppress_warning (fun->decl, OPT_Wreturn_type);
9682 break;
9685 /* The C++ FE turns fallthrough from the end of non-void function
9686 into __builtin_unreachable () call with BUILTINS_LOCATION.
9687 Recognize those as well as calls from ubsan_instrument_return. */
9688 basic_block bb;
9689 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9690 FOR_EACH_BB_FN (bb, fun)
9691 if (EDGE_COUNT (bb->succs) == 0)
9693 gimple *last = *gsi_last_bb (bb);
9694 const enum built_in_function ubsan_missing_ret
9695 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9696 if (last
9697 && ((LOCATION_LOCUS (gimple_location (last))
9698 == BUILTINS_LOCATION
9699 && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
9700 || gimple_call_builtin_p (last,
9701 BUILT_IN_UNREACHABLE_TRAP)
9702 || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
9703 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9705 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9706 gsi_prev_nondebug (&gsi);
9707 gimple *prev = gsi_stmt (gsi);
9708 if (prev == NULL)
9709 location = UNKNOWN_LOCATION;
9710 else
9711 location = gimple_location (prev);
9712 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9713 location = fun->function_end_locus;
9714 if (warning_at (location, OPT_Wreturn_type,
9715 "control reaches end of non-void function"))
9716 suppress_warning (fun->decl, OPT_Wreturn_type);
9717 break;
9721 return 0;
9724 } // anon namespace
9726 gimple_opt_pass *
9727 make_pass_warn_function_return (gcc::context *ctxt)
9729 return new pass_warn_function_return (ctxt);
9732 /* Walk a gimplified function and warn for functions whose return value is
9733 ignored and attribute((warn_unused_result)) is set. This is done before
9734 inlining, so we don't have to worry about that. */
9736 static void
9737 do_warn_unused_result (gimple_seq seq)
9739 tree fdecl, ftype;
9740 gimple_stmt_iterator i;
9742 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9744 gimple *g = gsi_stmt (i);
9746 switch (gimple_code (g))
9748 case GIMPLE_BIND:
9749 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9750 break;
9751 case GIMPLE_TRY:
9752 do_warn_unused_result (gimple_try_eval (g));
9753 do_warn_unused_result (gimple_try_cleanup (g));
9754 break;
9755 case GIMPLE_CATCH:
9756 do_warn_unused_result (gimple_catch_handler (
9757 as_a <gcatch *> (g)));
9758 break;
9759 case GIMPLE_EH_FILTER:
9760 do_warn_unused_result (gimple_eh_filter_failure (g));
9761 break;
9763 case GIMPLE_CALL:
9764 if (gimple_call_lhs (g))
9765 break;
9766 if (gimple_call_internal_p (g))
9767 break;
9769 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9770 LHS. All calls whose value is ignored should be
9771 represented like this. Look for the attribute. */
9772 fdecl = gimple_call_fndecl (g);
9773 ftype = gimple_call_fntype (g);
9775 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9777 location_t loc = gimple_location (g);
9779 if (fdecl)
9780 warning_at (loc, OPT_Wunused_result,
9781 "ignoring return value of %qD "
9782 "declared with attribute %<warn_unused_result%>",
9783 fdecl);
9784 else
9785 warning_at (loc, OPT_Wunused_result,
9786 "ignoring return value of function "
9787 "declared with attribute %<warn_unused_result%>");
9789 break;
9791 default:
9792 /* Not a container, not a call, or a call whose value is used. */
9793 break;
9798 namespace {
9800 const pass_data pass_data_warn_unused_result =
9802 GIMPLE_PASS, /* type */
9803 "*warn_unused_result", /* name */
9804 OPTGROUP_NONE, /* optinfo_flags */
9805 TV_NONE, /* tv_id */
9806 PROP_gimple_any, /* properties_required */
9807 0, /* properties_provided */
9808 0, /* properties_destroyed */
9809 0, /* todo_flags_start */
9810 0, /* todo_flags_finish */
9813 class pass_warn_unused_result : public gimple_opt_pass
9815 public:
9816 pass_warn_unused_result (gcc::context *ctxt)
9817 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9820 /* opt_pass methods: */
9821 bool gate (function *) final override { return flag_warn_unused_result; }
9822 unsigned int execute (function *) final override
9824 do_warn_unused_result (gimple_body (current_function_decl));
9825 return 0;
9828 }; // class pass_warn_unused_result
9830 } // anon namespace
9832 gimple_opt_pass *
9833 make_pass_warn_unused_result (gcc::context *ctxt)
9835 return new pass_warn_unused_result (ctxt);
9838 /* Maybe Remove stores to variables we marked write-only.
9839 Return true if a store was removed. */
9840 static bool
9841 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9842 bitmap dce_ssa_names)
9844 /* Keep access when store has side effect, i.e. in case when source
9845 is volatile. */
9846 if (!gimple_store_p (stmt)
9847 || gimple_has_side_effects (stmt)
9848 || optimize_debug)
9849 return false;
9851 tree lhs = get_base_address (gimple_get_lhs (stmt));
9853 if (!VAR_P (lhs)
9854 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9855 || !varpool_node::get (lhs)->writeonly)
9856 return false;
9858 if (dump_file && (dump_flags & TDF_DETAILS))
9860 fprintf (dump_file, "Removing statement, writes"
9861 " to write only var:\n");
9862 print_gimple_stmt (dump_file, stmt, 0,
9863 TDF_VOPS|TDF_MEMSYMS);
9866 /* Mark ssa name defining to be checked for simple dce. */
9867 if (gimple_assign_single_p (stmt))
9869 tree rhs = gimple_assign_rhs1 (stmt);
9870 if (TREE_CODE (rhs) == SSA_NAME
9871 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9872 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9874 unlink_stmt_vdef (stmt);
9875 gsi_remove (&gsi, true);
9876 release_defs (stmt);
9877 return true;
9880 /* IPA passes, compilation of earlier functions or inlining
9881 might have changed some properties, such as marked functions nothrow,
9882 pure, const or noreturn.
9883 Remove redundant edges and basic blocks, and create new ones if necessary. */
9885 unsigned int
9886 execute_fixup_cfg (void)
9888 basic_block bb;
9889 gimple_stmt_iterator gsi;
9890 int todo = 0;
9891 cgraph_node *node = cgraph_node::get (current_function_decl);
9892 /* Same scaling is also done by ipa_merge_profiles. */
9893 profile_count num = node->count;
9894 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9895 bool scale = num.initialized_p () && !(num == den);
9896 auto_bitmap dce_ssa_names;
9898 if (scale)
9900 profile_count::adjust_for_ipa_scaling (&num, &den);
9901 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9902 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9903 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9906 FOR_EACH_BB_FN (bb, cfun)
9908 if (scale)
9909 bb->count = bb->count.apply_scale (num, den);
9910 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9912 gimple *stmt = gsi_stmt (gsi);
9913 tree decl = is_gimple_call (stmt)
9914 ? gimple_call_fndecl (stmt)
9915 : NULL;
9916 if (decl)
9918 int flags = gimple_call_flags (stmt);
9919 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9921 if (gimple_in_ssa_p (cfun))
9923 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9924 update_stmt (stmt);
9927 if (flags & ECF_NORETURN
9928 && fixup_noreturn_call (stmt))
9929 todo |= TODO_cleanup_cfg;
9932 /* Remove stores to variables we marked write-only. */
9933 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9935 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9936 continue;
9939 /* For calls we can simply remove LHS when it is known
9940 to be write-only. */
9941 if (is_gimple_call (stmt)
9942 && gimple_get_lhs (stmt))
9944 tree lhs = get_base_address (gimple_get_lhs (stmt));
9946 if (VAR_P (lhs)
9947 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9948 && varpool_node::get (lhs)->writeonly)
9950 gimple_call_set_lhs (stmt, NULL);
9951 update_stmt (stmt);
9952 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9956 gsi_next (&gsi);
9958 if (gimple *last = *gsi_last_bb (bb))
9960 if (maybe_clean_eh_stmt (last)
9961 && gimple_purge_dead_eh_edges (bb))
9962 todo |= TODO_cleanup_cfg;
9963 if (gimple_purge_dead_abnormal_call_edges (bb))
9964 todo |= TODO_cleanup_cfg;
9967 /* If we have a basic block with no successors that does not
9968 end with a control statement or a noreturn call end it with
9969 a call to __builtin_unreachable. This situation can occur
9970 when inlining a noreturn call that does in fact return. */
9971 if (EDGE_COUNT (bb->succs) == 0)
9973 gimple *stmt = last_nondebug_stmt (bb);
9974 if (!stmt
9975 || (!is_ctrl_stmt (stmt)
9976 && (!is_gimple_call (stmt)
9977 || !gimple_call_noreturn_p (stmt))))
9979 if (stmt && is_gimple_call (stmt))
9980 gimple_call_set_ctrl_altering (stmt, false);
9981 stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
9982 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9983 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9984 if (!cfun->after_inlining)
9985 if (tree fndecl = gimple_call_fndecl (stmt))
9987 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9988 node->create_edge (cgraph_node::get_create (fndecl),
9989 call_stmt, bb->count);
9994 if (scale)
9996 update_max_bb_count ();
9997 compute_function_frequency ();
10000 if (current_loops
10001 && (todo & TODO_cleanup_cfg))
10002 loops_state_set (LOOPS_NEED_FIXUP);
10004 simple_dce_from_worklist (dce_ssa_names);
10006 return todo;
10009 namespace {
10011 const pass_data pass_data_fixup_cfg =
10013 GIMPLE_PASS, /* type */
10014 "fixup_cfg", /* name */
10015 OPTGROUP_NONE, /* optinfo_flags */
10016 TV_NONE, /* tv_id */
10017 PROP_cfg, /* properties_required */
10018 0, /* properties_provided */
10019 0, /* properties_destroyed */
10020 0, /* todo_flags_start */
10021 0, /* todo_flags_finish */
10024 class pass_fixup_cfg : public gimple_opt_pass
10026 public:
10027 pass_fixup_cfg (gcc::context *ctxt)
10028 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
10031 /* opt_pass methods: */
10032 opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
10033 unsigned int execute (function *) final override
10035 return execute_fixup_cfg ();
10038 }; // class pass_fixup_cfg
10040 } // anon namespace
10042 gimple_opt_pass *
10043 make_pass_fixup_cfg (gcc::context *ctxt)
10045 return new pass_fixup_cfg (ctxt);
10048 /* Garbage collection support for edge_def. */
10050 extern void gt_ggc_mx (tree&);
10051 extern void gt_ggc_mx (gimple *&);
10052 extern void gt_ggc_mx (rtx&);
10053 extern void gt_ggc_mx (basic_block&);
10055 static void
10056 gt_ggc_mx (rtx_insn *& x)
10058 if (x)
10059 gt_ggc_mx_rtx_def ((void *) x);
10062 void
10063 gt_ggc_mx (edge_def *e)
10065 tree block = LOCATION_BLOCK (e->goto_locus);
10066 gt_ggc_mx (e->src);
10067 gt_ggc_mx (e->dest);
10068 if (current_ir_type () == IR_GIMPLE)
10069 gt_ggc_mx (e->insns.g);
10070 else
10071 gt_ggc_mx (e->insns.r);
10072 gt_ggc_mx (block);
10075 /* PCH support for edge_def. */
10077 extern void gt_pch_nx (tree&);
10078 extern void gt_pch_nx (gimple *&);
10079 extern void gt_pch_nx (rtx&);
10080 extern void gt_pch_nx (basic_block&);
10082 static void
10083 gt_pch_nx (rtx_insn *& x)
10085 if (x)
10086 gt_pch_nx_rtx_def ((void *) x);
10089 void
10090 gt_pch_nx (edge_def *e)
10092 tree block = LOCATION_BLOCK (e->goto_locus);
10093 gt_pch_nx (e->src);
10094 gt_pch_nx (e->dest);
10095 if (current_ir_type () == IR_GIMPLE)
10096 gt_pch_nx (e->insns.g);
10097 else
10098 gt_pch_nx (e->insns.r);
10099 gt_pch_nx (block);
10102 void
10103 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
10105 tree block = LOCATION_BLOCK (e->goto_locus);
10106 op (&(e->src), NULL, cookie);
10107 op (&(e->dest), NULL, cookie);
10108 if (current_ir_type () == IR_GIMPLE)
10109 op (&(e->insns.g), NULL, cookie);
10110 else
10111 op (&(e->insns.r), NULL, cookie);
10112 op (&(block), &(block), cookie);
10115 #if CHECKING_P
10117 namespace selftest {
10119 /* Helper function for CFG selftests: create a dummy function decl
10120 and push it as cfun. */
10122 static tree
10123 push_fndecl (const char *name)
10125 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
10126 /* FIXME: this uses input_location: */
10127 tree fndecl = build_fn_decl (name, fn_type);
10128 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
10129 NULL_TREE, integer_type_node);
10130 DECL_RESULT (fndecl) = retval;
10131 push_struct_function (fndecl);
10132 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10133 ASSERT_TRUE (fun != NULL);
10134 init_empty_tree_cfg_for_function (fun);
10135 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
10136 ASSERT_EQ (0, n_edges_for_fn (fun));
10137 return fndecl;
10140 /* These tests directly create CFGs.
10141 Compare with the static fns within tree-cfg.cc:
10142 - build_gimple_cfg
10143 - make_blocks: calls create_basic_block (seq, bb);
10144 - make_edges. */
10146 /* Verify a simple cfg of the form:
10147 ENTRY -> A -> B -> C -> EXIT. */
10149 static void
10150 test_linear_chain ()
10152 gimple_register_cfg_hooks ();
10154 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10155 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10157 /* Create some empty blocks. */
10158 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10159 basic_block bb_b = create_empty_bb (bb_a);
10160 basic_block bb_c = create_empty_bb (bb_b);
10162 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10163 ASSERT_EQ (0, n_edges_for_fn (fun));
10165 /* Create some edges: a simple linear chain of BBs. */
10166 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10167 make_edge (bb_a, bb_b, 0);
10168 make_edge (bb_b, bb_c, 0);
10169 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10171 /* Verify the edges. */
10172 ASSERT_EQ (4, n_edges_for_fn (fun));
10173 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10174 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10175 ASSERT_EQ (1, bb_a->preds->length ());
10176 ASSERT_EQ (1, bb_a->succs->length ());
10177 ASSERT_EQ (1, bb_b->preds->length ());
10178 ASSERT_EQ (1, bb_b->succs->length ());
10179 ASSERT_EQ (1, bb_c->preds->length ());
10180 ASSERT_EQ (1, bb_c->succs->length ());
10181 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10182 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10184 /* Verify the dominance information
10185 Each BB in our simple chain should be dominated by the one before
10186 it. */
10187 calculate_dominance_info (CDI_DOMINATORS);
10188 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10189 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10190 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10191 ASSERT_EQ (1, dom_by_b.length ());
10192 ASSERT_EQ (bb_c, dom_by_b[0]);
10193 free_dominance_info (CDI_DOMINATORS);
10195 /* Similarly for post-dominance: each BB in our chain is post-dominated
10196 by the one after it. */
10197 calculate_dominance_info (CDI_POST_DOMINATORS);
10198 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10199 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10200 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10201 ASSERT_EQ (1, postdom_by_b.length ());
10202 ASSERT_EQ (bb_a, postdom_by_b[0]);
10203 free_dominance_info (CDI_POST_DOMINATORS);
10205 pop_cfun ();
10208 /* Verify a simple CFG of the form:
10209 ENTRY
10213 /t \f
10219 EXIT. */
10221 static void
10222 test_diamond ()
10224 gimple_register_cfg_hooks ();
10226 tree fndecl = push_fndecl ("cfg_test_diamond");
10227 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10229 /* Create some empty blocks. */
10230 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10231 basic_block bb_b = create_empty_bb (bb_a);
10232 basic_block bb_c = create_empty_bb (bb_a);
10233 basic_block bb_d = create_empty_bb (bb_b);
10235 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10236 ASSERT_EQ (0, n_edges_for_fn (fun));
10238 /* Create the edges. */
10239 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10240 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10241 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10242 make_edge (bb_b, bb_d, 0);
10243 make_edge (bb_c, bb_d, 0);
10244 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10246 /* Verify the edges. */
10247 ASSERT_EQ (6, n_edges_for_fn (fun));
10248 ASSERT_EQ (1, bb_a->preds->length ());
10249 ASSERT_EQ (2, bb_a->succs->length ());
10250 ASSERT_EQ (1, bb_b->preds->length ());
10251 ASSERT_EQ (1, bb_b->succs->length ());
10252 ASSERT_EQ (1, bb_c->preds->length ());
10253 ASSERT_EQ (1, bb_c->succs->length ());
10254 ASSERT_EQ (2, bb_d->preds->length ());
10255 ASSERT_EQ (1, bb_d->succs->length ());
10257 /* Verify the dominance information. */
10258 calculate_dominance_info (CDI_DOMINATORS);
10259 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10260 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10261 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10262 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10263 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10264 dom_by_a.release ();
10265 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10266 ASSERT_EQ (0, dom_by_b.length ());
10267 dom_by_b.release ();
10268 free_dominance_info (CDI_DOMINATORS);
10270 /* Similarly for post-dominance. */
10271 calculate_dominance_info (CDI_POST_DOMINATORS);
10272 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10273 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10274 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10275 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10276 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10277 postdom_by_d.release ();
10278 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10279 ASSERT_EQ (0, postdom_by_b.length ());
10280 postdom_by_b.release ();
10281 free_dominance_info (CDI_POST_DOMINATORS);
10283 pop_cfun ();
10286 /* Verify that we can handle a CFG containing a "complete" aka
10287 fully-connected subgraph (where A B C D below all have edges
10288 pointing to each other node, also to themselves).
10289 e.g.:
10290 ENTRY EXIT
10296 A<--->B
10297 ^^ ^^
10298 | \ / |
10299 | X |
10300 | / \ |
10301 VV VV
10302 C<--->D
10305 static void
10306 test_fully_connected ()
10308 gimple_register_cfg_hooks ();
10310 tree fndecl = push_fndecl ("cfg_fully_connected");
10311 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10313 const int n = 4;
10315 /* Create some empty blocks. */
10316 auto_vec <basic_block> subgraph_nodes;
10317 for (int i = 0; i < n; i++)
10318 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10320 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10321 ASSERT_EQ (0, n_edges_for_fn (fun));
10323 /* Create the edges. */
10324 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10325 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10326 for (int i = 0; i < n; i++)
10327 for (int j = 0; j < n; j++)
10328 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10330 /* Verify the edges. */
10331 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10332 /* The first one is linked to ENTRY/EXIT as well as itself and
10333 everything else. */
10334 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10335 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10336 /* The other ones in the subgraph are linked to everything in
10337 the subgraph (including themselves). */
10338 for (int i = 1; i < n; i++)
10340 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10341 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10344 /* Verify the dominance information. */
10345 calculate_dominance_info (CDI_DOMINATORS);
10346 /* The initial block in the subgraph should be dominated by ENTRY. */
10347 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10348 get_immediate_dominator (CDI_DOMINATORS,
10349 subgraph_nodes[0]));
10350 /* Every other block in the subgraph should be dominated by the
10351 initial block. */
10352 for (int i = 1; i < n; i++)
10353 ASSERT_EQ (subgraph_nodes[0],
10354 get_immediate_dominator (CDI_DOMINATORS,
10355 subgraph_nodes[i]));
10356 free_dominance_info (CDI_DOMINATORS);
10358 /* Similarly for post-dominance. */
10359 calculate_dominance_info (CDI_POST_DOMINATORS);
10360 /* The initial block in the subgraph should be postdominated by EXIT. */
10361 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10362 get_immediate_dominator (CDI_POST_DOMINATORS,
10363 subgraph_nodes[0]));
10364 /* Every other block in the subgraph should be postdominated by the
10365 initial block, since that leads to EXIT. */
10366 for (int i = 1; i < n; i++)
10367 ASSERT_EQ (subgraph_nodes[0],
10368 get_immediate_dominator (CDI_POST_DOMINATORS,
10369 subgraph_nodes[i]));
10370 free_dominance_info (CDI_POST_DOMINATORS);
10372 pop_cfun ();
10375 /* Run all of the selftests within this file. */
10377 void
10378 tree_cfg_cc_tests ()
10380 test_linear_chain ();
10381 test_diamond ();
10382 test_fully_connected ();
10385 } // namespace selftest
10387 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10388 - loop
10389 - nested loops
10390 - switch statement (a block with many out-edges)
10391 - something that jumps to itself
10392 - etc */
10394 #endif /* CHECKING_P */