ada: output.adb: fix newline being inserted when buffer is full
[official-gcc.git] / gcc / tree-cfg.cc
blob9ecf138cbfd9092ffe70052a5aae1d8a2b276406
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2023 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
61 #include "gimplify.h"
62 #include "attribs.h"
63 #include "selftest.h"
64 #include "opts.h"
65 #include "asan.h"
66 #include "profile.h"
67 #include "sreal.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
101 /* CFG statistics. */
102 struct cfg_stats_d
104 long num_merged_labels;
107 static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map<tree, tree> *vars_map;
113 tree to_context;
116 /* Hash table to store last discriminator assigned for each locus. */
117 struct locus_discrim_map
119 int location_line;
120 int discriminator;
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
135 inline hashval_t
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
138 return item->location_line;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
144 inline bool
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
148 return a->location_line == b->location_line;
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
181 void
182 init_empty_tree_cfg_for_function (struct function *fn)
184 /* Initialize the basic block array. */
185 init_flow (fn);
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
205 void
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun);
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
218 static void
219 build_gimple_cfg (gimple_seq seq)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
226 init_empty_tree_cfg ();
228 make_blocks (seq);
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
250 make_edges ();
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
261 static void
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
314 static void
315 replace_loop_annotate (void)
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple *stmt;
321 for (auto loop : loops_list (cfun, 0))
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
330 /* Push the global flag_finite_loops state down to individual loops. */
331 loop->finite_p = flag_finite_loops;
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_unroll_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 case annot_expr_parallel_kind:
353 break;
354 default:
355 gcc_unreachable ();
358 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
359 stmt = gimple_build_assign (gimple_call_lhs (stmt),
360 gimple_call_arg (stmt, 0));
361 gsi_replace (&gsi, stmt, true);
366 static unsigned int
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
378 cleanup_tree_cfg ();
380 bb_to_omp_idx.release ();
382 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
383 replace_loop_annotate ();
384 return 0;
387 namespace {
389 const pass_data pass_data_build_cfg =
391 GIMPLE_PASS, /* type */
392 "cfg", /* name */
393 OPTGROUP_NONE, /* optinfo_flags */
394 TV_TREE_CFG, /* tv_id */
395 PROP_gimple_leh, /* properties_required */
396 ( PROP_cfg | PROP_loops ), /* properties_provided */
397 0, /* properties_destroyed */
398 0, /* todo_flags_start */
399 0, /* todo_flags_finish */
402 class pass_build_cfg : public gimple_opt_pass
404 public:
405 pass_build_cfg (gcc::context *ctxt)
406 : gimple_opt_pass (pass_data_build_cfg, ctxt)
409 /* opt_pass methods: */
410 unsigned int execute (function *) final override
412 return execute_build_cfg ();
415 }; // class pass_build_cfg
417 } // anon namespace
419 gimple_opt_pass *
420 make_pass_build_cfg (gcc::context *ctxt)
422 return new pass_build_cfg (ctxt);
426 /* Return true if T is a computed goto. */
428 bool
429 computed_goto_p (gimple *t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
435 /* Returns true if the sequence of statements STMTS only contains
436 a call to __builtin_unreachable (). */
438 bool
439 gimple_seq_unreachable_p (gimple_seq stmts)
441 if (stmts == NULL
442 /* Return false if -fsanitize=unreachable, we don't want to
443 optimize away those calls, but rather turn them into
444 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
445 later. */
446 || sanitize_flags_p (SANITIZE_UNREACHABLE))
447 return false;
449 gimple_stmt_iterator gsi = gsi_last (stmts);
451 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
452 return false;
454 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
456 gimple *stmt = gsi_stmt (gsi);
457 if (gimple_code (stmt) != GIMPLE_LABEL
458 && !is_gimple_debug (stmt)
459 && !gimple_clobber_p (stmt))
460 return false;
462 return true;
465 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
466 the other edge points to a bb with just __builtin_unreachable ().
467 I.e. return true for C->M edge in:
468 <bb C>:
470 if (something)
471 goto <bb N>;
472 else
473 goto <bb M>;
474 <bb N>:
475 __builtin_unreachable ();
476 <bb M>: */
478 bool
479 assert_unreachable_fallthru_edge_p (edge e)
481 basic_block pred_bb = e->src;
482 gimple *last = last_stmt (pred_bb);
483 if (last && gimple_code (last) == GIMPLE_COND)
485 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
486 if (other_bb == e->dest)
487 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
488 if (EDGE_COUNT (other_bb->succs) == 0)
489 return gimple_seq_unreachable_p (bb_seq (other_bb));
491 return false;
495 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
496 could alter control flow except via eh. We initialize the flag at
497 CFG build time and only ever clear it later. */
499 static void
500 gimple_call_initialize_ctrl_altering (gimple *stmt)
502 int flags = gimple_call_flags (stmt);
504 /* A call alters control flow if it can make an abnormal goto. */
505 if (call_can_make_abnormal_goto (stmt)
506 /* A call also alters control flow if it does not return. */
507 || flags & ECF_NORETURN
508 /* TM ending statements have backedges out of the transaction.
509 Return true so we split the basic block containing them.
510 Note that the TM_BUILTIN test is merely an optimization. */
511 || ((flags & ECF_TM_BUILTIN)
512 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
513 /* BUILT_IN_RETURN call is same as return statement. */
514 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
515 /* IFN_UNIQUE should be the last insn, to make checking for it
516 as cheap as possible. */
517 || (gimple_call_internal_p (stmt)
518 && gimple_call_internal_unique_p (stmt)))
519 gimple_call_set_ctrl_altering (stmt, true);
520 else
521 gimple_call_set_ctrl_altering (stmt, false);
525 /* Insert SEQ after BB and build a flowgraph. */
527 static basic_block
528 make_blocks_1 (gimple_seq seq, basic_block bb)
530 gimple_stmt_iterator i = gsi_start (seq);
531 gimple *stmt = NULL;
532 gimple *prev_stmt = NULL;
533 bool start_new_block = true;
534 bool first_stmt_of_seq = true;
536 while (!gsi_end_p (i))
538 /* PREV_STMT should only be set to a debug stmt if the debug
539 stmt is before nondebug stmts. Once stmt reaches a nondebug
540 nonlabel, prev_stmt will be set to it, so that
541 stmt_starts_bb_p will know to start a new block if a label is
542 found. However, if stmt was a label after debug stmts only,
543 keep the label in prev_stmt even if we find further debug
544 stmts, for there may be other labels after them, and they
545 should land in the same block. */
546 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
547 prev_stmt = stmt;
548 stmt = gsi_stmt (i);
550 if (stmt && is_gimple_call (stmt))
551 gimple_call_initialize_ctrl_altering (stmt);
553 /* If the statement starts a new basic block or if we have determined
554 in a previous pass that we need to create a new block for STMT, do
555 so now. */
556 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
558 if (!first_stmt_of_seq)
559 gsi_split_seq_before (&i, &seq);
560 bb = create_basic_block (seq, bb);
561 start_new_block = false;
562 prev_stmt = NULL;
565 /* Now add STMT to BB and create the subgraphs for special statement
566 codes. */
567 gimple_set_bb (stmt, bb);
569 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
570 next iteration. */
571 if (stmt_ends_bb_p (stmt))
573 /* If the stmt can make abnormal goto use a new temporary
574 for the assignment to the LHS. This makes sure the old value
575 of the LHS is available on the abnormal edge. Otherwise
576 we will end up with overlapping life-ranges for abnormal
577 SSA names. */
578 if (gimple_has_lhs (stmt)
579 && stmt_can_make_abnormal_goto (stmt)
580 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
582 tree lhs = gimple_get_lhs (stmt);
583 tree tmp = create_tmp_var (TREE_TYPE (lhs));
584 gimple *s = gimple_build_assign (lhs, tmp);
585 gimple_set_location (s, gimple_location (stmt));
586 gimple_set_block (s, gimple_block (stmt));
587 gimple_set_lhs (stmt, tmp);
588 gsi_insert_after (&i, s, GSI_SAME_STMT);
590 start_new_block = true;
593 gsi_next (&i);
594 first_stmt_of_seq = false;
596 return bb;
599 /* Build a flowgraph for the sequence of stmts SEQ. */
601 static void
602 make_blocks (gimple_seq seq)
604 /* Look for debug markers right before labels, and move the debug
605 stmts after the labels. Accepting labels among debug markers
606 adds no value, just complexity; if we wanted to annotate labels
607 with view numbers (so sequencing among markers would matter) or
608 somesuch, we're probably better off still moving the labels, but
609 adding other debug annotations in their original positions or
610 emitting nonbind or bind markers associated with the labels in
611 the original position of the labels.
613 Moving labels would probably be simpler, but we can't do that:
614 moving labels assigns label ids to them, and doing so because of
615 debug markers makes for -fcompare-debug and possibly even codegen
616 differences. So, we have to move the debug stmts instead. To
617 that end, we scan SEQ backwards, marking the position of the
618 latest (earliest we find) label, and moving debug stmts that are
619 not separated from it by nondebug nonlabel stmts after the
620 label. */
621 if (MAY_HAVE_DEBUG_MARKER_STMTS)
623 gimple_stmt_iterator label = gsi_none ();
625 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
627 gimple *stmt = gsi_stmt (i);
629 /* If this is the first label we encounter (latest in SEQ)
630 before nondebug stmts, record its position. */
631 if (is_a <glabel *> (stmt))
633 if (gsi_end_p (label))
634 label = i;
635 continue;
638 /* Without a recorded label position to move debug stmts to,
639 there's nothing to do. */
640 if (gsi_end_p (label))
641 continue;
643 /* Move the debug stmt at I after LABEL. */
644 if (is_gimple_debug (stmt))
646 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
647 /* As STMT is removed, I advances to the stmt after
648 STMT, so the gsi_prev in the for "increment"
649 expression gets us to the stmt we're to visit after
650 STMT. LABEL, however, would advance to the moved
651 stmt if we passed it to gsi_move_after, so pass it a
652 copy instead, so as to keep LABEL pointing to the
653 LABEL. */
654 gimple_stmt_iterator copy = label;
655 gsi_move_after (&i, &copy);
656 continue;
659 /* There aren't any (more?) debug stmts before label, so
660 there isn't anything else to move after it. */
661 label = gsi_none ();
665 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
668 /* Create and return a new empty basic block after bb AFTER. */
670 static basic_block
671 create_bb (void *h, void *e, basic_block after)
673 basic_block bb;
675 gcc_assert (!e);
677 /* Create and initialize a new basic block. Since alloc_block uses
678 GC allocation that clears memory to allocate a basic block, we do
679 not have to clear the newly allocated basic block here. */
680 bb = alloc_block ();
682 bb->index = last_basic_block_for_fn (cfun);
683 bb->flags = BB_NEW;
684 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
686 /* Add the new block to the linked list of blocks. */
687 link_block (bb, after);
689 /* Grow the basic block array if needed. */
690 if ((size_t) last_basic_block_for_fn (cfun)
691 == basic_block_info_for_fn (cfun)->length ())
692 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
693 last_basic_block_for_fn (cfun) + 1);
695 /* Add the newly created block to the array. */
696 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
698 n_basic_blocks_for_fn (cfun)++;
699 last_basic_block_for_fn (cfun)++;
701 return bb;
705 /*---------------------------------------------------------------------------
706 Edge creation
707 ---------------------------------------------------------------------------*/
709 /* If basic block BB has an abnormal edge to a basic block
710 containing IFN_ABNORMAL_DISPATCHER internal call, return
711 that the dispatcher's basic block, otherwise return NULL. */
713 basic_block
714 get_abnormal_succ_dispatcher (basic_block bb)
716 edge e;
717 edge_iterator ei;
719 FOR_EACH_EDGE (e, ei, bb->succs)
720 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
722 gimple_stmt_iterator gsi
723 = gsi_start_nondebug_after_labels_bb (e->dest);
724 gimple *g = gsi_stmt (gsi);
725 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
726 return e->dest;
728 return NULL;
731 /* Helper function for make_edges. Create a basic block with
732 with ABNORMAL_DISPATCHER internal call in it if needed, and
733 create abnormal edges from BBS to it and from it to FOR_BB
734 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
736 static void
737 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
738 auto_vec<basic_block> *bbs, bool computed_goto)
740 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
741 unsigned int idx = 0;
742 basic_block bb;
743 bool inner = false;
745 if (!bb_to_omp_idx.is_empty ())
747 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
748 if (bb_to_omp_idx[for_bb->index] != 0)
749 inner = true;
752 /* If the dispatcher has been created already, then there are basic
753 blocks with abnormal edges to it, so just make a new edge to
754 for_bb. */
755 if (*dispatcher == NULL)
757 /* Check if there are any basic blocks that need to have
758 abnormal edges to this dispatcher. If there are none, return
759 early. */
760 if (bb_to_omp_idx.is_empty ())
762 if (bbs->is_empty ())
763 return;
765 else
767 FOR_EACH_VEC_ELT (*bbs, idx, bb)
768 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
769 break;
770 if (bb == NULL)
771 return;
774 /* Create the dispatcher bb. */
775 *dispatcher = create_basic_block (NULL, for_bb);
776 if (computed_goto)
778 /* Factor computed gotos into a common computed goto site. Also
779 record the location of that site so that we can un-factor the
780 gotos after we have converted back to normal form. */
781 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
783 /* Create the destination of the factored goto. Each original
784 computed goto will put its desired destination into this
785 variable and jump to the label we create immediately below. */
786 tree var = create_tmp_var (ptr_type_node, "gotovar");
788 /* Build a label for the new block which will contain the
789 factored computed goto. */
790 tree factored_label_decl
791 = create_artificial_label (UNKNOWN_LOCATION);
792 gimple *factored_computed_goto_label
793 = gimple_build_label (factored_label_decl);
794 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
796 /* Build our new computed goto. */
797 gimple *factored_computed_goto = gimple_build_goto (var);
798 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
800 FOR_EACH_VEC_ELT (*bbs, idx, bb)
802 if (!bb_to_omp_idx.is_empty ()
803 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
804 continue;
806 gsi = gsi_last_bb (bb);
807 gimple *last = gsi_stmt (gsi);
809 gcc_assert (computed_goto_p (last));
811 /* Copy the original computed goto's destination into VAR. */
812 gimple *assignment
813 = gimple_build_assign (var, gimple_goto_dest (last));
814 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
816 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
817 e->goto_locus = gimple_location (last);
818 gsi_remove (&gsi, true);
821 else
823 tree arg = inner ? boolean_true_node : boolean_false_node;
824 gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
825 1, arg);
826 gimple_call_set_ctrl_altering (g, true);
827 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
828 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
830 /* Create predecessor edges of the dispatcher. */
831 FOR_EACH_VEC_ELT (*bbs, idx, bb)
833 if (!bb_to_omp_idx.is_empty ()
834 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
835 continue;
836 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
841 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
844 /* Creates outgoing edges for BB. Returns 1 when it ends with an
845 computed goto, returns 2 when it ends with a statement that
846 might return to this function via an nonlocal goto, otherwise
847 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
849 static int
850 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
852 gimple *last = last_stmt (bb);
853 bool fallthru = false;
854 int ret = 0;
856 if (!last)
857 return ret;
859 switch (gimple_code (last))
861 case GIMPLE_GOTO:
862 if (make_goto_expr_edges (bb))
863 ret = 1;
864 fallthru = false;
865 break;
866 case GIMPLE_RETURN:
868 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
869 e->goto_locus = gimple_location (last);
870 fallthru = false;
872 break;
873 case GIMPLE_COND:
874 make_cond_expr_edges (bb);
875 fallthru = false;
876 break;
877 case GIMPLE_SWITCH:
878 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
879 fallthru = false;
880 break;
881 case GIMPLE_RESX:
882 make_eh_edges (last);
883 fallthru = false;
884 break;
885 case GIMPLE_EH_DISPATCH:
886 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
887 break;
889 case GIMPLE_CALL:
890 /* If this function receives a nonlocal goto, then we need to
891 make edges from this call site to all the nonlocal goto
892 handlers. */
893 if (stmt_can_make_abnormal_goto (last))
894 ret = 2;
896 /* If this statement has reachable exception handlers, then
897 create abnormal edges to them. */
898 make_eh_edges (last);
900 /* BUILTIN_RETURN is really a return statement. */
901 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
903 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
904 fallthru = false;
906 /* Some calls are known not to return. */
907 else
908 fallthru = !gimple_call_noreturn_p (last);
909 break;
911 case GIMPLE_ASSIGN:
912 /* A GIMPLE_ASSIGN may throw internally and thus be considered
913 control-altering. */
914 if (is_ctrl_altering_stmt (last))
915 make_eh_edges (last);
916 fallthru = true;
917 break;
919 case GIMPLE_ASM:
920 make_gimple_asm_edges (bb);
921 fallthru = true;
922 break;
924 CASE_GIMPLE_OMP:
925 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
926 break;
928 case GIMPLE_TRANSACTION:
930 gtransaction *txn = as_a <gtransaction *> (last);
931 tree label1 = gimple_transaction_label_norm (txn);
932 tree label2 = gimple_transaction_label_uninst (txn);
934 if (label1)
935 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
936 if (label2)
937 make_edge (bb, label_to_block (cfun, label2),
938 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
940 tree label3 = gimple_transaction_label_over (txn);
941 if (gimple_transaction_subcode (txn)
942 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
943 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
945 fallthru = false;
947 break;
949 default:
950 gcc_assert (!stmt_ends_bb_p (last));
951 fallthru = true;
952 break;
955 if (fallthru)
956 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
958 return ret;
961 /* Join all the blocks in the flowgraph. */
963 static void
964 make_edges (void)
966 basic_block bb;
967 struct omp_region *cur_region = NULL;
968 auto_vec<basic_block> ab_edge_goto;
969 auto_vec<basic_block> ab_edge_call;
970 int cur_omp_region_idx = 0;
972 /* Create an edge from entry to the first block with executable
973 statements in it. */
974 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
975 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
976 EDGE_FALLTHRU);
978 /* Traverse the basic block array placing edges. */
979 FOR_EACH_BB_FN (bb, cfun)
981 int mer;
983 if (!bb_to_omp_idx.is_empty ())
984 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
986 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
987 if (mer == 1)
988 ab_edge_goto.safe_push (bb);
989 else if (mer == 2)
990 ab_edge_call.safe_push (bb);
992 if (cur_region && bb_to_omp_idx.is_empty ())
993 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
996 /* Computed gotos are hell to deal with, especially if there are
997 lots of them with a large number of destinations. So we factor
998 them to a common computed goto location before we build the
999 edge list. After we convert back to normal form, we will un-factor
1000 the computed gotos since factoring introduces an unwanted jump.
1001 For non-local gotos and abnormal edges from calls to calls that return
1002 twice or forced labels, factor the abnormal edges too, by having all
1003 abnormal edges from the calls go to a common artificial basic block
1004 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1005 basic block to all forced labels and calls returning twice.
1006 We do this per-OpenMP structured block, because those regions
1007 are guaranteed to be single entry single exit by the standard,
1008 so it is not allowed to enter or exit such regions abnormally this way,
1009 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1010 must not transfer control across SESE region boundaries. */
1011 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1013 gimple_stmt_iterator gsi;
1014 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1015 basic_block *dispatcher_bbs = dispatcher_bb_array;
1016 int count = n_basic_blocks_for_fn (cfun);
1018 if (!bb_to_omp_idx.is_empty ())
1019 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1021 FOR_EACH_BB_FN (bb, cfun)
1023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1025 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1026 tree target;
1028 if (!label_stmt)
1029 break;
1031 target = gimple_label_label (label_stmt);
1033 /* Make an edge to every label block that has been marked as a
1034 potential target for a computed goto or a non-local goto. */
1035 if (FORCED_LABEL (target))
1036 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1037 true);
1038 if (DECL_NONLOCAL (target))
1040 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1041 false);
1042 break;
1046 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1047 gsi_next_nondebug (&gsi);
1048 if (!gsi_end_p (gsi))
1050 /* Make an edge to every setjmp-like call. */
1051 gimple *call_stmt = gsi_stmt (gsi);
1052 if (is_gimple_call (call_stmt)
1053 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1054 || gimple_call_builtin_p (call_stmt,
1055 BUILT_IN_SETJMP_RECEIVER)))
1056 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1057 false);
1061 if (!bb_to_omp_idx.is_empty ())
1062 XDELETE (dispatcher_bbs);
1065 omp_free_regions ();
1068 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1069 needed. Returns true if new bbs were created.
1070 Note: This is transitional code, and should not be used for new code. We
1071 should be able to get rid of this by rewriting all target va-arg
1072 gimplification hooks to use an interface gimple_build_cond_value as described
1073 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1075 bool
1076 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1078 gimple *stmt = gsi_stmt (*gsi);
1079 basic_block bb = gimple_bb (stmt);
1080 basic_block lastbb, afterbb;
1081 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1082 edge e;
1083 lastbb = make_blocks_1 (seq, bb);
1084 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1085 return false;
1086 e = split_block (bb, stmt);
1087 /* Move e->dest to come after the new basic blocks. */
1088 afterbb = e->dest;
1089 unlink_block (afterbb);
1090 link_block (afterbb, lastbb);
1091 redirect_edge_succ (e, bb->next_bb);
1092 bb = bb->next_bb;
1093 while (bb != afterbb)
1095 struct omp_region *cur_region = NULL;
1096 profile_count cnt = profile_count::zero ();
1097 bool all = true;
1099 int cur_omp_region_idx = 0;
1100 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1101 gcc_assert (!mer && !cur_region);
1102 add_bb_to_loop (bb, afterbb->loop_father);
1104 edge e;
1105 edge_iterator ei;
1106 FOR_EACH_EDGE (e, ei, bb->preds)
1108 if (e->count ().initialized_p ())
1109 cnt += e->count ();
1110 else
1111 all = false;
1113 tree_guess_outgoing_edge_probabilities (bb);
1114 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1115 bb->count = cnt;
1117 bb = bb->next_bb;
1119 return true;
1122 /* Find the next available discriminator value for LOCUS. The
1123 discriminator distinguishes among several basic blocks that
1124 share a common locus, allowing for more accurate sample-based
1125 profiling. */
1127 static int
1128 next_discriminator_for_locus (int line)
1130 struct locus_discrim_map item;
1131 struct locus_discrim_map **slot;
1133 item.location_line = line;
1134 item.discriminator = 0;
1135 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1136 gcc_assert (slot);
1137 if (*slot == HTAB_EMPTY_ENTRY)
1139 *slot = XNEW (struct locus_discrim_map);
1140 gcc_assert (*slot);
1141 (*slot)->location_line = line;
1142 (*slot)->discriminator = 0;
1144 (*slot)->discriminator++;
1145 return (*slot)->discriminator;
1148 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1150 static bool
1151 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1153 expanded_location to;
1155 if (locus1 == locus2)
1156 return true;
1158 to = expand_location (locus2);
1160 if (from->line != to.line)
1161 return false;
1162 if (from->file == to.file)
1163 return true;
1164 return (from->file != NULL
1165 && to.file != NULL
1166 && filename_cmp (from->file, to.file) == 0);
1169 /* Assign a unique discriminator value to all statements in block bb that
1170 have the same line number as locus. */
1172 static void
1173 assign_discriminator (location_t locus, basic_block bb)
1175 gimple_stmt_iterator gsi;
1176 int discriminator;
1178 if (locus == UNKNOWN_LOCATION)
1179 return;
1181 expanded_location locus_e = expand_location (locus);
1183 discriminator = next_discriminator_for_locus (locus_e.line);
1185 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1187 gimple *stmt = gsi_stmt (gsi);
1188 location_t stmt_locus = gimple_location (stmt);
1189 if (same_line_p (locus, &locus_e, stmt_locus))
1190 gimple_set_location (stmt,
1191 location_with_discriminator (stmt_locus, discriminator));
1195 /* Assign discriminators to statement locations. */
1197 static void
1198 assign_discriminators (void)
1200 basic_block bb;
1202 FOR_EACH_BB_FN (bb, cfun)
1204 edge e;
1205 edge_iterator ei;
1206 gimple_stmt_iterator gsi;
1207 location_t curr_locus = UNKNOWN_LOCATION;
1208 expanded_location curr_locus_e = {};
1209 int curr_discr = 0;
1211 /* Traverse the basic block, if two function calls within a basic block
1212 are mapped to the same line, assign a new discriminator because a call
1213 stmt could be a split point of a basic block. */
1214 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1216 gimple *stmt = gsi_stmt (gsi);
1218 if (curr_locus == UNKNOWN_LOCATION)
1220 curr_locus = gimple_location (stmt);
1221 curr_locus_e = expand_location (curr_locus);
1223 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1225 curr_locus = gimple_location (stmt);
1226 curr_locus_e = expand_location (curr_locus);
1227 curr_discr = 0;
1229 else if (curr_discr != 0)
1231 location_t loc = gimple_location (stmt);
1232 location_t dloc = location_with_discriminator (loc, curr_discr);
1233 gimple_set_location (stmt, dloc);
1235 /* Allocate a new discriminator for CALL stmt. */
1236 if (gimple_code (stmt) == GIMPLE_CALL)
1237 curr_discr = next_discriminator_for_locus (curr_locus);
1240 gimple *last = last_stmt (bb);
1241 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1242 if (locus == UNKNOWN_LOCATION)
1243 continue;
1245 expanded_location locus_e = expand_location (locus);
1247 FOR_EACH_EDGE (e, ei, bb->succs)
1249 gimple *first = first_non_label_stmt (e->dest);
1250 gimple *last = last_stmt (e->dest);
1252 gimple *stmt_on_same_line = NULL;
1253 if (first && same_line_p (locus, &locus_e,
1254 gimple_location (first)))
1255 stmt_on_same_line = first;
1256 else if (last && same_line_p (locus, &locus_e,
1257 gimple_location (last)))
1258 stmt_on_same_line = last;
1260 if (stmt_on_same_line)
1262 if (has_discriminator (gimple_location (stmt_on_same_line))
1263 && !has_discriminator (locus))
1264 assign_discriminator (locus, bb);
1265 else
1266 assign_discriminator (locus, e->dest);
1272 /* Create the edges for a GIMPLE_COND starting at block BB. */
1274 static void
1275 make_cond_expr_edges (basic_block bb)
1277 gcond *entry = as_a <gcond *> (last_stmt (bb));
1278 gimple *then_stmt, *else_stmt;
1279 basic_block then_bb, else_bb;
1280 tree then_label, else_label;
1281 edge e;
1283 gcc_assert (entry);
1284 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1286 /* Entry basic blocks for each component. */
1287 then_label = gimple_cond_true_label (entry);
1288 else_label = gimple_cond_false_label (entry);
1289 then_bb = label_to_block (cfun, then_label);
1290 else_bb = label_to_block (cfun, else_label);
1291 then_stmt = first_stmt (then_bb);
1292 else_stmt = first_stmt (else_bb);
1294 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1295 e->goto_locus = gimple_location (then_stmt);
1296 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1297 if (e)
1298 e->goto_locus = gimple_location (else_stmt);
1300 /* We do not need the labels anymore. */
1301 gimple_cond_set_true_label (entry, NULL_TREE);
1302 gimple_cond_set_false_label (entry, NULL_TREE);
1306 /* Called for each element in the hash table (P) as we delete the
1307 edge to cases hash table.
1309 Clear all the CASE_CHAINs to prevent problems with copying of
1310 SWITCH_EXPRs and structure sharing rules, then free the hash table
1311 element. */
1313 bool
1314 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1316 tree t, next;
1318 for (t = value; t; t = next)
1320 next = CASE_CHAIN (t);
1321 CASE_CHAIN (t) = NULL;
1324 return true;
1327 /* Start recording information mapping edges to case labels. */
1329 void
1330 start_recording_case_labels (void)
1332 gcc_assert (edge_to_cases == NULL);
1333 edge_to_cases = new hash_map<edge, tree>;
1334 touched_switch_bbs = BITMAP_ALLOC (NULL);
1337 /* Return nonzero if we are recording information for case labels. */
1339 static bool
1340 recording_case_labels_p (void)
1342 return (edge_to_cases != NULL);
1345 /* Stop recording information mapping edges to case labels and
1346 remove any information we have recorded. */
1347 void
1348 end_recording_case_labels (void)
1350 bitmap_iterator bi;
1351 unsigned i;
1352 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1353 delete edge_to_cases;
1354 edge_to_cases = NULL;
1355 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1357 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1358 if (bb)
1360 gimple *stmt = last_stmt (bb);
1361 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1362 group_case_labels_stmt (as_a <gswitch *> (stmt));
1365 BITMAP_FREE (touched_switch_bbs);
1368 /* If we are inside a {start,end}_recording_cases block, then return
1369 a chain of CASE_LABEL_EXPRs from T which reference E.
1371 Otherwise return NULL. */
1373 tree
1374 get_cases_for_edge (edge e, gswitch *t)
1376 tree *slot;
1377 size_t i, n;
1379 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1380 chains available. Return NULL so the caller can detect this case. */
1381 if (!recording_case_labels_p ())
1382 return NULL;
1384 slot = edge_to_cases->get (e);
1385 if (slot)
1386 return *slot;
1388 /* If we did not find E in the hash table, then this must be the first
1389 time we have been queried for information about E & T. Add all the
1390 elements from T to the hash table then perform the query again. */
1392 n = gimple_switch_num_labels (t);
1393 for (i = 0; i < n; i++)
1395 tree elt = gimple_switch_label (t, i);
1396 tree lab = CASE_LABEL (elt);
1397 basic_block label_bb = label_to_block (cfun, lab);
1398 edge this_edge = find_edge (e->src, label_bb);
1400 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1401 a new chain. */
1402 tree &s = edge_to_cases->get_or_insert (this_edge);
1403 CASE_CHAIN (elt) = s;
1404 s = elt;
1407 return *edge_to_cases->get (e);
1410 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1412 static void
1413 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1415 size_t i, n;
1417 n = gimple_switch_num_labels (entry);
1419 for (i = 0; i < n; ++i)
1421 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1422 make_edge (bb, label_bb, 0);
1427 /* Return the basic block holding label DEST. */
1429 basic_block
1430 label_to_block (struct function *ifun, tree dest)
1432 int uid = LABEL_DECL_UID (dest);
1434 /* We would die hard when faced by an undefined label. Emit a label to
1435 the very first basic block. This will hopefully make even the dataflow
1436 and undefined variable warnings quite right. */
1437 if (seen_error () && uid < 0)
1439 gimple_stmt_iterator gsi =
1440 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1441 gimple *stmt;
1443 stmt = gimple_build_label (dest);
1444 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1445 uid = LABEL_DECL_UID (dest);
1447 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1448 return NULL;
1449 return (*ifun->cfg->x_label_to_block_map)[uid];
1452 /* Create edges for a goto statement at block BB. Returns true
1453 if abnormal edges should be created. */
1455 static bool
1456 make_goto_expr_edges (basic_block bb)
1458 gimple_stmt_iterator last = gsi_last_bb (bb);
1459 gimple *goto_t = gsi_stmt (last);
1461 /* A simple GOTO creates normal edges. */
1462 if (simple_goto_p (goto_t))
1464 tree dest = gimple_goto_dest (goto_t);
1465 basic_block label_bb = label_to_block (cfun, dest);
1466 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1467 e->goto_locus = gimple_location (goto_t);
1468 gsi_remove (&last, true);
1469 return false;
1472 /* A computed GOTO creates abnormal edges. */
1473 return true;
1476 /* Create edges for an asm statement with labels at block BB. */
1478 static void
1479 make_gimple_asm_edges (basic_block bb)
1481 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1482 int i, n = gimple_asm_nlabels (stmt);
1484 for (i = 0; i < n; ++i)
1486 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1487 basic_block label_bb = label_to_block (cfun, label);
1488 make_edge (bb, label_bb, 0);
1492 /*---------------------------------------------------------------------------
1493 Flowgraph analysis
1494 ---------------------------------------------------------------------------*/
1496 /* Cleanup useless labels in basic blocks. This is something we wish
1497 to do early because it allows us to group case labels before creating
1498 the edges for the CFG, and it speeds up block statement iterators in
1499 all passes later on.
1500 We rerun this pass after CFG is created, to get rid of the labels that
1501 are no longer referenced. After then we do not run it any more, since
1502 (almost) no new labels should be created. */
1504 /* A map from basic block index to the leading label of that block. */
1505 struct label_record
1507 /* The label. */
1508 tree label;
1510 /* True if the label is referenced from somewhere. */
1511 bool used;
1514 /* Given LABEL return the first label in the same basic block. */
1516 static tree
1517 main_block_label (tree label, label_record *label_for_bb)
1519 basic_block bb = label_to_block (cfun, label);
1520 tree main_label = label_for_bb[bb->index].label;
1522 /* label_to_block possibly inserted undefined label into the chain. */
1523 if (!main_label)
1525 label_for_bb[bb->index].label = label;
1526 main_label = label;
1529 label_for_bb[bb->index].used = true;
1530 return main_label;
1533 /* Clean up redundant labels within the exception tree. */
1535 static void
1536 cleanup_dead_labels_eh (label_record *label_for_bb)
1538 eh_landing_pad lp;
1539 eh_region r;
1540 tree lab;
1541 int i;
1543 if (cfun->eh == NULL)
1544 return;
1546 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1547 if (lp && lp->post_landing_pad)
1549 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1550 if (lab != lp->post_landing_pad)
1552 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1553 lp->post_landing_pad = lab;
1554 EH_LANDING_PAD_NR (lab) = lp->index;
1558 FOR_ALL_EH_REGION (r)
1559 switch (r->type)
1561 case ERT_CLEANUP:
1562 case ERT_MUST_NOT_THROW:
1563 break;
1565 case ERT_TRY:
1567 eh_catch c;
1568 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1570 lab = c->label;
1571 if (lab)
1572 c->label = main_block_label (lab, label_for_bb);
1575 break;
1577 case ERT_ALLOWED_EXCEPTIONS:
1578 lab = r->u.allowed.label;
1579 if (lab)
1580 r->u.allowed.label = main_block_label (lab, label_for_bb);
1581 break;
1586 /* Cleanup redundant labels. This is a three-step process:
1587 1) Find the leading label for each block.
1588 2) Redirect all references to labels to the leading labels.
1589 3) Cleanup all useless labels. */
1591 void
1592 cleanup_dead_labels (void)
1594 basic_block bb;
1595 label_record *label_for_bb = XCNEWVEC (struct label_record,
1596 last_basic_block_for_fn (cfun));
1598 /* Find a suitable label for each block. We use the first user-defined
1599 label if there is one, or otherwise just the first label we see. */
1600 FOR_EACH_BB_FN (bb, cfun)
1602 gimple_stmt_iterator i;
1604 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1606 tree label;
1607 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1609 if (!label_stmt)
1610 break;
1612 label = gimple_label_label (label_stmt);
1614 /* If we have not yet seen a label for the current block,
1615 remember this one and see if there are more labels. */
1616 if (!label_for_bb[bb->index].label)
1618 label_for_bb[bb->index].label = label;
1619 continue;
1622 /* If we did see a label for the current block already, but it
1623 is an artificially created label, replace it if the current
1624 label is a user defined label. */
1625 if (!DECL_ARTIFICIAL (label)
1626 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1628 label_for_bb[bb->index].label = label;
1629 break;
1634 /* Now redirect all jumps/branches to the selected label.
1635 First do so for each block ending in a control statement. */
1636 FOR_EACH_BB_FN (bb, cfun)
1638 gimple *stmt = last_stmt (bb);
1639 tree label, new_label;
1641 if (!stmt)
1642 continue;
1644 switch (gimple_code (stmt))
1646 case GIMPLE_COND:
1648 gcond *cond_stmt = as_a <gcond *> (stmt);
1649 label = gimple_cond_true_label (cond_stmt);
1650 if (label)
1652 new_label = main_block_label (label, label_for_bb);
1653 if (new_label != label)
1654 gimple_cond_set_true_label (cond_stmt, new_label);
1657 label = gimple_cond_false_label (cond_stmt);
1658 if (label)
1660 new_label = main_block_label (label, label_for_bb);
1661 if (new_label != label)
1662 gimple_cond_set_false_label (cond_stmt, new_label);
1665 break;
1667 case GIMPLE_SWITCH:
1669 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1670 size_t i, n = gimple_switch_num_labels (switch_stmt);
1672 /* Replace all destination labels. */
1673 for (i = 0; i < n; ++i)
1675 tree case_label = gimple_switch_label (switch_stmt, i);
1676 label = CASE_LABEL (case_label);
1677 new_label = main_block_label (label, label_for_bb);
1678 if (new_label != label)
1679 CASE_LABEL (case_label) = new_label;
1681 break;
1684 case GIMPLE_ASM:
1686 gasm *asm_stmt = as_a <gasm *> (stmt);
1687 int i, n = gimple_asm_nlabels (asm_stmt);
1689 for (i = 0; i < n; ++i)
1691 tree cons = gimple_asm_label_op (asm_stmt, i);
1692 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1693 TREE_VALUE (cons) = label;
1695 break;
1698 /* We have to handle gotos until they're removed, and we don't
1699 remove them until after we've created the CFG edges. */
1700 case GIMPLE_GOTO:
1701 if (!computed_goto_p (stmt))
1703 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1704 label = gimple_goto_dest (goto_stmt);
1705 new_label = main_block_label (label, label_for_bb);
1706 if (new_label != label)
1707 gimple_goto_set_dest (goto_stmt, new_label);
1709 break;
1711 case GIMPLE_TRANSACTION:
1713 gtransaction *txn = as_a <gtransaction *> (stmt);
1715 label = gimple_transaction_label_norm (txn);
1716 if (label)
1718 new_label = main_block_label (label, label_for_bb);
1719 if (new_label != label)
1720 gimple_transaction_set_label_norm (txn, new_label);
1723 label = gimple_transaction_label_uninst (txn);
1724 if (label)
1726 new_label = main_block_label (label, label_for_bb);
1727 if (new_label != label)
1728 gimple_transaction_set_label_uninst (txn, new_label);
1731 label = gimple_transaction_label_over (txn);
1732 if (label)
1734 new_label = main_block_label (label, label_for_bb);
1735 if (new_label != label)
1736 gimple_transaction_set_label_over (txn, new_label);
1739 break;
1741 default:
1742 break;
1746 /* Do the same for the exception region tree labels. */
1747 cleanup_dead_labels_eh (label_for_bb);
1749 /* Finally, purge dead labels. All user-defined labels and labels that
1750 can be the target of non-local gotos and labels which have their
1751 address taken are preserved. */
1752 FOR_EACH_BB_FN (bb, cfun)
1754 gimple_stmt_iterator i;
1755 tree label_for_this_bb = label_for_bb[bb->index].label;
1757 if (!label_for_this_bb)
1758 continue;
1760 /* If the main label of the block is unused, we may still remove it. */
1761 if (!label_for_bb[bb->index].used)
1762 label_for_this_bb = NULL;
1764 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1766 tree label;
1767 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1769 if (!label_stmt)
1770 break;
1772 label = gimple_label_label (label_stmt);
1774 if (label == label_for_this_bb
1775 || !DECL_ARTIFICIAL (label)
1776 || DECL_NONLOCAL (label)
1777 || FORCED_LABEL (label))
1778 gsi_next (&i);
1779 else
1781 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1782 gsi_remove (&i, true);
1787 free (label_for_bb);
1790 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1791 the ones jumping to the same label.
1792 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1794 bool
1795 group_case_labels_stmt (gswitch *stmt)
1797 int old_size = gimple_switch_num_labels (stmt);
1798 int i, next_index, new_size;
1799 basic_block default_bb = NULL;
1800 hash_set<tree> *removed_labels = NULL;
1802 default_bb = gimple_switch_default_bb (cfun, stmt);
1804 /* Look for possible opportunities to merge cases. */
1805 new_size = i = 1;
1806 while (i < old_size)
1808 tree base_case, base_high;
1809 basic_block base_bb;
1811 base_case = gimple_switch_label (stmt, i);
1813 gcc_assert (base_case);
1814 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1816 /* Discard cases that have the same destination as the default case or
1817 whose destination blocks have already been removed as unreachable. */
1818 if (base_bb == NULL
1819 || base_bb == default_bb
1820 || (removed_labels
1821 && removed_labels->contains (CASE_LABEL (base_case))))
1823 i++;
1824 continue;
1827 base_high = CASE_HIGH (base_case)
1828 ? CASE_HIGH (base_case)
1829 : CASE_LOW (base_case);
1830 next_index = i + 1;
1832 /* Try to merge case labels. Break out when we reach the end
1833 of the label vector or when we cannot merge the next case
1834 label with the current one. */
1835 while (next_index < old_size)
1837 tree merge_case = gimple_switch_label (stmt, next_index);
1838 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1839 wide_int bhp1 = wi::to_wide (base_high) + 1;
1841 /* Merge the cases if they jump to the same place,
1842 and their ranges are consecutive. */
1843 if (merge_bb == base_bb
1844 && (removed_labels == NULL
1845 || !removed_labels->contains (CASE_LABEL (merge_case)))
1846 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1848 base_high
1849 = (CASE_HIGH (merge_case)
1850 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1851 CASE_HIGH (base_case) = base_high;
1852 next_index++;
1854 else
1855 break;
1858 /* Discard cases that have an unreachable destination block. */
1859 if (EDGE_COUNT (base_bb->succs) == 0
1860 && gimple_seq_unreachable_p (bb_seq (base_bb))
1861 /* Don't optimize this if __builtin_unreachable () is the
1862 implicitly added one by the C++ FE too early, before
1863 -Wreturn-type can be diagnosed. We'll optimize it later
1864 during switchconv pass or any other cfg cleanup. */
1865 && (gimple_in_ssa_p (cfun)
1866 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1867 != BUILTINS_LOCATION)))
1869 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1870 if (base_edge != NULL)
1872 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1873 !gsi_end_p (gsi); gsi_next (&gsi))
1874 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1876 if (FORCED_LABEL (gimple_label_label (stmt))
1877 || DECL_NONLOCAL (gimple_label_label (stmt)))
1879 /* Forced/non-local labels aren't going to be removed,
1880 but they will be moved to some neighbouring basic
1881 block. If some later case label refers to one of
1882 those labels, we should throw that case away rather
1883 than keeping it around and refering to some random
1884 other basic block without an edge to it. */
1885 if (removed_labels == NULL)
1886 removed_labels = new hash_set<tree>;
1887 removed_labels->add (gimple_label_label (stmt));
1890 else
1891 break;
1892 remove_edge_and_dominated_blocks (base_edge);
1894 i = next_index;
1895 continue;
1898 if (new_size < i)
1899 gimple_switch_set_label (stmt, new_size,
1900 gimple_switch_label (stmt, i));
1901 i = next_index;
1902 new_size++;
1905 gcc_assert (new_size <= old_size);
1907 if (new_size < old_size)
1908 gimple_switch_set_num_labels (stmt, new_size);
1910 delete removed_labels;
1911 return new_size < old_size;
1914 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1915 and scan the sorted vector of cases. Combine the ones jumping to the
1916 same label. */
1918 bool
1919 group_case_labels (void)
1921 basic_block bb;
1922 bool changed = false;
1924 FOR_EACH_BB_FN (bb, cfun)
1926 gimple *stmt = last_stmt (bb);
1927 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1928 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1931 return changed;
1934 /* Checks whether we can merge block B into block A. */
1936 static bool
1937 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1939 gimple *stmt;
1941 if (!single_succ_p (a))
1942 return false;
1944 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1945 return false;
1947 if (single_succ (a) != b)
1948 return false;
1950 if (!single_pred_p (b))
1951 return false;
1953 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1954 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1955 return false;
1957 /* If A ends by a statement causing exceptions or something similar, we
1958 cannot merge the blocks. */
1959 stmt = last_stmt (a);
1960 if (stmt && stmt_ends_bb_p (stmt))
1961 return false;
1963 /* Do not allow a block with only a non-local label to be merged. */
1964 if (stmt)
1965 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1966 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1967 return false;
1969 /* Examine the labels at the beginning of B. */
1970 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1971 gsi_next (&gsi))
1973 tree lab;
1974 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1975 if (!label_stmt)
1976 break;
1977 lab = gimple_label_label (label_stmt);
1979 /* Do not remove user forced labels or for -O0 any user labels. */
1980 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1981 return false;
1984 /* Protect simple loop latches. We only want to avoid merging
1985 the latch with the loop header or with a block in another
1986 loop in this case. */
1987 if (current_loops
1988 && b->loop_father->latch == b
1989 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1990 && (b->loop_father->header == a
1991 || b->loop_father != a->loop_father))
1992 return false;
1994 /* It must be possible to eliminate all phi nodes in B. If ssa form
1995 is not up-to-date and a name-mapping is registered, we cannot eliminate
1996 any phis. Symbols marked for renaming are never a problem though. */
1997 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1998 gsi_next (&gsi))
2000 gphi *phi = gsi.phi ();
2001 /* Technically only new names matter. */
2002 if (name_registered_for_update_p (PHI_RESULT (phi)))
2003 return false;
2006 /* When not optimizing, don't merge if we'd lose goto_locus. */
2007 if (!optimize
2008 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2010 location_t goto_locus = single_succ_edge (a)->goto_locus;
2011 gimple_stmt_iterator prev, next;
2012 prev = gsi_last_nondebug_bb (a);
2013 next = gsi_after_labels (b);
2014 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2015 gsi_next_nondebug (&next);
2016 if ((gsi_end_p (prev)
2017 || gimple_location (gsi_stmt (prev)) != goto_locus)
2018 && (gsi_end_p (next)
2019 || gimple_location (gsi_stmt (next)) != goto_locus))
2020 return false;
2023 return true;
2026 /* Replaces all uses of NAME by VAL. */
2028 void
2029 replace_uses_by (tree name, tree val)
2031 imm_use_iterator imm_iter;
2032 use_operand_p use;
2033 gimple *stmt;
2034 edge e;
2036 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2038 /* Mark the block if we change the last stmt in it. */
2039 if (cfgcleanup_altered_bbs
2040 && stmt_ends_bb_p (stmt))
2041 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2043 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2045 replace_exp (use, val);
2047 if (gimple_code (stmt) == GIMPLE_PHI)
2049 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2050 PHI_ARG_INDEX_FROM_USE (use));
2051 if (e->flags & EDGE_ABNORMAL
2052 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2054 /* This can only occur for virtual operands, since
2055 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2056 would prevent replacement. */
2057 gcc_checking_assert (virtual_operand_p (name));
2058 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2063 if (gimple_code (stmt) != GIMPLE_PHI)
2065 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2066 gimple *orig_stmt = stmt;
2067 size_t i;
2069 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2070 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2071 only change sth from non-invariant to invariant, and only
2072 when propagating constants. */
2073 if (is_gimple_min_invariant (val))
2074 for (i = 0; i < gimple_num_ops (stmt); i++)
2076 tree op = gimple_op (stmt, i);
2077 /* Operands may be empty here. For example, the labels
2078 of a GIMPLE_COND are nulled out following the creation
2079 of the corresponding CFG edges. */
2080 if (op && TREE_CODE (op) == ADDR_EXPR)
2081 recompute_tree_invariant_for_addr_expr (op);
2084 if (fold_stmt (&gsi))
2085 stmt = gsi_stmt (gsi);
2087 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2088 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2090 update_stmt (stmt);
2094 gcc_checking_assert (has_zero_uses (name));
2096 /* Also update the trees stored in loop structures. */
2097 if (current_loops)
2099 for (auto loop : loops_list (cfun, 0))
2100 substitute_in_loop_info (loop, name, val);
2104 /* Merge block B into block A. */
2106 static void
2107 gimple_merge_blocks (basic_block a, basic_block b)
2109 gimple_stmt_iterator last, gsi;
2110 gphi_iterator psi;
2112 if (dump_file)
2113 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2115 /* Remove all single-valued PHI nodes from block B of the form
2116 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2117 gsi = gsi_last_bb (a);
2118 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2120 gimple *phi = gsi_stmt (psi);
2121 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2122 gimple *copy;
2123 bool may_replace_uses = (virtual_operand_p (def)
2124 || may_propagate_copy (def, use));
2126 /* In case we maintain loop closed ssa form, do not propagate arguments
2127 of loop exit phi nodes. */
2128 if (current_loops
2129 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2130 && !virtual_operand_p (def)
2131 && TREE_CODE (use) == SSA_NAME
2132 && a->loop_father != b->loop_father)
2133 may_replace_uses = false;
2135 if (!may_replace_uses)
2137 gcc_assert (!virtual_operand_p (def));
2139 /* Note that just emitting the copies is fine -- there is no problem
2140 with ordering of phi nodes. This is because A is the single
2141 predecessor of B, therefore results of the phi nodes cannot
2142 appear as arguments of the phi nodes. */
2143 copy = gimple_build_assign (def, use);
2144 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2145 remove_phi_node (&psi, false);
2147 else
2149 /* If we deal with a PHI for virtual operands, we can simply
2150 propagate these without fussing with folding or updating
2151 the stmt. */
2152 if (virtual_operand_p (def))
2154 imm_use_iterator iter;
2155 use_operand_p use_p;
2156 gimple *stmt;
2158 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2159 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2160 SET_USE (use_p, use);
2162 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2163 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2165 else
2166 replace_uses_by (def, use);
2168 remove_phi_node (&psi, true);
2172 /* Ensure that B follows A. */
2173 move_block_after (b, a);
2175 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2176 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2178 /* Remove labels from B and set gimple_bb to A for other statements. */
2179 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2181 gimple *stmt = gsi_stmt (gsi);
2182 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2184 tree label = gimple_label_label (label_stmt);
2185 int lp_nr;
2187 gsi_remove (&gsi, false);
2189 /* Now that we can thread computed gotos, we might have
2190 a situation where we have a forced label in block B
2191 However, the label at the start of block B might still be
2192 used in other ways (think about the runtime checking for
2193 Fortran assigned gotos). So we cannot just delete the
2194 label. Instead we move the label to the start of block A. */
2195 if (FORCED_LABEL (label))
2197 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2198 tree first_label = NULL_TREE;
2199 if (!gsi_end_p (dest_gsi))
2200 if (glabel *first_label_stmt
2201 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2202 first_label = gimple_label_label (first_label_stmt);
2203 if (first_label
2204 && (DECL_NONLOCAL (first_label)
2205 || EH_LANDING_PAD_NR (first_label) != 0))
2206 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2207 else
2208 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2210 /* Other user labels keep around in a form of a debug stmt. */
2211 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2213 gimple *dbg = gimple_build_debug_bind (label,
2214 integer_zero_node,
2215 stmt);
2216 gimple_debug_bind_reset_value (dbg);
2217 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2220 lp_nr = EH_LANDING_PAD_NR (label);
2221 if (lp_nr)
2223 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2224 lp->post_landing_pad = NULL;
2227 else
2229 gimple_set_bb (stmt, a);
2230 gsi_next (&gsi);
2234 /* When merging two BBs, if their counts are different, the larger count
2235 is selected as the new bb count. This is to handle inconsistent
2236 profiles. */
2237 if (a->loop_father == b->loop_father)
2239 a->count = a->count.merge (b->count);
2242 /* Merge the sequences. */
2243 last = gsi_last_bb (a);
2244 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2245 set_bb_seq (b, NULL);
2247 if (cfgcleanup_altered_bbs)
2248 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2252 /* Return the one of two successors of BB that is not reachable by a
2253 complex edge, if there is one. Else, return BB. We use
2254 this in optimizations that use post-dominators for their heuristics,
2255 to catch the cases in C++ where function calls are involved. */
2257 basic_block
2258 single_noncomplex_succ (basic_block bb)
2260 edge e0, e1;
2261 if (EDGE_COUNT (bb->succs) != 2)
2262 return bb;
2264 e0 = EDGE_SUCC (bb, 0);
2265 e1 = EDGE_SUCC (bb, 1);
2266 if (e0->flags & EDGE_COMPLEX)
2267 return e1->dest;
2268 if (e1->flags & EDGE_COMPLEX)
2269 return e0->dest;
2271 return bb;
2274 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2276 void
2277 notice_special_calls (gcall *call)
2279 int flags = gimple_call_flags (call);
2281 if (flags & ECF_MAY_BE_ALLOCA)
2282 cfun->calls_alloca = true;
2283 if (flags & ECF_RETURNS_TWICE)
2284 cfun->calls_setjmp = true;
2288 /* Clear flags set by notice_special_calls. Used by dead code removal
2289 to update the flags. */
2291 void
2292 clear_special_calls (void)
2294 cfun->calls_alloca = false;
2295 cfun->calls_setjmp = false;
2298 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2300 static void
2301 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2303 /* Since this block is no longer reachable, we can just delete all
2304 of its PHI nodes. */
2305 remove_phi_nodes (bb);
2307 /* Remove edges to BB's successors. */
2308 while (EDGE_COUNT (bb->succs) > 0)
2309 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB.  Deletes every statement
   (releasing the SSA names they define), relocates forced/non-local
   labels that may still be referenced into a surviving block, and
   finally removes BB's PHI nodes and outgoing edges.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      class loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  /* Forced and non-local labels may still be referenced from
	     elsewhere, so they are moved instead of deleted.  */
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      /* Don't move any labels into ENTRY block.  */
	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		{
		  new_bb = single_succ (new_bb);
		  gcc_assert (new_bb != bb);
		}
	      if ((unsigned) bb->index < bb_to_omp_idx.length ()
		  && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
		      || (bb_to_omp_idx[bb->index]
			  != bb_to_omp_idx[new_bb->index])))
		{
		  /* During cfg pass make sure to put orphaned labels
		     into the right OMP region.  */
		  unsigned int i;
		  int idx;
		  new_bb = NULL;
		  /* Pick any other block recorded in the same OMP region
		     as BB.  */
		  FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
		    if (i >= NUM_FIXED_BLOCKS
			&& idx == bb_to_omp_idx[bb->index]
			&& i != (unsigned) bb->index)
		      {
			new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
			break;
		      }
		  if (new_bb == NULL)
		    {
		      new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (new_bb != bb);
		    }
		}
	      new_gsi = gsi_after_labels (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* Resume the backward walk: after gsi_remove, I refers to the
	     statement following the removed one (or is the end iterator
	     when the last statement was removed), so step back or
	     re-fetch the new last statement as appropriate.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  /* BB no longer belongs to any OMP region.  */
  if ((unsigned) bb->index < bb_to_omp_idx.length ())
    bb_to_omp_idx[bb->index] = -1;
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2425 /* Given a basic block BB and a value VAL for use in the final statement
2426 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2427 the edge that will be taken out of the block.
2428 If VAL is NULL_TREE, then the current value of the final statement's
2429 predicate or index is used.
2430 If the value does not match a unique edge, NULL is returned. */
2432 edge
2433 find_taken_edge (basic_block bb, tree val)
2435 gimple *stmt;
2437 stmt = last_stmt (bb);
2439 /* Handle ENTRY and EXIT. */
2440 if (!stmt)
2441 return NULL;
2443 if (gimple_code (stmt) == GIMPLE_COND)
2444 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2446 if (gimple_code (stmt) == GIMPLE_SWITCH)
2447 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2449 if (computed_goto_p (stmt))
2451 /* Only optimize if the argument is a label, if the argument is
2452 not a label then we cannot construct a proper CFG.
2454 It may be the case that we only need to allow the LABEL_REF to
2455 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2456 appear inside a LABEL_EXPR just to be safe. */
2457 if (val
2458 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2459 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2460 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2463 /* Otherwise we only know the taken successor edge if it's unique. */
2464 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2467 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2468 statement, determine which of the outgoing edges will be taken out of the
2469 block. Return NULL if either edge may be taken. */
2471 static edge
2472 find_taken_edge_computed_goto (basic_block bb, tree val)
2474 basic_block dest;
2475 edge e = NULL;
2477 dest = label_to_block (cfun, val);
2478 if (dest)
2479 e = find_edge (bb, dest);
2481 /* It's possible for find_edge to return NULL here on invalid code
2482 that abuses the labels-as-values extension (e.g. code that attempts to
2483 jump *between* functions via stored labels-as-values; PR 84136).
2484 If so, then we simply return that NULL for the edge.
2485 We don't currently have a way of detecting such invalid code, so we
2486 can't assert that it was the case when a NULL edge occurs here. */
2488 return e;
2491 /* Given COND_STMT and a constant value VAL for use as the predicate,
2492 determine which of the two edges will be taken out of
2493 the statement's block. Return NULL if either edge may be taken.
2494 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2495 is used. */
2497 static edge
2498 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2500 edge true_edge, false_edge;
2502 if (val == NULL_TREE)
2504 /* Use the current value of the predicate. */
2505 if (gimple_cond_true_p (cond_stmt))
2506 val = integer_one_node;
2507 else if (gimple_cond_false_p (cond_stmt))
2508 val = integer_zero_node;
2509 else
2510 return NULL;
2512 else if (TREE_CODE (val) != INTEGER_CST)
2513 return NULL;
2515 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2516 &true_edge, &false_edge);
2518 return (integer_zerop (val) ? false_edge : true_edge);
2521 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2522 which edge will be taken out of the statement's block. Return NULL if any
2523 edge may be taken.
2524 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2525 is used. */
2527 edge
2528 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2530 basic_block dest_bb;
2531 edge e;
2532 tree taken_case;
2534 if (gimple_switch_num_labels (switch_stmt) == 1)
2535 taken_case = gimple_switch_default_label (switch_stmt);
2536 else
2538 if (val == NULL_TREE)
2539 val = gimple_switch_index (switch_stmt);
2540 if (TREE_CODE (val) != INTEGER_CST)
2541 return NULL;
2542 else
2543 taken_case = find_case_label_for_value (switch_stmt, val);
2545 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2547 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2548 gcc_assert (e);
2549 return e;
2553 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2554 We can make optimal use here of the fact that the case labels are
2555 sorted: We can do a binary search for a case matching VAL. */
2557 tree
2558 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2560 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2561 tree default_case = gimple_switch_default_label (switch_stmt);
2563 for (low = 0, high = n; high - low > 1; )
2565 size_t i = (high + low) / 2;
2566 tree t = gimple_switch_label (switch_stmt, i);
2567 int cmp;
2569 /* Cache the result of comparing CASE_LOW and val. */
2570 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2572 if (cmp > 0)
2573 high = i;
2574 else
2575 low = i;
2577 if (CASE_HIGH (t) == NULL)
2579 /* A singe-valued case label. */
2580 if (cmp == 0)
2581 return t;
2583 else
2585 /* A case range. We can only handle integer ranges. */
2586 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2587 return t;
2591 return default_case;
2595 /* Dump a basic block on stderr. */
2597 void
2598 gimple_debug_bb (basic_block bb)
2600 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2604 /* Dump basic block with index N on stderr. */
2606 basic_block
2607 gimple_debug_bb_n (int n)
2609 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2610 return BASIC_BLOCK_FOR_FN (cfun, n);
2614 /* Dump the CFG on stderr.
2616 FLAGS are the same used by the tree dumping functions
2617 (see TDF_* in dumpfile.h). */
2619 void
2620 gimple_debug_cfg (dump_flags_t flags)
2622 gimple_dump_cfg (stderr, flags);
2626 /* Dump the program showing basic block boundaries on the given FILE.
2628 FLAGS are the same used by the tree dumping functions (see TDF_* in
2629 tree.h). */
2631 void
2632 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2634 if (flags & TDF_DETAILS)
2636 dump_function_header (file, current_function_decl, flags);
2637 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2638 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2639 last_basic_block_for_fn (cfun));
2641 brief_dump_cfg (file, flags);
2642 fprintf (file, "\n");
2645 if (flags & TDF_STATS)
2646 dump_cfg_stats (file);
2648 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2652 /* Dump CFG statistics on FILE. */
2654 void
2655 dump_cfg_stats (FILE *file)
2657 static long max_num_merged_labels = 0;
2658 unsigned long size, total = 0;
2659 long num_edges;
2660 basic_block bb;
2661 const char * const fmt_str = "%-30s%-13s%12s\n";
2662 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2663 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2664 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2665 const char *funcname = current_function_name ();
2667 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2669 fprintf (file, "---------------------------------------------------------\n");
2670 fprintf (file, fmt_str, "", " Number of ", "Memory");
2671 fprintf (file, fmt_str, "", " instances ", "used ");
2672 fprintf (file, "---------------------------------------------------------\n");
2674 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2675 total += size;
2676 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2677 SIZE_AMOUNT (size));
2679 num_edges = 0;
2680 FOR_EACH_BB_FN (bb, cfun)
2681 num_edges += EDGE_COUNT (bb->succs);
2682 size = num_edges * sizeof (class edge_def);
2683 total += size;
2684 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2686 fprintf (file, "---------------------------------------------------------\n");
2687 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2688 SIZE_AMOUNT (total));
2689 fprintf (file, "---------------------------------------------------------\n");
2690 fprintf (file, "\n");
2692 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2693 max_num_merged_labels = cfg_stats.num_merged_labels;
2695 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2696 cfg_stats.num_merged_labels, max_num_merged_labels);
2698 fprintf (file, "\n");
2702 /* Dump CFG statistics on stderr. Keep extern so that it's always
2703 linked in the final executable. */
2705 DEBUG_FUNCTION void
2706 debug_cfg_stats (void)
2708 dump_cfg_stats (stderr);
2711 /*---------------------------------------------------------------------------
2712 Miscellaneous helpers
2713 ---------------------------------------------------------------------------*/
2715 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2716 flow. Transfers of control flow associated with EH are excluded. */
2718 static bool
2719 call_can_make_abnormal_goto (gimple *t)
2721 /* If the function has no non-local labels, then a call cannot make an
2722 abnormal transfer of control. */
2723 if (!cfun->has_nonlocal_label
2724 && !cfun->calls_setjmp)
2725 return false;
2727 /* Likewise if the call has no side effects. */
2728 if (!gimple_has_side_effects (t))
2729 return false;
2731 /* Likewise if the called function is leaf. */
2732 if (gimple_call_flags (t) & ECF_LEAF)
2733 return false;
2735 return true;
2739 /* Return true if T can make an abnormal transfer of control flow.
2740 Transfers of control flow associated with EH are excluded. */
2742 bool
2743 stmt_can_make_abnormal_goto (gimple *t)
2745 if (computed_goto_p (t))
2746 return true;
2747 if (is_gimple_call (t))
2748 return call_can_make_abnormal_goto (t);
2749 return false;
2753 /* Return true if T represents a stmt that always transfers control. */
2755 bool
2756 is_ctrl_stmt (gimple *t)
2758 switch (gimple_code (t))
2760 case GIMPLE_COND:
2761 case GIMPLE_SWITCH:
2762 case GIMPLE_GOTO:
2763 case GIMPLE_RETURN:
2764 case GIMPLE_RESX:
2765 return true;
2766 default:
2767 return false;
2772 /* Return true if T is a statement that may alter the flow of control
2773 (e.g., a call to a non-returning function). */
2775 bool
2776 is_ctrl_altering_stmt (gimple *t)
2778 gcc_assert (t);
2780 switch (gimple_code (t))
2782 case GIMPLE_CALL:
2783 /* Per stmt call flag indicates whether the call could alter
2784 controlflow. */
2785 if (gimple_call_ctrl_altering_p (t))
2786 return true;
2787 break;
2789 case GIMPLE_EH_DISPATCH:
2790 /* EH_DISPATCH branches to the individual catch handlers at
2791 this level of a try or allowed-exceptions region. It can
2792 fallthru to the next statement as well. */
2793 return true;
2795 case GIMPLE_ASM:
2796 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2797 return true;
2798 break;
2800 CASE_GIMPLE_OMP:
2801 /* OpenMP directives alter control flow. */
2802 return true;
2804 case GIMPLE_TRANSACTION:
2805 /* A transaction start alters control flow. */
2806 return true;
2808 default:
2809 break;
2812 /* If a statement can throw, it alters control flow. */
2813 return stmt_can_throw_internal (cfun, t);
2817 /* Return true if T is a simple local goto. */
2819 bool
2820 simple_goto_p (gimple *t)
2822 return (gimple_code (t) == GIMPLE_GOTO
2823 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2827 /* Return true if STMT should start a new basic block. PREV_STMT is
2828 the statement preceding STMT. It is used when STMT is a label or a
2829 case label. Labels should only start a new basic block if their
2830 previous statement wasn't a label. Otherwise, sequence of labels
2831 would generate unnecessary basic blocks that only contain a single
2832 label. */
2834 static inline bool
2835 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2837 if (stmt == NULL)
2838 return false;
2840 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2841 any nondebug stmts in the block. We don't want to start another
2842 block in this case: the debug stmt will already have started the
2843 one STMT would start if we weren't outputting debug stmts. */
2844 if (prev_stmt && is_gimple_debug (prev_stmt))
2845 return false;
2847 /* Labels start a new basic block only if the preceding statement
2848 wasn't a label of the same type. This prevents the creation of
2849 consecutive blocks that have nothing but a single label. */
2850 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2852 /* Nonlocal and computed GOTO targets always start a new block. */
2853 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2854 || FORCED_LABEL (gimple_label_label (label_stmt)))
2855 return true;
2857 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2859 if (DECL_NONLOCAL (gimple_label_label (plabel))
2860 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2861 return true;
2863 cfg_stats.num_merged_labels++;
2864 return false;
2866 else
2867 return true;
2869 else if (gimple_code (stmt) == GIMPLE_CALL)
2871 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2872 /* setjmp acts similar to a nonlocal GOTO target and thus should
2873 start a new block. */
2874 return true;
2875 if (gimple_call_internal_p (stmt, IFN_PHI)
2876 && prev_stmt
2877 && gimple_code (prev_stmt) != GIMPLE_LABEL
2878 && (gimple_code (prev_stmt) != GIMPLE_CALL
2879 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2880 /* PHI nodes start a new block unless preceeded by a label
2881 or another PHI. */
2882 return true;
2885 return false;
2889 /* Return true if T should end a basic block. */
2891 bool
2892 stmt_ends_bb_p (gimple *t)
2894 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2897 /* Remove block annotations and other data structures. */
2899 void
2900 delete_tree_cfg_annotations (struct function *fn)
2902 vec_free (label_to_block_map_for_fn (fn));
2905 /* Return the virtual phi in BB. */
2907 gphi *
2908 get_virtual_phi (basic_block bb)
2910 for (gphi_iterator gsi = gsi_start_phis (bb);
2911 !gsi_end_p (gsi);
2912 gsi_next (&gsi))
2914 gphi *phi = gsi.phi ();
2916 if (virtual_operand_p (PHI_RESULT (phi)))
2917 return phi;
2920 return NULL;
2923 /* Return the first statement in basic block BB. */
2925 gimple *
2926 first_stmt (basic_block bb)
2928 gimple_stmt_iterator i = gsi_start_bb (bb);
2929 gimple *stmt = NULL;
2931 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2933 gsi_next (&i);
2934 stmt = NULL;
2936 return stmt;
2939 /* Return the first non-label statement in basic block BB. */
2941 static gimple *
2942 first_non_label_stmt (basic_block bb)
2944 gimple_stmt_iterator i = gsi_start_bb (bb);
2945 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2946 gsi_next (&i);
2947 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2950 /* Return the last statement in basic block BB. */
2952 gimple *
2953 last_stmt (basic_block bb)
2955 gimple_stmt_iterator i = gsi_last_bb (bb);
2956 gimple *stmt = NULL;
2958 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2960 gsi_prev (&i);
2961 stmt = NULL;
2963 return stmt;
2966 /* Return the last statement of an otherwise empty block. Return NULL
2967 if the block is totally empty, or if it contains more than one
2968 statement. */
2970 gimple *
2971 last_and_only_stmt (basic_block bb)
2973 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2974 gimple *last, *prev;
2976 if (gsi_end_p (i))
2977 return NULL;
2979 last = gsi_stmt (i);
2980 gsi_prev_nondebug (&i);
2981 if (gsi_end_p (i))
2982 return last;
2984 /* Empty statements should no longer appear in the instruction stream.
2985 Everything that might have appeared before should be deleted by
2986 remove_useless_stmts, and the optimizers should just gsi_remove
2987 instead of smashing with build_empty_stmt.
2989 Thus the only thing that should appear here in a block containing
2990 one executable statement is a label. */
2991 prev = gsi_stmt (i);
2992 if (gimple_code (prev) == GIMPLE_LABEL)
2993 return last;
2994 else
2995 return NULL;
2998 /* Returns the basic block after which the new basic block created
2999 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3000 near its "logical" location. This is of most help to humans looking
3001 at debugging dumps. */
3003 basic_block
3004 split_edge_bb_loc (edge edge_in)
3006 basic_block dest = edge_in->dest;
3007 basic_block dest_prev = dest->prev_bb;
3009 if (dest_prev)
3011 edge e = find_edge (dest_prev, dest);
3012 if (e && !(e->flags & EDGE_COMPLEX))
3013 return edge_in->src;
3015 return dest_prev;
3018 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3019 Abort on abnormal edges. */
3021 static basic_block
3022 gimple_split_edge (edge edge_in)
3024 basic_block new_bb, after_bb, dest;
3025 edge new_edge, e;
3027 /* Abnormal edges cannot be split. */
3028 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3030 dest = edge_in->dest;
3032 after_bb = split_edge_bb_loc (edge_in);
3034 new_bb = create_empty_bb (after_bb);
3035 new_bb->count = edge_in->count ();
3037 /* We want to avoid re-allocating PHIs when we first
3038 add the fallthru edge from new_bb to dest but we also
3039 want to avoid changing PHI argument order when
3040 first redirecting edge_in away from dest. The former
3041 avoids changing PHI argument order by adding them
3042 last and then the redirection swapping it back into
3043 place by means of unordered remove.
3044 So hack around things by temporarily removing all PHIs
3045 from the destination during the edge redirection and then
3046 making sure the edges stay in order. */
3047 gimple_seq saved_phis = phi_nodes (dest);
3048 unsigned old_dest_idx = edge_in->dest_idx;
3049 set_phi_nodes (dest, NULL);
3050 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3051 e = redirect_edge_and_branch (edge_in, new_bb);
3052 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3053 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3054 dest->il.gimple.phi_nodes = saved_phis;
3056 return new_bb;
3060 /* Verify properties of the address expression T whose base should be
3061 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3063 static bool
3064 verify_address (tree t, bool verify_addressable)
3066 bool old_constant;
3067 bool old_side_effects;
3068 bool new_constant;
3069 bool new_side_effects;
3071 old_constant = TREE_CONSTANT (t);
3072 old_side_effects = TREE_SIDE_EFFECTS (t);
3074 recompute_tree_invariant_for_addr_expr (t);
3075 new_side_effects = TREE_SIDE_EFFECTS (t);
3076 new_constant = TREE_CONSTANT (t);
3078 if (old_constant != new_constant)
3080 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3081 return true;
3083 if (old_side_effects != new_side_effects)
3085 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3086 return true;
3089 tree base = TREE_OPERAND (t, 0);
3090 while (handled_component_p (base))
3091 base = TREE_OPERAND (base, 0);
3093 if (!(VAR_P (base)
3094 || TREE_CODE (base) == PARM_DECL
3095 || TREE_CODE (base) == RESULT_DECL))
3096 return false;
3098 if (verify_addressable && !TREE_ADDRESSABLE (base))
3100 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3101 return true;
3104 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  const char *code_name = get_tree_code_name (TREE_CODE (expr));

  /* First handle a top-level REALPART_EXPR, IMAGPART_EXPR or
     BIT_FIELD_REF; these are only valid at the outermost position.  */
  if (TREE_CODE (expr) == REALPART_EXPR
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!is_gimple_reg_type (TREE_TYPE (expr)))
	{
	  error ("non-scalar %qs", code_name);
	  return true;
	}

      if (TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  tree t1 = TREE_OPERAND (expr, 1);
	  tree t2 = TREE_OPERAND (expr, 2);
	  poly_uint64 size, bitpos;
	  /* Position and size must be bitsizetype constants.  */
	  if (!poly_int_tree_p (t1, &size)
	      || !poly_int_tree_p (t2, &bitpos)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to %qs", code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
	    {
	      error ("integral result type precision does not match "
		     "field size of %qs", code_name);
	      return true;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
				size))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of %qs",
		     code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
	      && !type_has_mode_precision_p (TREE_TYPE (op)))
	    {
	      error ("%qs of non-mode-precision operand", code_name);
	      return true;
	    }
	  /* For non-aggregates the selected bits must lie within the
	     referenced object.  */
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
	      && maybe_gt (size + bitpos,
			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "%qs", code_name);
	      return true;
	    }
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs reference", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      expr = op;
    }

  /* Now walk down the chain of handled components, checking each
     reference kind on the way to the base.  */
  while (handled_component_p (expr))
    {
      code_name = get_tree_code_name (TREE_CODE (expr));

      /* These three were handled above and may only appear at the top.  */
      if (TREE_CODE (expr) == REALPART_EXPR
	  || TREE_CODE (expr) == IMAGPART_EXPR
	  || TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  error ("non-top-level %qs", code_name);
	  return true;
	}

      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index and the optional lower-bound/element-size operands
	     must be gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to %qs", code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  if (TREE_OPERAND (expr, 2)
	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
	    {
	      error ("invalid %qs offset operator", code_name);
	      return true;
	    }
	  if (!useless_type_conversion_p (TREE_TYPE (expr),
					  TREE_TYPE (TREE_OPERAND (expr, 1))))
	    {
	      error ("type mismatch in %qs", code_name);
	      debug_generic_stmt (TREE_TYPE (expr));
	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	      return true;
	    }
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of %qs on the left hand side of %qs",
		     get_tree_code_name (TREE_CODE (op)), code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size in %qs",
		     code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* Finally check the base of the reference.  */
  code_name = get_tree_code_name (TREE_CODE (expr));

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	      && verify_address (TREE_OPERAND (expr, 0), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
	      && verify_address (TMR_BASE (expr), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || !poly_int_tree_p (TMR_OFFSET (expr))
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == INDIRECT_REF)
    {
      /* INDIRECT_REF must have been lowered to MEM_REF by gimplification.  */
      error ("%qs in gimple IL", code_name);
      debug_generic_stmt (expr);
      return true;
    }

  if (!require_lvalue
      && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
    return false;

  if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  return false;
}
3362 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3363 list of pointer-to types that is trivially convertible to DEST. */
3365 static bool
3366 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3368 tree src;
3370 if (!TYPE_POINTER_TO (src_obj))
3371 return true;
3373 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3374 if (useless_type_conversion_p (dest, src))
3375 return true;
3377 return false;
3380 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3381 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3383 static bool
3384 valid_fixed_convert_types_p (tree type1, tree type2)
3386 return (FIXED_POINT_TYPE_P (type1)
3387 && (INTEGRAL_TYPE_P (type2)
3388 || SCALAR_FLOAT_TYPE_P (type2)
3389 || FIXED_POINT_TYPE_P (type2)));
3392 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3393 is a problem, otherwise false. */
/* NOTE(review): this listing has lost the brace-only lines during
   extraction; the embedded numbers are the original blob line numbers.  */
3395 static bool
3396 verify_gimple_call (gcall *stmt)
3398 tree fn = gimple_call_fn (stmt);
3399 tree fntype, fndecl;
3400 unsigned i;
/* An internal call (IFN_*) is identified by number, not by a callee
   tree, so FN must be NULL for it; a normal call must have a target.  */
3402 if (gimple_call_internal_p (stmt))
3404 if (fn)
3406 error ("gimple call has two targets");
3407 debug_generic_stmt (fn);
3408 return true;
3411 else
3413 if (!fn)
3415 error ("gimple call has no target");
3416 return true;
/* The callee must be a valid gimple call address and its type a
   pointer to FUNCTION_TYPE or METHOD_TYPE.  */
3420 if (fn && !is_gimple_call_addr (fn))
3422 error ("invalid function in gimple call");
3423 debug_generic_stmt (fn);
3424 return true;
3427 if (fn
3428 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3429 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3430 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3432 error ("non-function in gimple call");
3433 return true;
/* "Looping const or pure" only makes sense on a function that is
   also const or pure.  */
3436 fndecl = gimple_call_fndecl (stmt);
3437 if (fndecl
3438 && TREE_CODE (fndecl) == FUNCTION_DECL
3439 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3440 && !DECL_PURE_P (fndecl)
3441 && !TREE_READONLY (fndecl))
3443 error ("invalid pure const state for function");
3444 return true;
/* The LHS, when present, must be a register or a verifiable lvalue
   (WITH_SIZE_EXPR is looked through before checking the reference).  */
3447 tree lhs = gimple_call_lhs (stmt);
3448 if (lhs
3449 && (!is_gimple_reg (lhs)
3450 && (!is_gimple_lvalue (lhs)
3451 || verify_types_in_gimple_reference
3452 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3453 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3455 error ("invalid LHS in gimple call");
3456 return true;
3459 if (gimple_call_ctrl_altering_p (stmt)
3460 && gimple_call_noreturn_p (stmt)
3461 && should_remove_lhs_p (lhs))
3463 error ("LHS in %<noreturn%> call");
3464 return true;
/* The LHS type must be trivially convertible from the declared return
   type, except that arbitrary pointer-to-pointer mismatches are let
   through (see the ??? note below).  */
3467 fntype = gimple_call_fntype (stmt);
3468 if (fntype
3469 && lhs
3470 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3471 /* ??? At least C++ misses conversions at assignments from
3472 void * call results.
3473 For now simply allow arbitrary pointer type conversions. */
3474 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3475 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3477 error ("invalid conversion in gimple call");
3478 debug_generic_stmt (TREE_TYPE (lhs));
3479 debug_generic_stmt (TREE_TYPE (fntype));
3480 return true;
3483 if (gimple_call_chain (stmt)
3484 && !is_gimple_val (gimple_call_chain (stmt)))
3486 error ("invalid static chain in gimple call");
3487 debug_generic_stmt (gimple_call_chain (stmt));
3488 return true;
3491 /* If there is a static chain argument, the call should either be
3492 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3493 if (gimple_call_chain (stmt)
3494 && fndecl
3495 && !DECL_STATIC_CHAIN (fndecl))
3497 error ("static chain with function that doesn%'t use one");
3498 return true;
3501 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3503 switch (DECL_FUNCTION_CODE (fndecl))
3505 case BUILT_IN_UNREACHABLE:
3506 case BUILT_IN_TRAP:
3507 if (gimple_call_num_args (stmt) > 0)
3509 /* Built-in unreachable with parameters might not be caught by
3510 undefined behavior sanitizer. Front-ends do check users do not
3511 call them that way but we also produce calls to
3512 __builtin_unreachable internally, for example when IPA figures
3513 out a call cannot happen in a legal program. In such cases,
3514 we must make sure arguments are stripped off. */
3515 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3516 "with arguments");
3517 return true;
3519 break;
3520 default:
3521 break;
3525 /* For a call to .DEFERRED_INIT,
3526 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3527 we should guarantee that when the 1st argument is a constant, it should
3528 be the same as the size of the LHS. */
/* NOTE(review): SIZE_OF_LHS is computed from LHS before any null check;
   this relies on every .DEFERRED_INIT call having an LHS -- confirm.  */
3530 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3532 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3533 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3535 if (TREE_CODE (lhs) == SSA_NAME)
3536 lhs = SSA_NAME_VAR (lhs);
3538 poly_uint64 size_from_arg0, size_from_lhs;
3539 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3540 &size_from_arg0);
3541 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3542 &size_from_lhs);
/* Only diagnose when both sizes are compile-time constants.  */
3543 if (is_constant_size_arg0 && is_constant_size_lhs)
3544 if (maybe_ne (size_from_arg0, size_from_lhs))
3546 error ("%<DEFERRED_INIT%> calls should have same "
3547 "constant size for the first argument and LHS");
3548 return true;
3552 /* ??? The C frontend passes unpromoted arguments in case it
3553 didn't see a function declaration before the call. So for now
3554 leave the call arguments mostly unverified. Once we gimplify
3555 unit-at-a-time we have a chance to fix this. */
3556 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3558 tree arg = gimple_call_arg (stmt, i);
/* Register-typed arguments must be gimple values; aggregate-typed
   ones must be lvalues whose reference structure verifies.  */
3559 if ((is_gimple_reg_type (TREE_TYPE (arg))
3560 && !is_gimple_val (arg))
3561 || (!is_gimple_reg_type (TREE_TYPE (arg))
3562 && !is_gimple_lvalue (arg)))
3564 error ("invalid argument to gimple call");
3565 debug_generic_expr (arg);
3566 return true;
3568 if (!is_gimple_reg (arg))
3570 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3571 arg = TREE_OPERAND (arg, 0);
3572 if (verify_types_in_gimple_reference (arg, false))
3573 return true;
3577 return false;
3580 /* Verifies the gimple comparison with the result type TYPE and
3581 the operands OP0 and OP1, comparison code is CODE. */
/* Returns true when a problem was diagnosed, false when the comparison
   is well-formed.  NOTE(review): brace-only lines were lost in this
   extraction; the embedded numbers are the original blob line numbers.  */
3583 static bool
3584 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3586 tree op0_type = TREE_TYPE (op0);
3587 tree op1_type = TREE_TYPE (op1);
/* Both operands must be gimple values.  */
3589 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3591 error ("invalid operands in gimple comparison");
3592 return true;
3595 /* For comparisons we do not have the operations type as the
3596 effective type the comparison is carried out in. Instead
3597 we require that either the first operand is trivially
3598 convertible into the second, or the other way around. */
3599 if (!useless_type_conversion_p (op0_type, op1_type)
3600 && !useless_type_conversion_p (op1_type, op0_type))
3602 error ("mismatching comparison operand types");
3603 debug_generic_expr (op0_type);
3604 debug_generic_expr (op1_type);
3605 return true;
3608 /* The resulting type of a comparison may be an effective boolean type. */
3609 if (INTEGRAL_TYPE_P (type)
3610 && (TREE_CODE (type) == BOOLEAN_TYPE
3611 || TYPE_PRECISION (type) == 1))
/* Boolean result: vector operands are only allowed for EQ/NE on
   boolean or integer vectors.  */
3613 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3614 || TREE_CODE (op1_type) == VECTOR_TYPE)
3615 && code != EQ_EXPR && code != NE_EXPR
3616 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3617 && !VECTOR_INTEGER_TYPE_P (op0_type))
3619 error ("unsupported operation or type for vector comparison"
3620 " returning a boolean");
3621 debug_generic_expr (op0_type);
3622 debug_generic_expr (op1_type);
3623 return true;
3626 /* Or a boolean vector type with the same element count
3627 as the comparison operand types. */
3628 else if (TREE_CODE (type) == VECTOR_TYPE
3629 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3631 if (TREE_CODE (op0_type) != VECTOR_TYPE
3632 || TREE_CODE (op1_type) != VECTOR_TYPE)
3634 error ("non-vector operands in vector comparison");
3635 debug_generic_expr (op0_type);
3636 debug_generic_expr (op1_type);
3637 return true;
/* Element counts of the result and operand vectors must agree.  */
3640 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3641 TYPE_VECTOR_SUBPARTS (op0_type)))
3643 error ("invalid vector comparison resulting type");
3644 debug_generic_expr (type);
3645 return true;
/* Anything other than an effective boolean or a boolean vector is
   not a valid comparison result type.  */
3648 else
3650 error ("bogus comparison result type");
3651 debug_generic_expr (type);
3652 return true;
3655 return false;
3658 /* Verify a gimple assignment statement STMT with an unary rhs.
3659 Returns true if anything is wrong. */
/* NOTE(review): brace-only lines were lost in this extraction; the
   embedded numbers are the original blob line numbers.  */
3661 static bool
3662 verify_gimple_assign_unary (gassign *stmt)
3664 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3665 tree lhs = gimple_assign_lhs (stmt);
3666 tree lhs_type = TREE_TYPE (lhs);
3667 tree rhs1 = gimple_assign_rhs1 (stmt);
3668 tree rhs1_type = TREE_TYPE (rhs1);
/* A unary operation must produce a register and consume a value.  */
3670 if (!is_gimple_reg (lhs))
3672 error ("non-register as LHS of unary operation");
3673 return true;
3676 if (!is_gimple_val (rhs1))
3678 error ("invalid operand in unary operation");
3679 return true;
3682 const char* const code_name = get_tree_code_name (rhs_code);
3684 /* First handle conversions. */
3685 switch (rhs_code)
3687 CASE_CONVERT:
3689 /* Allow conversions between vectors with the same number of elements,
3690 provided that the conversion is OK for the element types too. */
3691 if (VECTOR_TYPE_P (lhs_type)
3692 && VECTOR_TYPE_P (rhs1_type)
3693 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3694 TYPE_VECTOR_SUBPARTS (rhs1_type)))
/* Strip to the element types and fall through to the scalar checks.  */
3696 lhs_type = TREE_TYPE (lhs_type);
3697 rhs1_type = TREE_TYPE (rhs1_type);
3699 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3701 error ("invalid vector types in nop conversion");
3702 debug_generic_expr (lhs_type);
3703 debug_generic_expr (rhs1_type);
3704 return true;
3707 /* Allow conversions from pointer type to integral type only if
3708 there is no sign or zero extension involved.
3709 For targets were the precision of ptrofftype doesn't match that
3710 of pointers we allow conversions to types where
3711 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3712 if ((POINTER_TYPE_P (lhs_type)
3713 && INTEGRAL_TYPE_P (rhs1_type))
3714 || (POINTER_TYPE_P (rhs1_type)
3715 && INTEGRAL_TYPE_P (lhs_type)
3716 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3717 #if defined(POINTERS_EXTEND_UNSIGNED)
3718 || (TYPE_MODE (rhs1_type) == ptr_mode
3719 && (TYPE_PRECISION (lhs_type)
3720 == BITS_PER_WORD /* word_mode */
3721 || (TYPE_PRECISION (lhs_type)
3722 == GET_MODE_PRECISION (Pmode))))
3723 #endif
3725 return false;
3727 /* Allow conversion from integral to offset type and vice versa. */
3728 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3729 && INTEGRAL_TYPE_P (rhs1_type))
3730 || (INTEGRAL_TYPE_P (lhs_type)
3731 && TREE_CODE (rhs1_type) == OFFSET_TYPE)
3732 return false;
3734 /* Otherwise assert we are converting between types of the
3735 same kind. */
3736 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3738 error ("invalid types in nop conversion");
3739 debug_generic_expr (lhs_type);
3740 debug_generic_expr (rhs1_type);
3741 return true;
3744 return false;
/* Address-space conversion must actually change the address space.  */
3747 case ADDR_SPACE_CONVERT_EXPR:
3749 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3750 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3751 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3753 error ("invalid types in address space conversion");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 return true;
3759 return false;
3762 case FIXED_CONVERT_EXPR:
3764 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3765 && !valid_fixed_convert_types_p (rhs1_type, lhs_type)
3767 error ("invalid types in fixed-point conversion");
3768 debug_generic_expr (lhs_type);
3769 debug_generic_expr (rhs1_type);
3770 return true;
3773 return false;
/* Int -> float conversion, scalar or element-wise on vectors.  */
3776 case FLOAT_EXPR:
3778 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3779 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3780 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3782 error ("invalid types in conversion to floating-point");
3783 debug_generic_expr (lhs_type);
3784 debug_generic_expr (rhs1_type);
3785 return true;
3788 return false;
/* Float -> int truncation, scalar or element-wise on vectors.  */
3791 case FIX_TRUNC_EXPR:
3793 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3794 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3795 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3797 error ("invalid types in conversion to integer");
3798 debug_generic_expr (lhs_type);
3799 debug_generic_expr (rhs1_type);
3800 return true;
3803 return false;
/* Vector unpacking: result elements are twice the size and half the
   count of the source, with kind constraints per unpack variant.  */
3806 case VEC_UNPACK_HI_EXPR:
3807 case VEC_UNPACK_LO_EXPR:
3808 case VEC_UNPACK_FLOAT_HI_EXPR:
3809 case VEC_UNPACK_FLOAT_LO_EXPR:
3810 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3811 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3812 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3813 || TREE_CODE (lhs_type) != VECTOR_TYPE
3814 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3815 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3816 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3817 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3818 || ((rhs_code == VEC_UNPACK_HI_EXPR
3819 || rhs_code == VEC_UNPACK_LO_EXPR)
3820 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3821 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3822 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3823 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3824 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3825 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3826 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3827 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3828 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3829 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3830 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3831 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3832 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3833 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3834 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3835 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3837 error ("type mismatch in %qs expression", code_name);
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 return true;
3843 return false;
3845 case NEGATE_EXPR:
3846 case ABS_EXPR:
3847 case BIT_NOT_EXPR:
3848 case PAREN_EXPR:
3849 case CONJ_EXPR:
3850 /* Disallow pointer and offset types for many of the unary gimple. */
3851 if (POINTER_TYPE_P (lhs_type)
3852 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3854 error ("invalid types for %qs", code_name);
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 return true;
/* Fall through to the generic no-conversion check below.  */
3859 break;
/* ABSU: signed integral operand, unsigned result of same precision.  */
3861 case ABSU_EXPR:
3862 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3863 || !TYPE_UNSIGNED (lhs_type)
3864 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3865 || TYPE_UNSIGNED (rhs1_type)
3866 || element_precision (lhs_type) != element_precision (rhs1_type))
3868 error ("invalid types for %qs", code_name);
3869 debug_generic_expr (lhs_type);
3870 debug_generic_expr (rhs1_type);
3871 return true;
3873 return false;
3875 case VEC_DUPLICATE_EXPR:
3876 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3877 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3879 error ("%qs should be from a scalar to a like vector", code_name);
3880 debug_generic_expr (lhs_type);
3881 debug_generic_expr (rhs1_type);
3882 return true;
3884 return false;
3886 default:
3887 gcc_unreachable ();
3890 /* For the remaining codes assert there is no conversion involved. */
3891 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3893 error ("non-trivial conversion in unary operation");
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 return true;
3899 return false;
3902 /* Verify a gimple assignment statement STMT with a binary rhs.
3903 Returns true if anything is wrong. */
/* NOTE(review): brace-only lines were lost in this extraction; the
   embedded numbers are the original blob line numbers.  */
3905 static bool
3906 verify_gimple_assign_binary (gassign *stmt)
3908 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3909 tree lhs = gimple_assign_lhs (stmt);
3910 tree lhs_type = TREE_TYPE (lhs);
3911 tree rhs1 = gimple_assign_rhs1 (stmt);
3912 tree rhs1_type = TREE_TYPE (rhs1);
3913 tree rhs2 = gimple_assign_rhs2 (stmt);
3914 tree rhs2_type = TREE_TYPE (rhs2);
/* A binary operation must produce a register and consume two values.  */
3916 if (!is_gimple_reg (lhs))
3918 error ("non-register as LHS of binary operation");
3919 return true;
3922 if (!is_gimple_val (rhs1)
3923 || !is_gimple_val (rhs2))
3925 error ("invalid operands in binary operation");
3926 return true;
3929 const char* const code_name = get_tree_code_name (rhs_code);
3931 /* First handle operations that involve different types. */
3932 switch (rhs_code)
/* COMPLEX_EXPR builds a complex value from two scalar parts.  */
3934 case COMPLEX_EXPR:
3936 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3937 || !(INTEGRAL_TYPE_P (rhs1_type)
3938 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3939 || !(INTEGRAL_TYPE_P (rhs2_type)
3940 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3942 error ("type mismatch in %qs", code_name);
3943 debug_generic_expr (lhs_type);
3944 debug_generic_expr (rhs1_type);
3945 debug_generic_expr (rhs2_type);
3946 return true;
3949 return false;
3952 case LSHIFT_EXPR:
3953 case RSHIFT_EXPR:
3954 case LROTATE_EXPR:
3955 case RROTATE_EXPR:
3957 /* Shifts and rotates are ok on integral types, fixed point
3958 types and integer vector types. */
3959 if ((!INTEGRAL_TYPE_P (rhs1_type)
3960 && !FIXED_POINT_TYPE_P (rhs1_type)
3961 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3962 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3963 || (!INTEGRAL_TYPE_P (rhs2_type)
3964 /* Vector shifts of vectors are also ok. */
3965 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3966 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3967 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3968 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3969 || !useless_type_conversion_p (lhs_type, rhs1_type))
3971 error ("type mismatch in %qs", code_name);
3972 debug_generic_expr (lhs_type);
3973 debug_generic_expr (rhs1_type);
3974 debug_generic_expr (rhs2_type);
3975 return true;
3978 return false;
/* Widening shift: result precision must hold twice the operand's.  */
3981 case WIDEN_LSHIFT_EXPR:
3983 if (!INTEGRAL_TYPE_P (lhs_type)
3984 || !INTEGRAL_TYPE_P (rhs1_type)
3985 || TREE_CODE (rhs2) != INTEGER_CST
3986 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3988 error ("type mismatch in %qs", code_name);
3989 debug_generic_expr (lhs_type);
3990 debug_generic_expr (rhs1_type);
3991 debug_generic_expr (rhs2_type);
3992 return true;
3995 return false;
3998 case VEC_WIDEN_LSHIFT_HI_EXPR:
3999 case VEC_WIDEN_LSHIFT_LO_EXPR:
4001 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4002 || TREE_CODE (lhs_type) != VECTOR_TYPE
4003 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4004 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4005 || TREE_CODE (rhs2) != INTEGER_CST
4006 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4007 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4009 error ("type mismatch in %qs", code_name);
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs1_type);
4012 debug_generic_expr (rhs2_type);
4013 return true;
4016 return false;
4019 case WIDEN_PLUS_EXPR:
4020 case WIDEN_MINUS_EXPR:
4021 case PLUS_EXPR:
4022 case MINUS_EXPR:
/* For vector +/- check element-wise; pointers are never valid
   element or scalar types here (use POINTER_PLUS_EXPR instead).  */
4024 tree lhs_etype = lhs_type;
4025 tree rhs1_etype = rhs1_type;
4026 tree rhs2_etype = rhs2_type;
4027 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
4029 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4030 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4032 error ("invalid non-vector operands to %qs", code_name);
4033 return true;
4035 lhs_etype = TREE_TYPE (lhs_type);
4036 rhs1_etype = TREE_TYPE (rhs1_type);
4037 rhs2_etype = TREE_TYPE (rhs2_type);
4039 if (POINTER_TYPE_P (lhs_etype)
4040 || POINTER_TYPE_P (rhs1_etype)
4041 || POINTER_TYPE_P (rhs2_etype))
4043 error ("invalid (pointer) operands %qs", code_name);
4044 return true;
4047 /* Continue with generic binary expression handling. */
4048 break;
/* Pointer + offset: offset must be of ptrofftype.  */
4051 case POINTER_PLUS_EXPR:
4053 if (!POINTER_TYPE_P (rhs1_type)
4054 || !useless_type_conversion_p (lhs_type, rhs1_type)
4055 || !ptrofftype_p (rhs2_type))
4057 error ("type mismatch in %qs", code_name);
4058 debug_generic_stmt (lhs_type);
4059 debug_generic_stmt (rhs1_type);
4060 debug_generic_stmt (rhs2_type);
4061 return true;
4064 return false;
4067 case POINTER_DIFF_EXPR:
4069 if (!POINTER_TYPE_P (rhs1_type)
4070 || !POINTER_TYPE_P (rhs2_type)
4071 /* Because we special-case pointers to void we allow difference
4072 of arbitrary pointers with the same mode. */
4073 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4074 || !INTEGRAL_TYPE_P (lhs_type)
4075 || TYPE_UNSIGNED (lhs_type)
4076 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4078 error ("type mismatch in %qs", code_name);
4079 debug_generic_stmt (lhs_type);
4080 debug_generic_stmt (rhs1_type);
4081 debug_generic_stmt (rhs2_type);
4082 return true;
4085 return false;
/* TRUTH_* codes never survive gimplification into GIMPLE.  */
4088 case TRUTH_ANDIF_EXPR:
4089 case TRUTH_ORIF_EXPR:
4090 case TRUTH_AND_EXPR:
4091 case TRUTH_OR_EXPR:
4092 case TRUTH_XOR_EXPR:
4094 gcc_unreachable ();
4096 case LT_EXPR:
4097 case LE_EXPR:
4098 case GT_EXPR:
4099 case GE_EXPR:
4100 case EQ_EXPR:
4101 case NE_EXPR:
4102 case UNORDERED_EXPR:
4103 case ORDERED_EXPR:
4104 case UNLT_EXPR:
4105 case UNLE_EXPR:
4106 case UNGT_EXPR:
4107 case UNGE_EXPR:
4108 case UNEQ_EXPR:
4109 case LTGT_EXPR:
4110 /* Comparisons are also binary, but the result type is not
4111 connected to the operand types. */
4112 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4114 case WIDEN_MULT_EXPR:
4115 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4116 return true;
/* NOTE(review): the "true" result here is computed without an error ()
   diagnostic, unlike the other cases -- apparently intentional.  */
4117 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4118 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4120 case WIDEN_SUM_EXPR:
4122 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4124 && ((!INTEGRAL_TYPE_P (rhs1_type)
4125 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4126 || (!INTEGRAL_TYPE_P (lhs_type)
4127 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4128 || !useless_type_conversion_p (lhs_type, rhs2_type)
4129 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4130 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4132 error ("type mismatch in %qs", code_name);
4133 debug_generic_expr (lhs_type);
4134 debug_generic_expr (rhs1_type);
4135 debug_generic_expr (rhs2_type);
4136 return true;
4138 return false;
/* Vector widening ops: both sources agree, result elements are twice
   the source element size.  */
4141 case VEC_WIDEN_MINUS_HI_EXPR:
4142 case VEC_WIDEN_MINUS_LO_EXPR:
4143 case VEC_WIDEN_PLUS_HI_EXPR:
4144 case VEC_WIDEN_PLUS_LO_EXPR:
4145 case VEC_WIDEN_MULT_HI_EXPR:
4146 case VEC_WIDEN_MULT_LO_EXPR:
4147 case VEC_WIDEN_MULT_EVEN_EXPR:
4148 case VEC_WIDEN_MULT_ODD_EXPR:
4150 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4151 || TREE_CODE (lhs_type) != VECTOR_TYPE
4152 || !types_compatible_p (rhs1_type, rhs2_type)
4153 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4154 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4156 error ("type mismatch in %qs", code_name);
4157 debug_generic_expr (lhs_type);
4158 debug_generic_expr (rhs1_type);
4159 debug_generic_expr (rhs2_type);
4160 return true;
4162 return false;
4165 case VEC_PACK_TRUNC_EXPR:
4166 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4167 vector boolean types. */
4168 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4169 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4170 && types_compatible_p (rhs1_type, rhs2_type)
4171 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4172 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4173 return false;
4175 /* Fallthru. */
4176 case VEC_PACK_SAT_EXPR:
4177 case VEC_PACK_FIX_TRUNC_EXPR:
4179 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4180 || TREE_CODE (lhs_type) != VECTOR_TYPE
4181 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4182 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4183 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4184 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4185 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4186 || !types_compatible_p (rhs1_type, rhs2_type)
4187 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4188 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4189 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4190 TYPE_VECTOR_SUBPARTS (lhs_type)))
4192 error ("type mismatch in %qs", code_name);
4193 debug_generic_expr (lhs_type);
4194 debug_generic_expr (rhs1_type);
4195 debug_generic_expr (rhs2_type);
4196 return true;
4199 return false;
4202 case VEC_PACK_FLOAT_EXPR:
4203 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4204 || TREE_CODE (lhs_type) != VECTOR_TYPE
4205 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4206 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4207 || !types_compatible_p (rhs1_type, rhs2_type)
4208 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4209 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4210 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4211 TYPE_VECTOR_SUBPARTS (lhs_type)))
4213 error ("type mismatch in %qs", code_name);
4214 debug_generic_expr (lhs_type);
4215 debug_generic_expr (rhs1_type);
4216 debug_generic_expr (rhs2_type);
4217 return true;
4220 return false;
4222 case MULT_EXPR:
4223 case MULT_HIGHPART_EXPR:
4224 case TRUNC_DIV_EXPR:
4225 case CEIL_DIV_EXPR:
4226 case FLOOR_DIV_EXPR:
4227 case ROUND_DIV_EXPR:
4228 case TRUNC_MOD_EXPR:
4229 case CEIL_MOD_EXPR:
4230 case FLOOR_MOD_EXPR:
4231 case ROUND_MOD_EXPR:
4232 case RDIV_EXPR:
4233 case EXACT_DIV_EXPR:
4234 case BIT_IOR_EXPR:
4235 case BIT_XOR_EXPR:
4236 /* Disallow pointer and offset types for many of the binary gimple. */
4237 if (POINTER_TYPE_P (lhs_type)
4238 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4240 error ("invalid types for %qs", code_name);
4241 debug_generic_expr (lhs_type);
4242 debug_generic_expr (rhs1_type);
4243 debug_generic_expr (rhs2_type);
4244 return true;
4246 /* Continue with generic binary expression handling. */
4247 break;
4249 case MIN_EXPR:
4250 case MAX_EXPR:
4251 /* Continue with generic binary expression handling. */
4252 break;
/* BIT_AND on a pointer is allowed only with a constant mask (used for
   alignment masking); otherwise same restrictions as the group above.  */
4254 case BIT_AND_EXPR:
4255 if (POINTER_TYPE_P (lhs_type)
4256 && TREE_CODE (rhs2) == INTEGER_CST)
4257 break;
4258 /* Disallow pointer and offset types for many of the binary gimple. */
4259 if (POINTER_TYPE_P (lhs_type)
4260 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4262 error ("invalid types for %qs", code_name);
4263 debug_generic_expr (lhs_type);
4264 debug_generic_expr (rhs1_type);
4265 debug_generic_expr (rhs2_type);
4266 return true;
4268 /* Continue with generic binary expression handling. */
4269 break;
/* VEC_SERIES: base and step are scalars matching the vector element.  */
4271 case VEC_SERIES_EXPR:
4272 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4274 error ("type mismatch in %qs", code_name);
4275 debug_generic_expr (rhs1_type);
4276 debug_generic_expr (rhs2_type);
4277 return true;
4279 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4280 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4282 error ("vector type expected in %qs", code_name);
4283 debug_generic_expr (lhs_type);
4284 return true;
4286 return false;
4288 default:
4289 gcc_unreachable ();
/* Generic handling for the codes that "break" out of the switch:
   both operand types must convert trivially to the result type.  */
4292 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4293 || !useless_type_conversion_p (lhs_type, rhs2_type))
4295 error ("type mismatch in binary expression");
4296 debug_generic_stmt (lhs_type);
4297 debug_generic_stmt (rhs1_type);
4298 debug_generic_stmt (rhs2_type);
4299 return true;
4302 return false;
4305 /* Verify a gimple assignment statement STMT with a ternary rhs.
4306 Returns true if anything is wrong. */
4308 static bool
4309 verify_gimple_assign_ternary (gassign *stmt)
4311 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4312 tree lhs = gimple_assign_lhs (stmt);
4313 tree lhs_type = TREE_TYPE (lhs);
4314 tree rhs1 = gimple_assign_rhs1 (stmt);
4315 tree rhs1_type = TREE_TYPE (rhs1);
4316 tree rhs2 = gimple_assign_rhs2 (stmt);
4317 tree rhs2_type = TREE_TYPE (rhs2);
4318 tree rhs3 = gimple_assign_rhs3 (stmt);
4319 tree rhs3_type = TREE_TYPE (rhs3);
4321 if (!is_gimple_reg (lhs))
4323 error ("non-register as LHS of ternary operation");
4324 return true;
4327 if (!is_gimple_val (rhs1)
4328 || !is_gimple_val (rhs2)
4329 || !is_gimple_val (rhs3))
4331 error ("invalid operands in ternary operation");
4332 return true;
4335 const char* const code_name = get_tree_code_name (rhs_code);
4337 /* First handle operations that involve different types. */
4338 switch (rhs_code)
4340 case WIDEN_MULT_PLUS_EXPR:
4341 case WIDEN_MULT_MINUS_EXPR:
4342 if ((!INTEGRAL_TYPE_P (rhs1_type)
4343 && !FIXED_POINT_TYPE_P (rhs1_type))
4344 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4345 || !useless_type_conversion_p (lhs_type, rhs3_type)
4346 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4347 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4349 error ("type mismatch in %qs", code_name);
4350 debug_generic_expr (lhs_type);
4351 debug_generic_expr (rhs1_type);
4352 debug_generic_expr (rhs2_type);
4353 debug_generic_expr (rhs3_type);
4354 return true;
4356 break;
4358 case VEC_COND_EXPR:
4359 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4360 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4361 TYPE_VECTOR_SUBPARTS (lhs_type)))
4363 error ("the first argument of a %qs must be of a "
4364 "boolean vector type of the same number of elements "
4365 "as the result", code_name);
4366 debug_generic_expr (lhs_type);
4367 debug_generic_expr (rhs1_type);
4368 return true;
4370 /* Fallthrough. */
4371 case COND_EXPR:
4372 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4373 || !useless_type_conversion_p (lhs_type, rhs3_type))
4375 error ("type mismatch in %qs", code_name);
4376 debug_generic_expr (lhs_type);
4377 debug_generic_expr (rhs2_type);
4378 debug_generic_expr (rhs3_type);
4379 return true;
4381 break;
4383 case VEC_PERM_EXPR:
4384 /* If permute is constant, then we allow for lhs and rhs
4385 to have different vector types, provided:
4386 (1) lhs, rhs1, rhs2 have same element type.
4387 (2) rhs3 vector is constant and has integer element type.
4388 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4390 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4391 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4392 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4393 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4395 error ("vector types expected in %qs", code_name);
4396 debug_generic_expr (lhs_type);
4397 debug_generic_expr (rhs1_type);
4398 debug_generic_expr (rhs2_type);
4399 debug_generic_expr (rhs3_type);
4400 return true;
4403 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4404 as long as lhs, rhs1 and rhs2 have same element type. */
4405 if (TREE_CONSTANT (rhs3)
4406 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4407 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4408 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4409 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4411 error ("type mismatch in %qs", code_name);
4412 debug_generic_expr (lhs_type);
4413 debug_generic_expr (rhs1_type);
4414 debug_generic_expr (rhs2_type);
4415 debug_generic_expr (rhs3_type);
4416 return true;
4419 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4420 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4421 TYPE_VECTOR_SUBPARTS (rhs2_type))
4422 || (!TREE_CONSTANT(rhs3)
4423 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4424 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4425 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4426 TYPE_VECTOR_SUBPARTS (lhs_type)))
4428 error ("vectors with different element number found in %qs",
4429 code_name);
4430 debug_generic_expr (lhs_type);
4431 debug_generic_expr (rhs1_type);
4432 debug_generic_expr (rhs2_type);
4433 debug_generic_expr (rhs3_type);
4434 return true;
4437 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4438 || (TREE_CODE (rhs3) != VECTOR_CST
4439 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4440 (TREE_TYPE (rhs3_type)))
4441 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4442 (TREE_TYPE (rhs1_type))))))
4444 error ("invalid mask type in %qs", code_name);
4445 debug_generic_expr (lhs_type);
4446 debug_generic_expr (rhs1_type);
4447 debug_generic_expr (rhs2_type);
4448 debug_generic_expr (rhs3_type);
4449 return true;
4452 return false;
4454 case SAD_EXPR:
4455 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4456 || !useless_type_conversion_p (lhs_type, rhs3_type)
4457 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4458 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4460 error ("type mismatch in %qs", code_name);
4461 debug_generic_expr (lhs_type);
4462 debug_generic_expr (rhs1_type);
4463 debug_generic_expr (rhs2_type);
4464 debug_generic_expr (rhs3_type);
4465 return true;
4468 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4469 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4470 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4472 error ("vector types expected in %qs", code_name);
4473 debug_generic_expr (lhs_type);
4474 debug_generic_expr (rhs1_type);
4475 debug_generic_expr (rhs2_type);
4476 debug_generic_expr (rhs3_type);
4477 return true;
4480 return false;
4482 case BIT_INSERT_EXPR:
4483 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4485 error ("type mismatch in %qs", code_name);
4486 debug_generic_expr (lhs_type);
4487 debug_generic_expr (rhs1_type);
4488 return true;
4490 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4491 && INTEGRAL_TYPE_P (rhs2_type))
4492 /* Vector element insert. */
4493 || (VECTOR_TYPE_P (rhs1_type)
4494 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4495 /* Aligned sub-vector insert. */
4496 || (VECTOR_TYPE_P (rhs1_type)
4497 && VECTOR_TYPE_P (rhs2_type)
4498 && types_compatible_p (TREE_TYPE (rhs1_type),
4499 TREE_TYPE (rhs2_type))
4500 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4501 TYPE_VECTOR_SUBPARTS (rhs2_type))
4502 && multiple_p (wi::to_poly_offset (rhs3),
4503 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4505 error ("not allowed type combination in %qs", code_name);
4506 debug_generic_expr (rhs1_type);
4507 debug_generic_expr (rhs2_type);
4508 return true;
4510 if (! tree_fits_uhwi_p (rhs3)
4511 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4512 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4514 error ("invalid position or size in %qs", code_name);
4515 return true;
4517 if (INTEGRAL_TYPE_P (rhs1_type)
4518 && !type_has_mode_precision_p (rhs1_type))
4520 error ("%qs into non-mode-precision operand", code_name);
4521 return true;
4523 if (INTEGRAL_TYPE_P (rhs1_type))
4525 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4526 if (bitpos >= TYPE_PRECISION (rhs1_type)
4527 || (bitpos + TYPE_PRECISION (rhs2_type)
4528 > TYPE_PRECISION (rhs1_type)))
4530 error ("insertion out of range in %qs", code_name);
4531 return true;
4534 else if (VECTOR_TYPE_P (rhs1_type))
4536 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4537 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4538 if (bitpos % bitsize != 0)
4540 error ("%qs not at element boundary", code_name);
4541 return true;
4544 return false;
4546 case DOT_PROD_EXPR:
4548 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4549 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4550 && ((!INTEGRAL_TYPE_P (rhs1_type)
4551 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4552 || (!INTEGRAL_TYPE_P (lhs_type)
4553 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4554 /* rhs1_type and rhs2_type may differ in sign. */
4555 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4556 || !useless_type_conversion_p (lhs_type, rhs3_type)
4557 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4558 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4560 error ("type mismatch in %qs", code_name);
4561 debug_generic_expr (lhs_type);
4562 debug_generic_expr (rhs1_type);
4563 debug_generic_expr (rhs2_type);
4564 return true;
4566 return false;
4569 case REALIGN_LOAD_EXPR:
4570 /* FIXME. */
4571 return false;
4573 default:
4574 gcc_unreachable ();
4576 return false;
4579 /* Verify a gimple assignment statement STMT with a single rhs.
4580 Returns true if anything is wrong. */
4582 static bool
4583 verify_gimple_assign_single (gassign *stmt)
4585 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4586 tree lhs = gimple_assign_lhs (stmt);
4587 tree lhs_type = TREE_TYPE (lhs);
4588 tree rhs1 = gimple_assign_rhs1 (stmt);
4589 tree rhs1_type = TREE_TYPE (rhs1);
4590 bool res = false;
4592 const char* const code_name = get_tree_code_name (rhs_code);
4594 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4596 error ("non-trivial conversion in %qs", code_name);
4597 debug_generic_expr (lhs_type);
4598 debug_generic_expr (rhs1_type);
4599 return true;
4602 if (gimple_clobber_p (stmt)
4603 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4605 error ("%qs LHS in clobber statement",
4606 get_tree_code_name (TREE_CODE (lhs)));
4607 debug_generic_expr (lhs);
4608 return true;
4611 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4613 error ("%qs LHS in assignment statement",
4614 get_tree_code_name (TREE_CODE (lhs)));
4615 debug_generic_expr (lhs);
4616 return true;
4619 if (handled_component_p (lhs)
4620 || TREE_CODE (lhs) == MEM_REF
4621 || TREE_CODE (lhs) == TARGET_MEM_REF)
4622 res |= verify_types_in_gimple_reference (lhs, true);
4624 /* Special codes we cannot handle via their class. */
4625 switch (rhs_code)
4627 case ADDR_EXPR:
4629 tree op = TREE_OPERAND (rhs1, 0);
4630 if (!is_gimple_addressable (op))
4632 error ("invalid operand in %qs", code_name);
4633 return true;
4636 /* Technically there is no longer a need for matching types, but
4637 gimple hygiene asks for this check. In LTO we can end up
4638 combining incompatible units and thus end up with addresses
4639 of globals that change their type to a common one. */
4640 if (!in_lto_p
4641 && !types_compatible_p (TREE_TYPE (op),
4642 TREE_TYPE (TREE_TYPE (rhs1)))
4643 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4644 TREE_TYPE (op)))
4646 error ("type mismatch in %qs", code_name);
4647 debug_generic_stmt (TREE_TYPE (rhs1));
4648 debug_generic_stmt (TREE_TYPE (op));
4649 return true;
4652 return (verify_address (rhs1, true)
4653 || verify_types_in_gimple_reference (op, true));
4656 /* tcc_reference */
4657 case INDIRECT_REF:
4658 error ("%qs in gimple IL", code_name);
4659 return true;
4661 case COMPONENT_REF:
4662 case BIT_FIELD_REF:
4663 case ARRAY_REF:
4664 case ARRAY_RANGE_REF:
4665 case VIEW_CONVERT_EXPR:
4666 case REALPART_EXPR:
4667 case IMAGPART_EXPR:
4668 case TARGET_MEM_REF:
4669 case MEM_REF:
4670 if (!is_gimple_reg (lhs)
4671 && is_gimple_reg_type (TREE_TYPE (lhs)))
4673 error ("invalid RHS for gimple memory store: %qs", code_name);
4674 debug_generic_stmt (lhs);
4675 debug_generic_stmt (rhs1);
4676 return true;
4678 return res || verify_types_in_gimple_reference (rhs1, false);
4680 /* tcc_constant */
4681 case SSA_NAME:
4682 case INTEGER_CST:
4683 case REAL_CST:
4684 case FIXED_CST:
4685 case COMPLEX_CST:
4686 case VECTOR_CST:
4687 case STRING_CST:
4688 return res;
4690 /* tcc_declaration */
4691 case CONST_DECL:
4692 return res;
4693 case VAR_DECL:
4694 case PARM_DECL:
4695 if (!is_gimple_reg (lhs)
4696 && !is_gimple_reg (rhs1)
4697 && is_gimple_reg_type (TREE_TYPE (lhs)))
4699 error ("invalid RHS for gimple memory store: %qs", code_name);
4700 debug_generic_stmt (lhs);
4701 debug_generic_stmt (rhs1);
4702 return true;
4704 return res;
4706 case CONSTRUCTOR:
4707 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4709 unsigned int i;
4710 tree elt_i, elt_v, elt_t = NULL_TREE;
4712 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4713 return res;
4714 /* For vector CONSTRUCTORs we require that either it is empty
4715 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4716 (then the element count must be correct to cover the whole
4717 outer vector and index must be NULL on all elements, or it is
4718 a CONSTRUCTOR of scalar elements, where we as an exception allow
4719 smaller number of elements (assuming zero filling) and
4720 consecutive indexes as compared to NULL indexes (such
4721 CONSTRUCTORs can appear in the IL from FEs). */
4722 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4724 if (elt_t == NULL_TREE)
4726 elt_t = TREE_TYPE (elt_v);
4727 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4729 tree elt_t = TREE_TYPE (elt_v);
4730 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4731 TREE_TYPE (elt_t)))
4733 error ("incorrect type of vector %qs elements",
4734 code_name);
4735 debug_generic_stmt (rhs1);
4736 return true;
4738 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4739 * TYPE_VECTOR_SUBPARTS (elt_t),
4740 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4742 error ("incorrect number of vector %qs elements",
4743 code_name);
4744 debug_generic_stmt (rhs1);
4745 return true;
4748 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4749 elt_t))
4751 error ("incorrect type of vector %qs elements",
4752 code_name);
4753 debug_generic_stmt (rhs1);
4754 return true;
4756 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4757 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4759 error ("incorrect number of vector %qs elements",
4760 code_name);
4761 debug_generic_stmt (rhs1);
4762 return true;
4765 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4767 error ("incorrect type of vector CONSTRUCTOR elements");
4768 debug_generic_stmt (rhs1);
4769 return true;
4771 if (elt_i != NULL_TREE
4772 && (TREE_CODE (elt_t) == VECTOR_TYPE
4773 || TREE_CODE (elt_i) != INTEGER_CST
4774 || compare_tree_int (elt_i, i) != 0))
4776 error ("vector %qs with non-NULL element index",
4777 code_name);
4778 debug_generic_stmt (rhs1);
4779 return true;
4781 if (!is_gimple_val (elt_v))
4783 error ("vector %qs element is not a GIMPLE value",
4784 code_name);
4785 debug_generic_stmt (rhs1);
4786 return true;
4790 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4792 error ("non-vector %qs with elements", code_name);
4793 debug_generic_stmt (rhs1);
4794 return true;
4796 return res;
4798 case WITH_SIZE_EXPR:
4799 error ("%qs RHS in assignment statement",
4800 get_tree_code_name (rhs_code));
4801 debug_generic_expr (rhs1);
4802 return true;
4804 case OBJ_TYPE_REF:
4805 /* FIXME. */
4806 return res;
4808 default:;
4811 return res;
4814 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4815 is a problem, otherwise false. */
4817 static bool
4818 verify_gimple_assign (gassign *stmt)
4820 switch (gimple_assign_rhs_class (stmt))
4822 case GIMPLE_SINGLE_RHS:
4823 return verify_gimple_assign_single (stmt);
4825 case GIMPLE_UNARY_RHS:
4826 return verify_gimple_assign_unary (stmt);
4828 case GIMPLE_BINARY_RHS:
4829 return verify_gimple_assign_binary (stmt);
4831 case GIMPLE_TERNARY_RHS:
4832 return verify_gimple_assign_ternary (stmt);
4834 default:
4835 gcc_unreachable ();
4839 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4840 is a problem, otherwise false. */
4842 static bool
4843 verify_gimple_return (greturn *stmt)
4845 tree op = gimple_return_retval (stmt);
4846 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4848 /* We cannot test for present return values as we do not fix up missing
4849 return values from the original source. */
4850 if (op == NULL)
4851 return false;
4853 if (!is_gimple_val (op)
4854 && TREE_CODE (op) != RESULT_DECL)
4856 error ("invalid operand in return statement");
4857 debug_generic_stmt (op);
4858 return true;
4861 if ((TREE_CODE (op) == RESULT_DECL
4862 && DECL_BY_REFERENCE (op))
4863 || (TREE_CODE (op) == SSA_NAME
4864 && SSA_NAME_VAR (op)
4865 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4866 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4867 op = TREE_TYPE (op);
4869 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4871 error ("invalid conversion in return statement");
4872 debug_generic_stmt (restype);
4873 debug_generic_stmt (TREE_TYPE (op));
4874 return true;
4877 return false;
4881 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4882 is a problem, otherwise false. */
4884 static bool
4885 verify_gimple_goto (ggoto *stmt)
4887 tree dest = gimple_goto_dest (stmt);
4889 /* ??? We have two canonical forms of direct goto destinations, a
4890 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4891 if (TREE_CODE (dest) != LABEL_DECL
4892 && (!is_gimple_val (dest)
4893 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4895 error ("goto destination is neither a label nor a pointer");
4896 return true;
4899 return false;
4902 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4903 is a problem, otherwise false. */
4905 static bool
4906 verify_gimple_switch (gswitch *stmt)
4908 unsigned int i, n;
4909 tree elt, prev_upper_bound = NULL_TREE;
4910 tree index_type, elt_type = NULL_TREE;
4912 if (!is_gimple_val (gimple_switch_index (stmt)))
4914 error ("invalid operand to switch statement");
4915 debug_generic_stmt (gimple_switch_index (stmt));
4916 return true;
4919 index_type = TREE_TYPE (gimple_switch_index (stmt));
4920 if (! INTEGRAL_TYPE_P (index_type))
4922 error ("non-integral type switch statement");
4923 debug_generic_expr (index_type);
4924 return true;
4927 elt = gimple_switch_label (stmt, 0);
4928 if (CASE_LOW (elt) != NULL_TREE
4929 || CASE_HIGH (elt) != NULL_TREE
4930 || CASE_CHAIN (elt) != NULL_TREE)
4932 error ("invalid default case label in switch statement");
4933 debug_generic_expr (elt);
4934 return true;
4937 n = gimple_switch_num_labels (stmt);
4938 for (i = 1; i < n; i++)
4940 elt = gimple_switch_label (stmt, i);
4942 if (CASE_CHAIN (elt))
4944 error ("invalid %<CASE_CHAIN%>");
4945 debug_generic_expr (elt);
4946 return true;
4948 if (! CASE_LOW (elt))
4950 error ("invalid case label in switch statement");
4951 debug_generic_expr (elt);
4952 return true;
4954 if (CASE_HIGH (elt)
4955 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4957 error ("invalid case range in switch statement");
4958 debug_generic_expr (elt);
4959 return true;
4962 if (! elt_type)
4964 elt_type = TREE_TYPE (CASE_LOW (elt));
4965 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4967 error ("type precision mismatch in switch statement");
4968 return true;
4971 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4972 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4974 error ("type mismatch for case label in switch statement");
4975 debug_generic_expr (elt);
4976 return true;
4979 if (prev_upper_bound)
4981 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4983 error ("case labels not sorted in switch statement");
4984 return true;
4988 prev_upper_bound = CASE_HIGH (elt);
4989 if (! prev_upper_bound)
4990 prev_upper_bound = CASE_LOW (elt);
4993 return false;
4996 /* Verify a gimple debug statement STMT.
4997 Returns true if anything is wrong. */
4999 static bool
5000 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5002 /* There isn't much that could be wrong in a gimple debug stmt. A
5003 gimple debug bind stmt, for example, maps a tree, that's usually
5004 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5005 component or member of an aggregate type, to another tree, that
5006 can be an arbitrary expression. These stmts expand into debug
5007 insns, and are converted to debug notes by var-tracking.cc. */
5008 return false;
5011 /* Verify a gimple label statement STMT.
5012 Returns true if anything is wrong. */
5014 static bool
5015 verify_gimple_label (glabel *stmt)
5017 tree decl = gimple_label_label (stmt);
5018 int uid;
5019 bool err = false;
5021 if (TREE_CODE (decl) != LABEL_DECL)
5022 return true;
5023 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5024 && DECL_CONTEXT (decl) != current_function_decl)
5026 error ("label context is not the current function declaration");
5027 err |= true;
5030 uid = LABEL_DECL_UID (decl);
5031 if (cfun->cfg
5032 && (uid == -1
5033 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5035 error ("incorrect entry in %<label_to_block_map%>");
5036 err |= true;
5039 uid = EH_LANDING_PAD_NR (decl);
5040 if (uid)
5042 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5043 if (decl != lp->post_landing_pad)
5045 error ("incorrect setting of landing pad number");
5046 err |= true;
5050 return err;
5053 /* Verify a gimple cond statement STMT.
5054 Returns true if anything is wrong. */
5056 static bool
5057 verify_gimple_cond (gcond *stmt)
5059 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5061 error ("invalid comparison code in gimple cond");
5062 return true;
5064 if (!(!gimple_cond_true_label (stmt)
5065 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5066 || !(!gimple_cond_false_label (stmt)
5067 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5069 error ("invalid labels in gimple cond");
5070 return true;
5073 return verify_gimple_comparison (boolean_type_node,
5074 gimple_cond_lhs (stmt),
5075 gimple_cond_rhs (stmt),
5076 gimple_cond_code (stmt));
5079 /* Verify the GIMPLE statement STMT. Returns true if there is an
5080 error, otherwise false. */
5082 static bool
5083 verify_gimple_stmt (gimple *stmt)
5085 switch (gimple_code (stmt))
5087 case GIMPLE_ASSIGN:
5088 return verify_gimple_assign (as_a <gassign *> (stmt));
5090 case GIMPLE_LABEL:
5091 return verify_gimple_label (as_a <glabel *> (stmt));
5093 case GIMPLE_CALL:
5094 return verify_gimple_call (as_a <gcall *> (stmt));
5096 case GIMPLE_COND:
5097 return verify_gimple_cond (as_a <gcond *> (stmt));
5099 case GIMPLE_GOTO:
5100 return verify_gimple_goto (as_a <ggoto *> (stmt));
5102 case GIMPLE_SWITCH:
5103 return verify_gimple_switch (as_a <gswitch *> (stmt));
5105 case GIMPLE_RETURN:
5106 return verify_gimple_return (as_a <greturn *> (stmt));
5108 case GIMPLE_ASM:
5109 return false;
5111 case GIMPLE_TRANSACTION:
5112 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5114 /* Tuples that do not have tree operands. */
5115 case GIMPLE_NOP:
5116 case GIMPLE_PREDICT:
5117 case GIMPLE_RESX:
5118 case GIMPLE_EH_DISPATCH:
5119 case GIMPLE_EH_MUST_NOT_THROW:
5120 return false;
5122 CASE_GIMPLE_OMP:
5123 /* OpenMP directives are validated by the FE and never operated
5124 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5125 non-gimple expressions when the main index variable has had
5126 its address taken. This does not affect the loop itself
5127 because the header of an GIMPLE_OMP_FOR is merely used to determine
5128 how to setup the parallel iteration. */
5129 return false;
5131 case GIMPLE_ASSUME:
5132 return false;
5134 case GIMPLE_DEBUG:
5135 return verify_gimple_debug (stmt);
5137 default:
5138 gcc_unreachable ();
5142 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5143 and false otherwise. */
5145 static bool
5146 verify_gimple_phi (gphi *phi)
5148 bool err = false;
5149 unsigned i;
5150 tree phi_result = gimple_phi_result (phi);
5151 bool virtual_p;
5153 if (!phi_result)
5155 error ("invalid %<PHI%> result");
5156 return true;
5159 virtual_p = virtual_operand_p (phi_result);
5160 if (TREE_CODE (phi_result) != SSA_NAME
5161 || (virtual_p
5162 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5164 error ("invalid %<PHI%> result");
5165 err = true;
5168 for (i = 0; i < gimple_phi_num_args (phi); i++)
5170 tree t = gimple_phi_arg_def (phi, i);
5172 if (!t)
5174 error ("missing %<PHI%> def");
5175 err |= true;
5176 continue;
5178 /* Addressable variables do have SSA_NAMEs but they
5179 are not considered gimple values. */
5180 else if ((TREE_CODE (t) == SSA_NAME
5181 && virtual_p != virtual_operand_p (t))
5182 || (virtual_p
5183 && (TREE_CODE (t) != SSA_NAME
5184 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5185 || (!virtual_p
5186 && !is_gimple_val (t)))
5188 error ("invalid %<PHI%> argument");
5189 debug_generic_expr (t);
5190 err |= true;
5192 #ifdef ENABLE_TYPES_CHECKING
5193 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5195 error ("incompatible types in %<PHI%> argument %u", i);
5196 debug_generic_stmt (TREE_TYPE (phi_result));
5197 debug_generic_stmt (TREE_TYPE (t));
5198 err |= true;
5200 #endif
5203 return err;
5206 /* Verify the GIMPLE statements inside the sequence STMTS. */
5208 static bool
5209 verify_gimple_in_seq_2 (gimple_seq stmts)
5211 gimple_stmt_iterator ittr;
5212 bool err = false;
5214 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5216 gimple *stmt = gsi_stmt (ittr);
5218 switch (gimple_code (stmt))
5220 case GIMPLE_BIND:
5221 err |= verify_gimple_in_seq_2 (
5222 gimple_bind_body (as_a <gbind *> (stmt)));
5223 break;
5225 case GIMPLE_TRY:
5226 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5227 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5228 break;
5230 case GIMPLE_EH_FILTER:
5231 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5232 break;
5234 case GIMPLE_EH_ELSE:
5236 geh_else *eh_else = as_a <geh_else *> (stmt);
5237 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5238 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5240 break;
5242 case GIMPLE_CATCH:
5243 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5244 as_a <gcatch *> (stmt)));
5245 break;
5247 case GIMPLE_ASSUME:
5248 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5249 break;
5251 case GIMPLE_TRANSACTION:
5252 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5253 break;
5255 default:
5257 bool err2 = verify_gimple_stmt (stmt);
5258 if (err2)
5259 debug_gimple_stmt (stmt);
5260 err |= err2;
5265 return err;
5268 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5269 is a problem, otherwise false. */
5271 static bool
5272 verify_gimple_transaction (gtransaction *stmt)
5274 tree lab;
5276 lab = gimple_transaction_label_norm (stmt);
5277 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5278 return true;
5279 lab = gimple_transaction_label_uninst (stmt);
5280 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5281 return true;
5282 lab = gimple_transaction_label_over (stmt);
5283 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5284 return true;
5286 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5290 /* Verify the GIMPLE statements inside the statement list STMTS. */
5292 DEBUG_FUNCTION bool
5293 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5295 timevar_push (TV_TREE_STMT_VERIFY);
5296 bool res = verify_gimple_in_seq_2 (stmts);
5297 if (res && ice)
5298 internal_error ("%<verify_gimple%> failed");
5299 timevar_pop (TV_TREE_STMT_VERIFY);
5300 return res;
5303 /* Return true when the T can be shared. */
5305 static bool
5306 tree_node_can_be_shared (tree t)
5308 if (IS_TYPE_OR_DECL_P (t)
5309 || TREE_CODE (t) == SSA_NAME
5310 || TREE_CODE (t) == IDENTIFIER_NODE
5311 || TREE_CODE (t) == CASE_LABEL_EXPR
5312 || is_gimple_min_invariant (t))
5313 return true;
5315 if (t == error_mark_node)
5316 return true;
5318 return false;
5321 /* Called via walk_tree. Verify tree sharing. */
5323 static tree
5324 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5326 hash_set<void *> *visited = (hash_set<void *> *) data;
5328 if (tree_node_can_be_shared (*tp))
5330 *walk_subtrees = false;
5331 return NULL;
5334 if (visited->add (*tp))
5335 return *tp;
5337 return NULL;
5340 /* Called via walk_gimple_stmt. Verify tree sharing. */
5342 static tree
5343 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5345 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5346 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5349 static bool eh_error_found;
5350 bool
5351 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5352 hash_set<gimple *> *visited)
5354 if (!visited->contains (stmt))
5356 error ("dead statement in EH table");
5357 debug_gimple_stmt (stmt);
5358 eh_error_found = true;
5360 return true;
5363 /* Verify if the location LOCs block is in BLOCKS. */
5365 static bool
5366 verify_location (hash_set<tree> *blocks, location_t loc)
5368 tree block = LOCATION_BLOCK (loc);
5369 if (block != NULL_TREE
5370 && !blocks->contains (block))
5372 error ("location references block not in block tree");
5373 return true;
5375 if (block != NULL_TREE)
5376 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5377 return false;
5380 /* Called via walk_tree. Verify that expressions have no blocks. */
5382 static tree
5383 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5385 if (!EXPR_P (*tp))
5387 *walk_subtrees = false;
5388 return NULL;
5391 location_t loc = EXPR_LOCATION (*tp);
5392 if (LOCATION_BLOCK (loc) != NULL)
5393 return *tp;
5395 return NULL;
5398 /* Called via walk_tree. Verify locations of expressions. */
5400 static tree
5401 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5403 hash_set<tree> *blocks = (hash_set<tree> *) data;
5404 tree t = *tp;
5406 /* ??? This doesn't really belong here but there's no good place to
5407 stick this remainder of old verify_expr. */
5408 /* ??? This barfs on debug stmts which contain binds to vars with
5409 different function context. */
5410 #if 0
5411 if (VAR_P (t)
5412 || TREE_CODE (t) == PARM_DECL
5413 || TREE_CODE (t) == RESULT_DECL)
5415 tree context = decl_function_context (t);
5416 if (context != cfun->decl
5417 && !SCOPE_FILE_SCOPE_P (context)
5418 && !TREE_STATIC (t)
5419 && !DECL_EXTERNAL (t))
5421 error ("local declaration from a different function");
5422 return t;
5425 #endif
5427 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5429 tree x = DECL_DEBUG_EXPR (t);
5430 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5431 if (addr)
5432 return addr;
5434 if ((VAR_P (t)
5435 || TREE_CODE (t) == PARM_DECL
5436 || TREE_CODE (t) == RESULT_DECL)
5437 && DECL_HAS_VALUE_EXPR_P (t))
5439 tree x = DECL_VALUE_EXPR (t);
5440 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5441 if (addr)
5442 return addr;
5445 if (!EXPR_P (t))
5447 *walk_subtrees = false;
5448 return NULL;
5451 location_t loc = EXPR_LOCATION (t);
5452 if (verify_location (blocks, loc))
5453 return t;
5455 return NULL;
5458 /* Called via walk_gimple_op. Verify locations of expressions. */
5460 static tree
5461 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5463 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5464 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5467 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5469 static void
5470 collect_subblocks (hash_set<tree> *blocks, tree block)
5472 tree t;
5473 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5475 blocks->add (t);
5476 collect_subblocks (blocks, t);
5480 /* Disable warnings about missing quoting in GCC diagnostics for
5481 the verification errors. Their format strings don't follow
5482 GCC diagnostic conventions and trigger an ICE in the end. */
5483 #if __GNUC__ >= 10
5484 # pragma GCC diagnostic push
5485 # pragma GCC diagnostic ignored "-Wformat-diag"
5486 #endif
5488 /* Verify the GIMPLE statements in the CFG of FN. */
5490 DEBUG_FUNCTION bool
5491 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5493 basic_block bb;
5494 bool err = false;
5496 timevar_push (TV_TREE_STMT_VERIFY);
5497 hash_set<void *> visited;
5498 hash_set<gimple *> visited_throwing_stmts;
5500 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5501 hash_set<tree> blocks;
5502 if (DECL_INITIAL (fn->decl))
5504 blocks.add (DECL_INITIAL (fn->decl));
5505 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5508 FOR_EACH_BB_FN (bb, fn)
5510 gimple_stmt_iterator gsi;
5511 edge_iterator ei;
5512 edge e;
5514 for (gphi_iterator gpi = gsi_start_phis (bb);
5515 !gsi_end_p (gpi);
5516 gsi_next (&gpi))
5518 gphi *phi = gpi.phi ();
5519 bool err2 = false;
5520 unsigned i;
5522 if (gimple_bb (phi) != bb)
5524 error ("gimple_bb (phi) is set to a wrong basic block");
5525 err2 = true;
5528 err2 |= verify_gimple_phi (phi);
5530 /* Only PHI arguments have locations. */
5531 if (gimple_location (phi) != UNKNOWN_LOCATION)
5533 error ("PHI node with location");
5534 err2 = true;
5537 for (i = 0; i < gimple_phi_num_args (phi); i++)
5539 tree arg = gimple_phi_arg_def (phi, i);
5540 tree addr = walk_tree (&arg, verify_node_sharing_1,
5541 &visited, NULL);
5542 if (addr)
5544 error ("incorrect sharing of tree nodes");
5545 debug_generic_expr (addr);
5546 err2 |= true;
5548 location_t loc = gimple_phi_arg_location (phi, i);
5549 if (virtual_operand_p (gimple_phi_result (phi))
5550 && loc != UNKNOWN_LOCATION)
5552 error ("virtual PHI with argument locations");
5553 err2 = true;
5555 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5556 if (addr)
5558 debug_generic_expr (addr);
5559 err2 = true;
5561 err2 |= verify_location (&blocks, loc);
5564 if (err2)
5565 debug_gimple_stmt (phi);
5566 err |= err2;
5569 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5571 gimple *stmt = gsi_stmt (gsi);
5572 bool err2 = false;
5573 struct walk_stmt_info wi;
5574 tree addr;
5575 int lp_nr;
5577 if (gimple_bb (stmt) != bb)
5579 error ("gimple_bb (stmt) is set to a wrong basic block");
5580 err2 = true;
5583 err2 |= verify_gimple_stmt (stmt);
5584 err2 |= verify_location (&blocks, gimple_location (stmt));
5586 memset (&wi, 0, sizeof (wi));
5587 wi.info = (void *) &visited;
5588 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5589 if (addr)
5591 error ("incorrect sharing of tree nodes");
5592 debug_generic_expr (addr);
5593 err2 |= true;
5596 memset (&wi, 0, sizeof (wi));
5597 wi.info = (void *) &blocks;
5598 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5599 if (addr)
5601 debug_generic_expr (addr);
5602 err2 |= true;
5605 /* If the statement is marked as part of an EH region, then it is
5606 expected that the statement could throw. Verify that when we
5607 have optimizations that simplify statements such that we prove
5608 that they cannot throw, that we update other data structures
5609 to match. */
5610 lp_nr = lookup_stmt_eh_lp (stmt);
5611 if (lp_nr != 0)
5612 visited_throwing_stmts.add (stmt);
5613 if (lp_nr > 0)
5615 if (!stmt_could_throw_p (cfun, stmt))
5617 if (verify_nothrow)
5619 error ("statement marked for throw, but doesn%'t");
5620 err2 |= true;
5623 else if (!gsi_one_before_end_p (gsi))
5625 error ("statement marked for throw in middle of block");
5626 err2 |= true;
5630 if (err2)
5631 debug_gimple_stmt (stmt);
5632 err |= err2;
5635 FOR_EACH_EDGE (e, ei, bb->succs)
5636 if (e->goto_locus != UNKNOWN_LOCATION)
5637 err |= verify_location (&blocks, e->goto_locus);
5640 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5641 eh_error_found = false;
5642 if (eh_table)
5643 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5644 (&visited_throwing_stmts);
5646 if (ice && (err || eh_error_found))
5647 internal_error ("verify_gimple failed");
5649 verify_histograms ();
5650 timevar_pop (TV_TREE_STMT_VERIFY);
5652 return (err || eh_error_found);
5656 /* Verifies that the flow information is OK. */
5658 static int
5659 gimple_verify_flow_info (void)
5661 int err = 0;
5662 basic_block bb;
5663 gimple_stmt_iterator gsi;
5664 gimple *stmt;
5665 edge e;
5666 edge_iterator ei;
5668 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5669 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5671 error ("ENTRY_BLOCK has IL associated with it");
5672 err = 1;
5675 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5676 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5678 error ("EXIT_BLOCK has IL associated with it");
5679 err = 1;
5682 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5683 if (e->flags & EDGE_FALLTHRU)
5685 error ("fallthru to exit from bb %d", e->src->index);
5686 err = 1;
5689 FOR_EACH_BB_FN (bb, cfun)
5691 bool found_ctrl_stmt = false;
5693 stmt = NULL;
5695 /* Skip labels on the start of basic block. */
5696 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5698 tree label;
5699 gimple *prev_stmt = stmt;
5701 stmt = gsi_stmt (gsi);
5703 if (gimple_code (stmt) != GIMPLE_LABEL)
5704 break;
5706 label = gimple_label_label (as_a <glabel *> (stmt));
5707 if (prev_stmt && DECL_NONLOCAL (label))
5709 error ("nonlocal label %qD is not first in a sequence "
5710 "of labels in bb %d", label, bb->index);
5711 err = 1;
5714 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5716 error ("EH landing pad label %qD is not first in a sequence "
5717 "of labels in bb %d", label, bb->index);
5718 err = 1;
5721 if (label_to_block (cfun, label) != bb)
5723 error ("label %qD to block does not match in bb %d",
5724 label, bb->index);
5725 err = 1;
5728 if (decl_function_context (label) != current_function_decl)
5730 error ("label %qD has incorrect context in bb %d",
5731 label, bb->index);
5732 err = 1;
5736 /* Verify that body of basic block BB is free of control flow. */
5737 bool seen_nondebug_stmt = false;
5738 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5740 gimple *stmt = gsi_stmt (gsi);
5742 if (found_ctrl_stmt)
5744 error ("control flow in the middle of basic block %d",
5745 bb->index);
5746 err = 1;
5749 if (stmt_ends_bb_p (stmt))
5750 found_ctrl_stmt = true;
5752 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5754 error ("label %qD in the middle of basic block %d",
5755 gimple_label_label (label_stmt), bb->index);
5756 err = 1;
5759 /* Check that no statements appear between a returns_twice call
5760 and its associated abnormal edge. */
5761 if (gimple_code (stmt) == GIMPLE_CALL
5762 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5764 const char *misplaced = NULL;
5765 /* TM is an exception: it points abnormal edges just after the
5766 call that starts a transaction, i.e. it must end the BB. */
5767 if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5769 if (single_succ_p (bb)
5770 && bb_has_abnormal_pred (single_succ (bb))
5771 && !gsi_one_nondebug_before_end_p (gsi))
5772 misplaced = "not last";
5774 else
5776 if (seen_nondebug_stmt
5777 && bb_has_abnormal_pred (bb))
5778 misplaced = "not first";
5780 if (misplaced)
5782 error ("returns_twice call is %s in basic block %d",
5783 misplaced, bb->index);
5784 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5785 err = 1;
5788 if (!is_gimple_debug (stmt))
5789 seen_nondebug_stmt = true;
5792 gsi = gsi_last_nondebug_bb (bb);
5793 if (gsi_end_p (gsi))
5794 continue;
5796 stmt = gsi_stmt (gsi);
5798 if (gimple_code (stmt) == GIMPLE_LABEL)
5799 continue;
5801 err |= verify_eh_edges (stmt);
5803 if (is_ctrl_stmt (stmt))
5805 FOR_EACH_EDGE (e, ei, bb->succs)
5806 if (e->flags & EDGE_FALLTHRU)
5808 error ("fallthru edge after a control statement in bb %d",
5809 bb->index);
5810 err = 1;
5814 if (gimple_code (stmt) != GIMPLE_COND)
5816 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5817 after anything else but if statement. */
5818 FOR_EACH_EDGE (e, ei, bb->succs)
5819 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5821 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5822 bb->index);
5823 err = 1;
5827 switch (gimple_code (stmt))
5829 case GIMPLE_COND:
5831 edge true_edge;
5832 edge false_edge;
5834 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5836 if (!true_edge
5837 || !false_edge
5838 || !(true_edge->flags & EDGE_TRUE_VALUE)
5839 || !(false_edge->flags & EDGE_FALSE_VALUE)
5840 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5841 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5842 || EDGE_COUNT (bb->succs) >= 3)
5844 error ("wrong outgoing edge flags at end of bb %d",
5845 bb->index);
5846 err = 1;
5849 break;
5851 case GIMPLE_GOTO:
5852 if (simple_goto_p (stmt))
5854 error ("explicit goto at end of bb %d", bb->index);
5855 err = 1;
5857 else
5859 /* FIXME. We should double check that the labels in the
5860 destination blocks have their address taken. */
5861 FOR_EACH_EDGE (e, ei, bb->succs)
5862 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5863 | EDGE_FALSE_VALUE))
5864 || !(e->flags & EDGE_ABNORMAL))
5866 error ("wrong outgoing edge flags at end of bb %d",
5867 bb->index);
5868 err = 1;
5871 break;
5873 case GIMPLE_CALL:
5874 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5875 break;
5876 /* fallthru */
5877 case GIMPLE_RETURN:
5878 if (!single_succ_p (bb)
5879 || (single_succ_edge (bb)->flags
5880 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5881 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5883 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5884 err = 1;
5886 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5888 error ("return edge does not point to exit in bb %d",
5889 bb->index);
5890 err = 1;
5892 break;
5894 case GIMPLE_SWITCH:
5896 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5897 tree prev;
5898 edge e;
5899 size_t i, n;
5901 n = gimple_switch_num_labels (switch_stmt);
5903 /* Mark all the destination basic blocks. */
5904 for (i = 0; i < n; ++i)
5906 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5907 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5908 label_bb->aux = (void *)1;
5911 /* Verify that the case labels are sorted. */
5912 prev = gimple_switch_label (switch_stmt, 0);
5913 for (i = 1; i < n; ++i)
5915 tree c = gimple_switch_label (switch_stmt, i);
5916 if (!CASE_LOW (c))
5918 error ("found default case not at the start of "
5919 "case vector");
5920 err = 1;
5921 continue;
5923 if (CASE_LOW (prev)
5924 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5926 error ("case labels not sorted: ");
5927 print_generic_expr (stderr, prev);
5928 fprintf (stderr," is greater than ");
5929 print_generic_expr (stderr, c);
5930 fprintf (stderr," but comes before it.\n");
5931 err = 1;
5933 prev = c;
5935 /* VRP will remove the default case if it can prove it will
5936 never be executed. So do not verify there always exists
5937 a default case here. */
5939 FOR_EACH_EDGE (e, ei, bb->succs)
5941 if (!e->dest->aux)
5943 error ("extra outgoing edge %d->%d",
5944 bb->index, e->dest->index);
5945 err = 1;
5948 e->dest->aux = (void *)2;
5949 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5950 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5952 error ("wrong outgoing edge flags at end of bb %d",
5953 bb->index);
5954 err = 1;
5958 /* Check that we have all of them. */
5959 for (i = 0; i < n; ++i)
5961 basic_block label_bb = gimple_switch_label_bb (cfun,
5962 switch_stmt, i);
5964 if (label_bb->aux != (void *)2)
5966 error ("missing edge %i->%i", bb->index, label_bb->index);
5967 err = 1;
5971 FOR_EACH_EDGE (e, ei, bb->succs)
5972 e->dest->aux = (void *)0;
5974 break;
5976 case GIMPLE_EH_DISPATCH:
5977 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5978 break;
5980 default:
5981 break;
5985 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5986 verify_dominators (CDI_DOMINATORS);
5988 return err;
5991 #if __GNUC__ >= 10
5992 # pragma GCC diagnostic pop
5993 #endif
5995 /* Updates phi nodes after creating a forwarder block joined
5996 by edge FALLTHRU. */
5998 static void
5999 gimple_make_forwarder_block (edge fallthru)
6001 edge e;
6002 edge_iterator ei;
6003 basic_block dummy, bb;
6004 tree var;
6005 gphi_iterator gsi;
6006 bool forward_location_p;
6008 dummy = fallthru->src;
6009 bb = fallthru->dest;
6011 if (single_pred_p (bb))
6012 return;
6014 /* We can forward location info if we have only one predecessor. */
6015 forward_location_p = single_pred_p (dummy);
6017 /* If we redirected a branch we must create new PHI nodes at the
6018 start of BB. */
6019 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6021 gphi *phi, *new_phi;
6023 phi = gsi.phi ();
6024 var = gimple_phi_result (phi);
6025 new_phi = create_phi_node (var, bb);
6026 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6027 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6028 forward_location_p
6029 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6032 /* Add the arguments we have stored on edges. */
6033 FOR_EACH_EDGE (e, ei, bb->preds)
6035 if (e == fallthru)
6036 continue;
6038 flush_pending_stmts (e);
6043 /* Return a non-special label in the head of basic block BLOCK.
6044 Create one if it doesn't exist. */
6046 tree
6047 gimple_block_label (basic_block bb)
6049 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6050 bool first = true;
6051 tree label;
6052 glabel *stmt;
6054 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6056 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6057 if (!stmt)
6058 break;
6059 label = gimple_label_label (stmt);
6060 if (!DECL_NONLOCAL (label))
6062 if (!first)
6063 gsi_move_before (&i, &s);
6064 return label;
6068 label = create_artificial_label (UNKNOWN_LOCATION);
6069 stmt = gimple_build_label (label);
6070 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6071 return label;
6075 /* Attempt to perform edge redirection by replacing a possibly complex
6076 jump instruction by a goto or by removing the jump completely.
6077 This can apply only if all edges now point to the same block. The
6078 parameters and return values are equivalent to
6079 redirect_edge_and_branch. */
6081 static edge
6082 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6084 basic_block src = e->src;
6085 gimple_stmt_iterator i;
6086 gimple *stmt;
6088 /* We can replace or remove a complex jump only when we have exactly
6089 two edges. */
6090 if (EDGE_COUNT (src->succs) != 2
6091 /* Verify that all targets will be TARGET. Specifically, the
6092 edge that is not E must also go to TARGET. */
6093 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6094 return NULL;
6096 i = gsi_last_bb (src);
6097 if (gsi_end_p (i))
6098 return NULL;
6100 stmt = gsi_stmt (i);
6102 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6104 gsi_remove (&i, true);
6105 e = ssa_redirect_edge (e, target);
6106 e->flags = EDGE_FALLTHRU;
6107 return e;
6110 return NULL;
6114 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6115 edge representing the redirected branch. */
6117 static edge
6118 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6120 basic_block bb = e->src;
6121 gimple_stmt_iterator gsi;
6122 edge ret;
6123 gimple *stmt;
6125 if (e->flags & EDGE_ABNORMAL)
6126 return NULL;
6128 if (e->dest == dest)
6129 return NULL;
6131 if (e->flags & EDGE_EH)
6132 return redirect_eh_edge (e, dest);
6134 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6136 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6137 if (ret)
6138 return ret;
6141 gsi = gsi_last_nondebug_bb (bb);
6142 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6144 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6146 case GIMPLE_COND:
6147 /* For COND_EXPR, we only need to redirect the edge. */
6148 break;
6150 case GIMPLE_GOTO:
6151 /* No non-abnormal edges should lead from a non-simple goto, and
6152 simple ones should be represented implicitly. */
6153 gcc_unreachable ();
6155 case GIMPLE_SWITCH:
6157 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6158 tree label = gimple_block_label (dest);
6159 tree cases = get_cases_for_edge (e, switch_stmt);
6161 /* If we have a list of cases associated with E, then use it
6162 as it's a lot faster than walking the entire case vector. */
6163 if (cases)
6165 edge e2 = find_edge (e->src, dest);
6166 tree last, first;
6168 first = cases;
6169 while (cases)
6171 last = cases;
6172 CASE_LABEL (cases) = label;
6173 cases = CASE_CHAIN (cases);
6176 /* If there was already an edge in the CFG, then we need
6177 to move all the cases associated with E to E2. */
6178 if (e2)
6180 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6182 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6183 CASE_CHAIN (cases2) = first;
6185 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6187 else
6189 size_t i, n = gimple_switch_num_labels (switch_stmt);
6191 for (i = 0; i < n; i++)
6193 tree elt = gimple_switch_label (switch_stmt, i);
6194 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6195 CASE_LABEL (elt) = label;
6199 break;
6201 case GIMPLE_ASM:
6203 gasm *asm_stmt = as_a <gasm *> (stmt);
6204 int i, n = gimple_asm_nlabels (asm_stmt);
6205 tree label = NULL;
6207 for (i = 0; i < n; ++i)
6209 tree cons = gimple_asm_label_op (asm_stmt, i);
6210 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6212 if (!label)
6213 label = gimple_block_label (dest);
6214 TREE_VALUE (cons) = label;
6218 /* If we didn't find any label matching the former edge in the
6219 asm labels, we must be redirecting the fallthrough
6220 edge. */
6221 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6223 break;
6225 case GIMPLE_RETURN:
6226 gsi_remove (&gsi, true);
6227 e->flags |= EDGE_FALLTHRU;
6228 break;
6230 case GIMPLE_OMP_RETURN:
6231 case GIMPLE_OMP_CONTINUE:
6232 case GIMPLE_OMP_SECTIONS_SWITCH:
6233 case GIMPLE_OMP_FOR:
6234 /* The edges from OMP constructs can be simply redirected. */
6235 break;
6237 case GIMPLE_EH_DISPATCH:
6238 if (!(e->flags & EDGE_FALLTHRU))
6239 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6240 break;
6242 case GIMPLE_TRANSACTION:
6243 if (e->flags & EDGE_TM_ABORT)
6244 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6245 gimple_block_label (dest));
6246 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6247 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6248 gimple_block_label (dest));
6249 else
6250 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6251 gimple_block_label (dest));
6252 break;
6254 default:
6255 /* Otherwise it must be a fallthru edge, and we don't need to
6256 do anything besides redirecting it. */
6257 gcc_assert (e->flags & EDGE_FALLTHRU);
6258 break;
6261 /* Update/insert PHI nodes as necessary. */
6263 /* Now update the edges in the CFG. */
6264 e = ssa_redirect_edge (e, dest);
6266 return e;
6269 /* Returns true if it is possible to remove edge E by redirecting
6270 it to the destination of the other edge from E->src. */
6272 static bool
6273 gimple_can_remove_branch_p (const_edge e)
6275 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6276 return false;
6278 return true;
6281 /* Simple wrapper, as we can always redirect fallthru edges. */
6283 static basic_block
6284 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6286 e = gimple_redirect_edge_and_branch (e, dest);
6287 gcc_assert (e);
6289 return NULL;
6293 /* Splits basic block BB after statement STMT (but at least after the
6294 labels). If STMT is NULL, BB is split just after the labels. */
6296 static basic_block
6297 gimple_split_block (basic_block bb, void *stmt)
6299 gimple_stmt_iterator gsi;
6300 gimple_stmt_iterator gsi_tgt;
6301 gimple_seq list;
6302 basic_block new_bb;
6303 edge e;
6304 edge_iterator ei;
6306 new_bb = create_empty_bb (bb);
6308 /* Redirect the outgoing edges. */
6309 new_bb->succs = bb->succs;
6310 bb->succs = NULL;
6311 FOR_EACH_EDGE (e, ei, new_bb->succs)
6312 e->src = new_bb;
6314 /* Get a stmt iterator pointing to the first stmt to move. */
6315 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6316 gsi = gsi_after_labels (bb);
6317 else
6319 gsi = gsi_for_stmt ((gimple *) stmt);
6320 gsi_next (&gsi);
6323 /* Move everything from GSI to the new basic block. */
6324 if (gsi_end_p (gsi))
6325 return new_bb;
6327 /* Split the statement list - avoid re-creating new containers as this
6328 brings ugly quadratic memory consumption in the inliner.
6329 (We are still quadratic since we need to update stmt BB pointers,
6330 sadly.) */
6331 gsi_split_seq_before (&gsi, &list);
6332 set_bb_seq (new_bb, list);
6333 for (gsi_tgt = gsi_start (list);
6334 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6335 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6337 return new_bb;
6341 /* Moves basic block BB after block AFTER. */
6343 static bool
6344 gimple_move_block_after (basic_block bb, basic_block after)
6346 if (bb->prev_bb == after)
6347 return true;
6349 unlink_block (bb);
6350 link_block (bb, after);
6352 return true;
6356 /* Return TRUE if block BB has no executable statements, otherwise return
6357 FALSE. */
6359 static bool
6360 gimple_empty_block_p (basic_block bb)
6362 /* BB must have no executable statements. */
6363 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6364 if (phi_nodes (bb))
6365 return false;
6366 while (!gsi_end_p (gsi))
6368 gimple *stmt = gsi_stmt (gsi);
6369 if (is_gimple_debug (stmt))
6371 else if (gimple_code (stmt) == GIMPLE_NOP
6372 || gimple_code (stmt) == GIMPLE_PREDICT)
6374 else
6375 return false;
6376 gsi_next (&gsi);
6378 return true;
6382 /* Split a basic block if it ends with a conditional branch and if the
6383 other part of the block is not empty. */
6385 static basic_block
6386 gimple_split_block_before_cond_jump (basic_block bb)
6388 gimple *last, *split_point;
6389 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6390 if (gsi_end_p (gsi))
6391 return NULL;
6392 last = gsi_stmt (gsi);
6393 if (gimple_code (last) != GIMPLE_COND
6394 && gimple_code (last) != GIMPLE_SWITCH)
6395 return NULL;
6396 gsi_prev (&gsi);
6397 split_point = gsi_stmt (gsi);
6398 return split_block (bb, split_point)->dest;
6402 /* Return true if basic_block can be duplicated. */
6404 static bool
6405 gimple_can_duplicate_bb_p (const_basic_block bb)
6407 gimple *last = last_stmt (CONST_CAST_BB (bb));
6409 /* Do checks that can only fail for the last stmt, to minimize the work in the
6410 stmt loop. */
6411 if (last) {
6412 /* A transaction is a single entry multiple exit region. It
6413 must be duplicated in its entirety or not at all. */
6414 if (gimple_code (last) == GIMPLE_TRANSACTION)
6415 return false;
6417 /* An IFN_UNIQUE call must be duplicated as part of its group,
6418 or not at all. */
6419 if (is_gimple_call (last)
6420 && gimple_call_internal_p (last)
6421 && gimple_call_internal_unique_p (last))
6422 return false;
6425 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6426 !gsi_end_p (gsi); gsi_next (&gsi))
6428 gimple *g = gsi_stmt (gsi);
6430 /* Prohibit duplication of returns_twice calls, otherwise associated
6431 abnormal edges also need to be duplicated properly.
6432 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6433 duplicated as part of its group, or not at all.
6434 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6435 group, so the same holds there. */
6436 if (is_gimple_call (g)
6437 && (gimple_call_flags (g) & ECF_RETURNS_TWICE
6438 || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6439 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6440 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6441 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6442 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6443 return false;
6446 return true;
6449 /* Create a duplicate of the basic block BB. NOTE: This does not
6450 preserve SSA form. */
6452 static basic_block
6453 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6455 basic_block new_bb;
6456 gimple_stmt_iterator gsi_tgt;
6458 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6460 /* Copy the PHI nodes. We ignore PHI node arguments here because
6461 the incoming edges have not been setup yet. */
6462 for (gphi_iterator gpi = gsi_start_phis (bb);
6463 !gsi_end_p (gpi);
6464 gsi_next (&gpi))
6466 gphi *phi, *copy;
6467 phi = gpi.phi ();
6468 copy = create_phi_node (NULL_TREE, new_bb);
6469 create_new_def_for (gimple_phi_result (phi), copy,
6470 gimple_phi_result_ptr (copy));
6471 gimple_set_uid (copy, gimple_uid (phi));
6474 gsi_tgt = gsi_start_bb (new_bb);
6475 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6476 !gsi_end_p (gsi);
6477 gsi_next (&gsi))
6479 def_operand_p def_p;
6480 ssa_op_iter op_iter;
6481 tree lhs;
6482 gimple *stmt, *copy;
6484 stmt = gsi_stmt (gsi);
6485 if (gimple_code (stmt) == GIMPLE_LABEL)
6486 continue;
6488 /* Don't duplicate label debug stmts. */
6489 if (gimple_debug_bind_p (stmt)
6490 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6491 == LABEL_DECL)
6492 continue;
6494 /* Create a new copy of STMT and duplicate STMT's virtual
6495 operands. */
6496 copy = gimple_copy (stmt);
6497 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6499 maybe_duplicate_eh_stmt (copy, stmt);
6500 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6502 /* When copying around a stmt writing into a local non-user
6503 aggregate, make sure it won't share stack slot with other
6504 vars. */
6505 lhs = gimple_get_lhs (stmt);
6506 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6508 tree base = get_base_address (lhs);
6509 if (base
6510 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6511 && DECL_IGNORED_P (base)
6512 && !TREE_STATIC (base)
6513 && !DECL_EXTERNAL (base)
6514 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6515 DECL_NONSHAREABLE (base) = 1;
6518 /* If requested remap dependence info of cliques brought in
6519 via inlining. */
6520 if (id)
6521 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6523 tree op = gimple_op (copy, i);
6524 if (!op)
6525 continue;
6526 if (TREE_CODE (op) == ADDR_EXPR
6527 || TREE_CODE (op) == WITH_SIZE_EXPR)
6528 op = TREE_OPERAND (op, 0);
6529 while (handled_component_p (op))
6530 op = TREE_OPERAND (op, 0);
6531 if ((TREE_CODE (op) == MEM_REF
6532 || TREE_CODE (op) == TARGET_MEM_REF)
6533 && MR_DEPENDENCE_CLIQUE (op) > 1
6534 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6536 if (!id->dependence_map)
6537 id->dependence_map = new hash_map<dependence_hash,
6538 unsigned short>;
6539 bool existed;
6540 unsigned short &newc = id->dependence_map->get_or_insert
6541 (MR_DEPENDENCE_CLIQUE (op), &existed);
6542 if (!existed)
6544 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6545 newc = ++cfun->last_clique;
6547 MR_DEPENDENCE_CLIQUE (op) = newc;
6551 /* Create new names for all the definitions created by COPY and
6552 add replacement mappings for each new name. */
6553 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6554 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6557 return new_bb;
6560 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6562 static void
6563 add_phi_args_after_copy_edge (edge e_copy)
6565 basic_block bb, bb_copy = e_copy->src, dest;
6566 edge e;
6567 edge_iterator ei;
6568 gphi *phi, *phi_copy;
6569 tree def;
6570 gphi_iterator psi, psi_copy;
6572 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6573 return;
6575 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6577 if (e_copy->dest->flags & BB_DUPLICATED)
6578 dest = get_bb_original (e_copy->dest);
6579 else
6580 dest = e_copy->dest;
6582 e = find_edge (bb, dest);
6583 if (!e)
6585 /* During loop unrolling the target of the latch edge is copied.
6586 In this case we are not looking for edge to dest, but to
6587 duplicated block whose original was dest. */
6588 FOR_EACH_EDGE (e, ei, bb->succs)
6590 if ((e->dest->flags & BB_DUPLICATED)
6591 && get_bb_original (e->dest) == dest)
6592 break;
6595 gcc_assert (e != NULL);
6598 for (psi = gsi_start_phis (e->dest),
6599 psi_copy = gsi_start_phis (e_copy->dest);
6600 !gsi_end_p (psi);
6601 gsi_next (&psi), gsi_next (&psi_copy))
6603 phi = psi.phi ();
6604 phi_copy = psi_copy.phi ();
6605 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6606 add_phi_arg (phi_copy, def, e_copy,
6607 gimple_phi_arg_location_from_edge (phi, e));
6612 /* Basic block BB_COPY was created by code duplication. Add phi node
6613 arguments for edges going out of BB_COPY. The blocks that were
6614 duplicated have BB_DUPLICATED set. */
6616 void
6617 add_phi_args_after_copy_bb (basic_block bb_copy)
6619 edge e_copy;
6620 edge_iterator ei;
6622 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6624 add_phi_args_after_copy_edge (e_copy);
6628 /* Blocks in REGION_COPY array of length N_REGION were created by
6629 duplication of basic blocks. Add phi node arguments for edges
6630 going from these blocks. If E_COPY is not NULL, also add
6631 phi node arguments for its destination.*/
6633 void
6634 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6635 edge e_copy)
6637 unsigned i;
6639 for (i = 0; i < n_region; i++)
6640 region_copy[i]->flags |= BB_DUPLICATED;
6642 for (i = 0; i < n_region; i++)
6643 add_phi_args_after_copy_bb (region_copy[i]);
6644 if (e_copy)
6645 add_phi_args_after_copy_edge (e_copy);
6647 for (i = 0; i < n_region; i++)
6648 region_copy[i]->flags &= ~BB_DUPLICATED;
6651 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6652 important exit edge EXIT. By important we mean that no SSA name defined
6653 inside region is live over the other exit edges of the region. All entry
6654 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6655 to the duplicate of the region. Dominance and loop information is
6656 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6657 UPDATE_DOMINANCE is false then we assume that the caller will update the
6658 dominance information after calling this function. The new basic
6659 blocks are stored to REGION_COPY in the same order as they had in REGION,
6660 provided that REGION_COPY is not NULL.
6661 The function returns false if it is unable to copy the region,
6662 true otherwise. */
6664 bool
6665 gimple_duplicate_sese_region (edge entry, edge exit,
6666 basic_block *region, unsigned n_region,
6667 basic_block *region_copy,
6668 bool update_dominance)
6670 unsigned i;
6671 bool free_region_copy = false, copying_header = false;
6672 class loop *loop = entry->dest->loop_father;
6673 edge exit_copy;
6674 edge redirected;
6675 profile_count total_count = profile_count::uninitialized ();
6676 profile_count entry_count = profile_count::uninitialized ();
6678 if (!can_copy_bbs_p (region, n_region))
6679 return false;
6681 /* Some sanity checking. Note that we do not check for all possible
6682 missuses of the functions. I.e. if you ask to copy something weird,
6683 it will work, but the state of structures probably will not be
6684 correct. */
6685 for (i = 0; i < n_region; i++)
6687 /* We do not handle subloops, i.e. all the blocks must belong to the
6688 same loop. */
6689 if (region[i]->loop_father != loop)
6690 return false;
6692 if (region[i] != entry->dest
6693 && region[i] == loop->header)
6694 return false;
6697 /* In case the function is used for loop header copying (which is the primary
6698 use), ensure that EXIT and its copy will be new latch and entry edges. */
6699 if (loop->header == entry->dest)
6701 copying_header = true;
6703 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6704 return false;
6706 for (i = 0; i < n_region; i++)
6707 if (region[i] != exit->src
6708 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6709 return false;
6712 initialize_original_copy_tables ();
6714 if (copying_header)
6715 set_loop_copy (loop, loop_outer (loop));
6716 else
6717 set_loop_copy (loop, loop);
6719 if (!region_copy)
6721 region_copy = XNEWVEC (basic_block, n_region);
6722 free_region_copy = true;
6725 /* Record blocks outside the region that are dominated by something
6726 inside. */
6727 auto_vec<basic_block> doms;
6728 if (update_dominance)
6730 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6733 if (entry->dest->count.initialized_p ())
6735 total_count = entry->dest->count;
6736 entry_count = entry->count ();
6737 /* Fix up corner cases, to avoid division by zero or creation of negative
6738 frequencies. */
6739 if (entry_count > total_count)
6740 entry_count = total_count;
6743 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6744 split_edge_bb_loc (entry), update_dominance);
6745 if (total_count.initialized_p () && entry_count.initialized_p ())
6747 scale_bbs_frequencies_profile_count (region, n_region,
6748 total_count - entry_count,
6749 total_count);
6750 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6751 total_count);
6754 if (copying_header)
6756 loop->header = exit->dest;
6757 loop->latch = exit->src;
6760 /* Redirect the entry and add the phi node arguments. */
6761 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6762 gcc_assert (redirected != NULL);
6763 flush_pending_stmts (entry);
6765 /* Concerning updating of dominators: We must recount dominators
6766 for entry block and its copy. Anything that is outside of the
6767 region, but was dominated by something inside needs recounting as
6768 well. */
6769 if (update_dominance)
6771 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6772 doms.safe_push (get_bb_original (entry->dest));
6773 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6776 /* Add the other PHI node arguments. */
6777 add_phi_args_after_copy (region_copy, n_region, NULL);
6779 if (free_region_copy)
6780 free (region_copy);
6782 free_original_copy_tables ();
6783 return true;
6786 /* Checks if BB is part of the region defined by N_REGION BBS. */
6787 static bool
6788 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6790 unsigned int n;
6792 for (n = 0; n < n_region; n++)
6794 if (bb == bbs[n])
6795 return true;
6797 return false;
6800 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6801 are stored to REGION_COPY in the same order in that they appear
6802 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6803 the region, EXIT an exit from it. The condition guarding EXIT
6804 is moved to ENTRY. Returns true if duplication succeeds, false
6805 otherwise.
6807 For example,
6809 some_code;
6810 if (cond)
6812 else
6815 is transformed to
6817 if (cond)
6819 some_code;
6822 else
6824 some_code;
6829 bool
6830 gimple_duplicate_sese_tail (edge entry, edge exit,
6831 basic_block *region, unsigned n_region,
6832 basic_block *region_copy)
6834 unsigned i;
6835 bool free_region_copy = false;
6836 class loop *loop = exit->dest->loop_father;
6837 class loop *orig_loop = entry->dest->loop_father;
6838 basic_block switch_bb, entry_bb, nentry_bb;
6839 profile_count total_count = profile_count::uninitialized (),
6840 exit_count = profile_count::uninitialized ();
6841 edge exits[2], nexits[2], e;
6842 gimple_stmt_iterator gsi;
6843 gimple *cond_stmt;
6844 edge sorig, snew;
6845 basic_block exit_bb;
6846 gphi_iterator psi;
6847 gphi *phi;
6848 tree def;
6849 class loop *target, *aloop, *cloop;
6851 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6852 exits[0] = exit;
6853 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6855 if (!can_copy_bbs_p (region, n_region))
6856 return false;
6858 initialize_original_copy_tables ();
6859 set_loop_copy (orig_loop, loop);
6861 target= loop;
6862 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6864 if (bb_part_of_region_p (aloop->header, region, n_region))
6866 cloop = duplicate_loop (aloop, target);
6867 duplicate_subloops (aloop, cloop);
6871 if (!region_copy)
6873 region_copy = XNEWVEC (basic_block, n_region);
6874 free_region_copy = true;
6877 gcc_assert (!need_ssa_update_p (cfun));
6879 /* Record blocks outside the region that are dominated by something
6880 inside. */
6881 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6882 n_region);
6884 total_count = exit->src->count;
6885 exit_count = exit->count ();
6886 /* Fix up corner cases, to avoid division by zero or creation of negative
6887 frequencies. */
6888 if (exit_count > total_count)
6889 exit_count = total_count;
6891 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6892 split_edge_bb_loc (exit), true);
6893 if (total_count.initialized_p () && exit_count.initialized_p ())
6895 scale_bbs_frequencies_profile_count (region, n_region,
6896 total_count - exit_count,
6897 total_count);
6898 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6899 total_count);
6902 /* Create the switch block, and put the exit condition to it. */
6903 entry_bb = entry->dest;
6904 nentry_bb = get_bb_copy (entry_bb);
6905 if (!last_stmt (entry->src)
6906 || !stmt_ends_bb_p (last_stmt (entry->src)))
6907 switch_bb = entry->src;
6908 else
6909 switch_bb = split_edge (entry);
6910 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6912 gsi = gsi_last_bb (switch_bb);
6913 cond_stmt = last_stmt (exit->src);
6914 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6915 cond_stmt = gimple_copy (cond_stmt);
6917 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6919 sorig = single_succ_edge (switch_bb);
6920 sorig->flags = exits[1]->flags;
6921 sorig->probability = exits[1]->probability;
6922 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6923 snew->probability = exits[0]->probability;
6926 /* Register the new edge from SWITCH_BB in loop exit lists. */
6927 rescan_loop_exit (snew, true, false);
6929 /* Add the PHI node arguments. */
6930 add_phi_args_after_copy (region_copy, n_region, snew);
6932 /* Get rid of now superfluous conditions and associated edges (and phi node
6933 arguments). */
6934 exit_bb = exit->dest;
6936 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6937 PENDING_STMT (e) = NULL;
6939 /* The latch of ORIG_LOOP was copied, and so was the backedge
6940 to the original header. We redirect this backedge to EXIT_BB. */
6941 for (i = 0; i < n_region; i++)
6942 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6944 gcc_assert (single_succ_edge (region_copy[i]));
6945 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6946 PENDING_STMT (e) = NULL;
6947 for (psi = gsi_start_phis (exit_bb);
6948 !gsi_end_p (psi);
6949 gsi_next (&psi))
6951 phi = psi.phi ();
6952 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6953 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6956 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6957 PENDING_STMT (e) = NULL;
6959 /* Anything that is outside of the region, but was dominated by something
6960 inside needs to update dominance info. */
6961 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6963 if (free_region_copy)
6964 free (region_copy);
6966 free_original_copy_tables ();
6967 return true;
6970 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6971 adding blocks when the dominator traversal reaches EXIT. This
6972 function silently assumes that ENTRY strictly dominates EXIT. */
6974 void
6975 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6976 vec<basic_block> *bbs_p)
6978 basic_block son;
6980 for (son = first_dom_son (CDI_DOMINATORS, entry);
6981 son;
6982 son = next_dom_son (CDI_DOMINATORS, son))
6984 bbs_p->safe_push (son);
6985 if (son != exit)
6986 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP so that repeated references
   to the same decl resolve to the same copy.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Nothing to do if the decl already belongs to TO_CONTEXT.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      /* First visit: create the duplicate and record it via the
	 reference returned by get_or_insert.  */
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  /* Make the copy visible in TO_CONTEXT's local decls.  */
	  add_local_decl (f, new_t);
	}
      else
	{
	  /* Besides variables, only CONST_DECLs are expected here.  */
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    /* Reuse the duplicate created on a previous visit.  */
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones, so the
   same NAME always yields the same replacement.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  /* Virtual operands are never moved this way (their PHIs are removed
     by the caller instead).  */
  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  /* Duplicate the underlying decl into TO_CONTEXT first.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Bookkeeping shared by move_stmt_r / move_stmt_op / move_block_to_fn
   while statements are relocated from one function to another.  */

struct move_stmt_d
{
  /* BLOCK whose statements are being moved; NULL_TREE matches any
     block (see move_stmt_op).  */
  tree orig_block;
  /* Replacement TREE_BLOCK installed in the destination.  */
  tree new_block;
  /* Source and destination function decls.  */
  tree from_context;
  tree to_context;
  /* Maps old ssa names / var decls to their duplicates.  */
  hash_map<tree, tree> *vars_map;
  /* Maps source LABEL_DECLs to their duplicates, if any.  */
  htab_t new_label_map;
  /* Maps source EH regions to their duplicates.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be remapped (suppressed inside OMP
     directive clauses; see move_stmt_r).  */
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	/* No block recorded; nothing to re-parent.  */
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	{
	  /* tree_node_can_be_shared says we can share invariant
	     addresses but unshare_expr copies them anyways.  Make sure
	     to unshare before adjusting the block in place - we do not
	     always see a copy here.  */
	  if (TREE_CODE (t) == ADDR_EXPR
	      && is_gimple_min_invariant (t))
	    *tp = t = unshare_expr (t);
	  TREE_SET_BLOCK (t, p->new_block);
	}
      else if (flag_checking)
	{
	  /* Sanity check: any other block must be nested inside
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	/* PARM_DECLs were pre-registered in the map by the caller.  */
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees here.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
7164 /* Helper for move_stmt_r. Given an EH region number for the source
7165 function, map that to the duplicate EH regio number in the dest. */
7167 static int
7168 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7170 eh_region old_r, new_r;
7172 old_r = get_eh_region_from_number (old_nr);
7173 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7175 return new_r->index;
7178 /* Similar, but operate on INTEGER_CSTs. */
7180 static tree
7181 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7183 int old_nr, new_nr;
7185 old_nr = tree_to_shwi (old_t_nr);
7186 new_nr = move_stmt_eh_region_nr (old_nr, p);
7188 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  EH region numbers are remapped via WI->info (a
   move_stmt_d).  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is also a region number here.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.
     The caller stashed the destination loop in loop_father->aux.  */
  if (current_loops)
    {
      class loop *new_loop = (class loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index in the destination.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    vec_safe_grow_cleared (cfg->x_basic_block_info,
			   cfg->x_last_basic_block + 1);

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  Note: no gsi_next in the loop
     header because remove_phi_node advances the iterator itself.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  But replace all uses that
	     might be outside of the region we move.  */
	  use_operand_p use_p;
	  imm_use_iterator iter;
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, SSA_NAME_VAR (op));
	  remove_phi_node (&psi, true);
	  continue;
	}

      /* Remap the result and all SSA-name arguments.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-parent argument locations that referenced ORIG_BLOCK.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label->block mapping to the destination CFG.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move EH landing-pad info and profile histograms along.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      if (is_gimple_call (stmt))
	notice_special_calls (as_a <gcall *> (stmt));
      pop_cfun ();
    }

  /* Re-parent goto_locus on outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   If there is no single outermost region, return NULL and set *ALL to
   true.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region,
				bool *all)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen to the common enclosing region; NULL means the
		 two regions have no common ancestor.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      if (region == NULL)
		{
		  *all = true;
		  return NULL;
		}
	    }
	}
    }

  return region;
}
7502 static tree
7503 new_label_mapper (tree decl, void *data)
7505 htab_t hash = (htab_t) data;
7506 struct tree_map *m;
7507 void **slot;
7509 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7511 m = XNEW (struct tree_map);
7512 m->hash = DECL_UID (decl);
7513 m->base.from = decl;
7514 m->to = create_artificial_label (UNKNOWN_LOCATION);
7515 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7516 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7517 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7519 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7520 gcc_assert (*slot == NULL);
7522 *slot = m;
7524 return m->to;
7527 /* Tree walker to replace the decls used inside value expressions by
7528 duplicates. */
7530 static tree
7531 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7533 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7535 switch (TREE_CODE (*tp))
7537 case VAR_DECL:
7538 case PARM_DECL:
7539 case RESULT_DECL:
7540 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7541 break;
7542 default:
7543 break;
7546 if (IS_TYPE_OR_DECL_P (*tp))
7547 *walk_subtrees = false;
7549 return NULL;
7552 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7553 subblocks. */
7555 static void
7556 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7557 tree to_context)
7559 tree *tp, t;
7561 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7563 t = *tp;
7564 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7565 continue;
7566 replace_by_duplicate_decl (&t, vars_map, to_context);
7567 if (t != *tp)
7569 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7571 tree x = DECL_VALUE_EXPR (*tp);
7572 struct replace_decls_d rd = { vars_map, to_context };
7573 unshare_expr (x);
7574 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7575 SET_DECL_VALUE_EXPR (t, x);
7576 DECL_HAS_VALUE_EXPR_P (t) = 1;
7578 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7579 *tp = t;
7583 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7584 replace_block_vars_by_duplicates (block, vars_map, to_context);
7587 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7588 from FN1 to FN2. */
7590 static void
7591 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7592 class loop *loop)
7594 /* Discard it from the old loop array. */
7595 (*get_loops (fn1))[loop->num] = NULL;
7597 /* Place it in the new loop array, assigning it a new number. */
7598 loop->num = number_of_loops (fn2);
7599 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7601 /* Recurse to children. */
7602 for (loop = loop->inner; loop; loop = loop->next)
7603 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.
   Aborts via gcc_assert on any violation.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Build a membership bitmap of the region's block indices.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      /* ENTRY must be the only block with a predecessor outside the
	 region; all other blocks' predecessors must be inside.  */
      if (bb == entry)
	{
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      /* Symmetrically, only EXIT may have a successor outside.  */
      if (bb == exit)
	{
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
7660 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7662 bool
7663 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7665 bitmap release_names = (bitmap)data;
7667 if (TREE_CODE (from) != SSA_NAME)
7668 return true;
7670 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7671 return true;
7674 /* Return LOOP_DIST_ALIAS call if present in BB. */
7676 static gimple *
7677 find_loop_dist_alias (basic_block bb)
7679 gimple *g = last_stmt (bb);
7680 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7681 return NULL;
7683 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7684 gsi_prev (&gsi);
7685 if (gsi_end_p (gsi))
7686 return NULL;
7688 g = gsi_stmt (gsi);
7689 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7690 return g;
7691 return NULL;
7694 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7695 to VALUE and update any immediate uses of it's LHS. */
7697 void
7698 fold_loop_internal_call (gimple *g, tree value)
7700 tree lhs = gimple_call_lhs (g);
7701 use_operand_p use_p;
7702 imm_use_iterator iter;
7703 gimple *use_stmt;
7704 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7706 replace_call_with_value (&gsi, value);
7707 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7709 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7710 SET_USE (use_p, value);
7711 update_stmt (use_stmt);
7715 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7716 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7717 single basic block in the original CFG and the new basic block is
7718 returned. DEST_CFUN must not have a CFG yet.
7720 Note that the region need not be a pure SESE region. Blocks inside
7721 the region may contain calls to abort/exit. The only restriction
7722 is that ENTRY_BB should be the only entry point and it must
7723 dominate EXIT_BB.
7725 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7726 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7727 to the new function.
7729 All local variables referenced in the region are assumed to be in
7730 the corresponding BLOCK_VARS and unexpanded variable lists
7731 associated with DEST_CFUN.
7733 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7734 reimplement move_sese_region_to_fn by duplicating the region rather than
7735 moving it. */
7737 basic_block
7738 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7739 basic_block exit_bb, tree orig_block)
7741 vec<basic_block> bbs;
7742 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7743 basic_block after, bb, *entry_pred, *exit_succ, abb;
7744 struct function *saved_cfun = cfun;
7745 int *entry_flag, *exit_flag;
7746 profile_probability *entry_prob, *exit_prob;
7747 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7748 edge e;
7749 edge_iterator ei;
7750 htab_t new_label_map;
7751 hash_map<void *, void *> *eh_map;
7752 class loop *loop = entry_bb->loop_father;
7753 class loop *loop0 = get_loop (saved_cfun, 0);
7754 struct move_stmt_d d;
7756 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7757 region. */
7758 gcc_assert (entry_bb != exit_bb
7759 && (!exit_bb
7760 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7762 /* Collect all the blocks in the region. Manually add ENTRY_BB
7763 because it won't be added by dfs_enumerate_from. */
7764 bbs.create (0);
7765 bbs.safe_push (entry_bb);
7766 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7768 if (flag_checking)
7769 verify_sese (entry_bb, exit_bb, &bbs);
7771 /* The blocks that used to be dominated by something in BBS will now be
7772 dominated by the new block. */
7773 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7774 bbs.address (),
7775 bbs.length ());
7777 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7778 the predecessor edges to ENTRY_BB and the successor edges to
7779 EXIT_BB so that we can re-attach them to the new basic block that
7780 will replace the region. */
7781 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7782 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7783 entry_flag = XNEWVEC (int, num_entry_edges);
7784 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7785 i = 0;
7786 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7788 entry_prob[i] = e->probability;
7789 entry_flag[i] = e->flags;
7790 entry_pred[i++] = e->src;
7791 remove_edge (e);
7794 if (exit_bb)
7796 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7797 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7798 exit_flag = XNEWVEC (int, num_exit_edges);
7799 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7800 i = 0;
7801 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7803 exit_prob[i] = e->probability;
7804 exit_flag[i] = e->flags;
7805 exit_succ[i++] = e->dest;
7806 remove_edge (e);
7809 else
7811 num_exit_edges = 0;
7812 exit_succ = NULL;
7813 exit_flag = NULL;
7814 exit_prob = NULL;
7817 /* Switch context to the child function to initialize DEST_FN's CFG. */
7818 gcc_assert (dest_cfun->cfg == NULL);
7819 push_cfun (dest_cfun);
7821 init_empty_tree_cfg ();
7823 /* Initialize EH information for the new function. */
7824 eh_map = NULL;
7825 new_label_map = NULL;
7826 if (saved_cfun->eh)
7828 eh_region region = NULL;
7829 bool all = false;
7831 FOR_EACH_VEC_ELT (bbs, i, bb)
7833 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7834 if (all)
7835 break;
7838 init_eh_for_function ();
7839 if (region != NULL || all)
7841 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7842 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7843 new_label_mapper, new_label_map);
7847 /* Initialize an empty loop tree. */
7848 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7849 init_loops_structure (dest_cfun, loops, 1);
7850 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7851 set_loops_for_fn (dest_cfun, loops);
7853 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7855 /* Move the outlined loop tree part. */
7856 num_nodes = bbs.length ();
7857 FOR_EACH_VEC_ELT (bbs, i, bb)
7859 if (bb->loop_father->header == bb)
7861 class loop *this_loop = bb->loop_father;
7862 /* Avoid the need to remap SSA names used in nb_iterations. */
7863 free_numbers_of_iterations_estimates (this_loop);
7864 class loop *outer = loop_outer (this_loop);
7865 if (outer == loop
7866 /* If the SESE region contains some bbs ending with
7867 a noreturn call, those are considered to belong
7868 to the outermost loop in saved_cfun, rather than
7869 the entry_bb's loop_father. */
7870 || outer == loop0)
7872 if (outer != loop)
7873 num_nodes -= this_loop->num_nodes;
7874 flow_loop_tree_node_remove (bb->loop_father);
7875 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7876 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7879 else if (bb->loop_father == loop0 && loop0 != loop)
7880 num_nodes--;
7882 /* Remove loop exits from the outlined region. */
7883 if (loops_for_fn (saved_cfun)->exits)
7884 FOR_EACH_EDGE (e, ei, bb->succs)
7886 struct loops *l = loops_for_fn (saved_cfun);
7887 loop_exit **slot
7888 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7889 NO_INSERT);
7890 if (slot)
7891 l->exits->clear_slot (slot);
7895 /* Adjust the number of blocks in the tree root of the outlined part. */
7896 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7898 /* Setup a mapping to be used by move_block_to_fn. */
7899 loop->aux = current_loops->tree_root;
7900 loop0->aux = current_loops->tree_root;
7902 /* Fix up orig_loop_num. If the block referenced in it has been moved
7903 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7904 signed char *moved_orig_loop_num = NULL;
7905 for (auto dloop : loops_list (dest_cfun, 0))
7906 if (dloop->orig_loop_num)
7908 if (moved_orig_loop_num == NULL)
7909 moved_orig_loop_num
7910 = XCNEWVEC (signed char, vec_safe_length (larray));
7911 if ((*larray)[dloop->orig_loop_num] != NULL
7912 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7914 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7915 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7916 moved_orig_loop_num[dloop->orig_loop_num]++;
7917 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7919 else
7921 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7922 dloop->orig_loop_num = 0;
7925 pop_cfun ();
7927 if (moved_orig_loop_num)
7929 FOR_EACH_VEC_ELT (bbs, i, bb)
7931 gimple *g = find_loop_dist_alias (bb);
7932 if (g == NULL)
7933 continue;
7935 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7936 gcc_assert (orig_loop_num
7937 && (unsigned) orig_loop_num < vec_safe_length (larray));
7938 if (moved_orig_loop_num[orig_loop_num] == 2)
7940 /* If we have moved both loops with this orig_loop_num into
7941 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7942 too, update the first argument. */
7943 gcc_assert ((*larray)[orig_loop_num] != NULL
7944 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7945 tree t = build_int_cst (integer_type_node,
7946 (*larray)[orig_loop_num]->num);
7947 gimple_call_set_arg (g, 0, t);
7948 update_stmt (g);
7949 /* Make sure the following loop will not update it. */
7950 moved_orig_loop_num[orig_loop_num] = 0;
7952 else
7953 /* Otherwise at least one of the loops stayed in saved_cfun.
7954 Remove the LOOP_DIST_ALIAS call. */
7955 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7957 FOR_EACH_BB_FN (bb, saved_cfun)
7959 gimple *g = find_loop_dist_alias (bb);
7960 if (g == NULL)
7961 continue;
7962 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7963 gcc_assert (orig_loop_num
7964 && (unsigned) orig_loop_num < vec_safe_length (larray));
7965 if (moved_orig_loop_num[orig_loop_num])
7966 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7967 of the corresponding loops was moved, remove it. */
7968 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7970 XDELETEVEC (moved_orig_loop_num);
7972 ggc_free (larray);
7974 /* Move blocks from BBS into DEST_CFUN. */
7975 gcc_assert (bbs.length () >= 2);
7976 after = dest_cfun->cfg->x_entry_block_ptr;
7977 hash_map<tree, tree> vars_map;
7979 memset (&d, 0, sizeof (d));
7980 d.orig_block = orig_block;
7981 d.new_block = DECL_INITIAL (dest_cfun->decl);
7982 d.from_context = cfun->decl;
7983 d.to_context = dest_cfun->decl;
7984 d.vars_map = &vars_map;
7985 d.new_label_map = new_label_map;
7986 d.eh_map = eh_map;
7987 d.remap_decls_p = true;
7989 if (gimple_in_ssa_p (cfun))
7990 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7992 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7993 set_ssa_default_def (dest_cfun, arg, narg);
7994 vars_map.put (arg, narg);
7997 FOR_EACH_VEC_ELT (bbs, i, bb)
7999 /* No need to update edge counts on the last block. It has
8000 already been updated earlier when we detached the region from
8001 the original CFG. */
8002 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8003 after = bb;
8006 /* Adjust the maximum clique used. */
8007 dest_cfun->last_clique = saved_cfun->last_clique;
8009 loop->aux = NULL;
8010 loop0->aux = NULL;
8011 /* Loop sizes are no longer correct, fix them up. */
8012 loop->num_nodes -= num_nodes;
8013 for (class loop *outer = loop_outer (loop);
8014 outer; outer = loop_outer (outer))
8015 outer->num_nodes -= num_nodes;
8016 loop0->num_nodes -= bbs.length () - num_nodes;
8018 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8020 class loop *aloop;
8021 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8022 if (aloop != NULL)
8024 if (aloop->simduid)
8026 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8027 d.to_context);
8028 dest_cfun->has_simduid_loops = true;
8030 if (aloop->force_vectorize)
8031 dest_cfun->has_force_vectorize_loops = true;
8035 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8036 if (orig_block)
8038 tree block;
8039 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8040 == NULL_TREE);
8041 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8042 = BLOCK_SUBBLOCKS (orig_block);
8043 for (block = BLOCK_SUBBLOCKS (orig_block);
8044 block; block = BLOCK_CHAIN (block))
8045 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8046 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8049 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8050 &vars_map, dest_cfun->decl);
8052 if (new_label_map)
8053 htab_delete (new_label_map);
8054 if (eh_map)
8055 delete eh_map;
8057 /* We need to release ssa-names in a defined order, so first find them,
8058 and then iterate in ascending version order. */
8059 bitmap release_names = BITMAP_ALLOC (NULL);
8060 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8061 bitmap_iterator bi;
8062 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8063 release_ssa_name (ssa_name (i));
8064 BITMAP_FREE (release_names);
8066 /* Rewire the entry and exit blocks. The successor to the entry
8067 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8068 the child function. Similarly, the predecessor of DEST_FN's
8069 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8070 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8071 various CFG manipulation function get to the right CFG.
8073 FIXME, this is silly. The CFG ought to become a parameter to
8074 these helpers. */
8075 push_cfun (dest_cfun);
8076 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8077 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8078 if (exit_bb)
8080 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8081 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8083 else
8084 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8085 pop_cfun ();
8087 /* Back in the original function, the SESE region has disappeared,
8088 create a new basic block in its place. */
8089 bb = create_empty_bb (entry_pred[0]);
8090 if (current_loops)
8091 add_bb_to_loop (bb, loop);
8092 for (i = 0; i < num_entry_edges; i++)
8094 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8095 e->probability = entry_prob[i];
8098 for (i = 0; i < num_exit_edges; i++)
8100 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8101 e->probability = exit_prob[i];
8104 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8105 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8106 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8108 if (exit_bb)
8110 free (exit_prob);
8111 free (exit_flag);
8112 free (exit_succ);
8114 free (entry_prob);
8115 free (entry_flag);
8116 free (entry_pred);
8117 bbs.release ();
8119 return bb;
/* Dump default def DEF to file FILE using FLAGS and indentation
   SPC.  DEF is an SSA default definition; what is printed is of the
   form "TYPE NAME = VAR;" so the dump reads like a declaration with
   an initializer naming the underlying variable.  */

static void
dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
{
  /* Emit SPC spaces of indentation by hand; there is no helper that
     both indents and prints ssa-name info.  */
  for (int i = 0; i < spc; ++i)
    fprintf (file, " ");
  /* Range/points-to annotations for the SSA name, if any.  */
  dump_ssaname_info_to_file (file, def, spc);

  print_generic_expr (file, TREE_TYPE (def), flags);
  fprintf (file, " ");
  print_generic_expr (file, def, flags);
  fprintf (file, " = ");
  print_generic_expr (file, SSA_NAME_VAR (def), flags);
  fprintf (file, ";\n");
}
/* Print no_sanitize attribute to FILE for a given attribute VALUE.
   VALUE is a bitmask of SANITIZE_* flags; each set flag is printed by
   its option name, joined with " | ".  */

static void
print_no_sanitize_attr_value (FILE *file, tree value)
{
  unsigned int flags = tree_to_uhwi (value);
  bool first = true;
  for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
    {
      /* Only print entries whose full flag set is contained in FLAGS,
	 so multi-bit group flags are reported as the group name.  */
      if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
	{
	  if (!first)
	    fprintf (file, " | ");
	  fprintf (file, "%s", sanitizer_opts[i].name);
	  first = false;
	}
    }
}
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  The dump form depends on how far FNDECL has been
   lowered: a CFG-based dump if the CFG exists, a flat GIMPLE sequence
   if the function is in GIMPLE but has no CFG yet, and a GENERIC tree
   dump otherwise.  Temporarily sets current_function_decl; restores
   it before returning.  */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  tree fntype = TREE_TYPE (fndecl);
  /* Dump attributes from both the decl and its type.  */
  tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };

  for (int i = 0; i != 2; ++i)
    {
      if (!attrs[i])
	continue;

      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      /* no_sanitize stores a flag bitmask; decode it into
		 readable option names instead of a raw integer.  */
	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  if (flags & TDF_GIMPLE)
    {
      /* Dump in a form the GIMPLE frontend can parse back.  Emit the
	 hot-bb threshold param once per dump file so profile counts
	 round-trip.  */
      static bool hotness_bb_param_printed = false;
      if (profile_info != NULL
	  && !hotness_bb_param_printed)
	{
	  hotness_bb_param_printed = true;
	  fprintf (file,
		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
		   " */\n", get_hot_bb_threshold ());
	}

      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE (%s",
	       (fun->curr_properties & PROP_ssa) ? "ssa"
	       : (fun->curr_properties & PROP_cfg) ? "cfg"
	       : "");

      if (fun && fun->cfg)
	{
	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
	  if (bb->count.initialized_p ())
	    fprintf (file, ",%s(%" PRIu64 ")",
		     profile_quality_as_string (bb->count.quality ()),
		     bb->count.value ());
	  if (dump_flags & TDF_UID)
	    fprintf (file, ")\n%sD_%u (", function_name (fun),
		     DECL_UID (fndecl));
	  else
	    fprintf (file, ")\n%s (", function_name (fun));
	}
    }
  else
    {
      print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
      if (dump_flags & TDF_UID)
	fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
		 tmclone ? "[tm-clone] " : "");
      else
	fprintf (file, " %s %s(", function_name (fun),
		 tmclone ? "[tm-clone] " : "");
    }

  /* Parameter list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  /* Dump default defs of parameters, a by-reference result,
	     and the static chain, so alias info has named bases.  */
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      if (gimple_in_ssa_p (fun))
	FOR_EACH_SSA_NAME (ix, name, fun)
	  {
	    if (!SSA_NAME_VAR (name)
		/* SSA name with decls without a name still get
		   dumped as _N, list those explicitely as well even
		   though we've dumped the decl declaration as D.xxx
		   above.  */
		|| !SSA_NAME_IDENTIFIER (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun && (fun->curr_properties & PROP_gimple_any))
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  dump_function_to_file (fn, stderr, flags);
}
8435 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8437 static void
8438 print_pred_bbs (FILE *file, basic_block bb)
8440 edge e;
8441 edge_iterator ei;
8443 FOR_EACH_EDGE (e, ei, bb->preds)
8444 fprintf (file, "bb_%d ", e->src->index);
8448 /* Print on FILE the indexes for the successors of basic_block BB. */
8450 static void
8451 print_succ_bbs (FILE *file, basic_block bb)
8453 edge e;
8454 edge_iterator ei;
8456 FOR_EACH_EDGE (e, ei, bb->succs)
8457 fprintf (file, "bb_%d ", e->dest->index);
/* Print to FILE the basic block BB following the VERBOSITY level.
   Verbosity >= 2 prints the block header with predecessor/successor
   lists; verbosity >= 3 additionally dumps the block body.  */

void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  /* Build an INDENT-wide indentation string on the stack.  */
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s  {\n", s_indent);
      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
      fprintf (file, "%s  }\n", s_indent);
    }
}
8488 static void print_loop_and_siblings (FILE *, class loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  A NULL LOOP is silently ignored; a loop with no header
   is printed as "deleted".  */

static void
print_loop (FILE *file, class loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* No header means the loop has been deleted; nothing more to
	 print.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations);

  /* Optional iteration-count annotations, printed only when known.  */
  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  if (loop->unroll)
    fprintf (file, ", unroll = %d", loop->unroll);
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Only blocks owned directly by LOOP; inner loops are recursed
	 into below.  */
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Following VERBOSITY level this outputs the contents of the
   loop, or just its structure.  Recurses along the ->next sibling
   chain; stops at NULL.  */

static void
print_loop_and_siblings (FILE *file, class loop *loop, int indent,
			 int verbosity)
{
  if (loop == NULL)
    return;

  print_loop (file, loop, indent, verbosity);
  print_loop_and_siblings (file, loop->next, indent, verbosity);
}
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE.  Operates on
   the current function (cfun).  */

void
print_loops (FILE *file, int verbosity)
{
  basic_block bb;

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  fprintf (file, "\nLoops in function: %s\n", current_function_name ());
  /* bb->loop_father is the root of the loop tree; it is NULL when no
     loop structure has been built.  */
  if (bb && bb->loop_father)
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
/* Dump a loop to stderr with minimal verbosity (structure only).  */

DEBUG_FUNCTION void
debug (class loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
8593 DEBUG_FUNCTION void
8594 debug (class loop *ptr)
8596 if (ptr)
8597 debug (*ptr);
8598 else
8599 fprintf (stderr, "<nil>\n");
/* Dump a loop verbosely (verbosity 3: headers, bodies and nested
   loops) to stderr.  */

DEBUG_FUNCTION void
debug_verbose (class loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
8610 DEBUG_FUNCTION void
8611 debug_verbose (class loop *ptr)
8613 if (ptr)
8614 debug (*ptr);
8615 else
8616 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Prints the loop tree of the current function to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (class loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up in the current function's loop tree.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
8645 /* Return true if BB ends with a call, possibly followed by some
8646 instructions that must stay with the call. Return false,
8647 otherwise. */
8649 static bool
8650 gimple_block_ends_with_call_p (basic_block bb)
8652 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8653 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8657 /* Return true if BB ends with a conditional branch. Return false,
8658 otherwise. */
8660 static bool
8661 gimple_block_ends_with_condjump_p (const_basic_block bb)
8663 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8664 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represtented in the CFG.  Used e.g. when deciding where
   fake edges to the exit block are needed.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (cfun, t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Well-behaved nothrow builtins cannot terminate the block in a
     hidden way.  */
  if (is_gimple_call (t)
      && fndecl
      && fndecl_built_in_p (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_dump() and
	 __gcov_reset() and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call with only fake successor edges still counts
	 as terminating; any real outgoing edge means the CFG already
	 models it.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asm or asm with no outputs can have arbitrary effects,
     including not returning normally.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do if the function has only the fixed entry/exit
     blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces the call into its own block once edges
		 are committed, keeping the fallthru edge distinct from
		 the fake edge added below.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  /* Walk the block backwards; split_block below may renumber
	     blocks, but the statements of BB stay reachable through
	     GSI.  */
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  auto_vec<basic_block> bbs_to_remove;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects successors of removed blocks that survive; it is
	 the dominance frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_fix_dom.release ();
}
/* Purge dead EH edges from basic block BB.  Returns true if any edge
   was removed.  If the last statement of BB can still throw
   internally, all EH edges are live and nothing is done.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (cfun, stmt))
    return false;

  /* Manual iterator stepping: removing an edge shifts the successor
     vector, so only advance EI when the current edge is kept.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
9011 /* Purge dead EH edges from basic block listed in BLOCKS. */
9013 bool
9014 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9016 bool changed = false;
9017 unsigned i;
9018 bitmap_iterator bi;
9020 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9022 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9024 /* Earlier gimple_purge_dead_eh_edges could have removed
9025 this basic block already. */
9026 gcc_assert (bb || changed);
9027 if (bb != NULL)
9028 changed |= gimple_purge_dead_eh_edges (bb);
9031 return changed;
/* Purge dead abnormal call edges from basic block BB.  Returns true
   if anything changed.  If BB's last statement can still make an
   abnormal goto, all abnormal edges are live and nothing is done.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  /* Only advance EI when the edge is kept; edge removal shifts the
     successor vector.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* A fallthru edge doubling as abnormal is kept as a plain
	     fallthru edge; any other abnormal edge is removed.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block listed in BLOCKS.
   Returns true if any block changed.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
9087 /* This function is called whenever a new edge is created or
9088 redirected. */
9090 static void
9091 gimple_execute_on_growing_pred (edge e)
9093 basic_block bb = e->dest;
9095 if (!gimple_seq_empty_p (phi_nodes (bb)))
9096 reserve_phi_args_for_new_edge (bb);
9099 /* This function is called immediately before edge E is removed from
9100 the edge vector E->dest->preds. */
9102 static void
9103 gimple_execute_on_shrinking_pred (edge e)
9105 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9106 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* Copy SECOND's argument on E2 over to FIRST's phi on E,
	 preserving the source location.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  COND_E is the condition tree,
   passed as void * to match the cfg-hook signature.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  gsi = gsi_last_bb (cond_bb);

  /* Gimplify the condition first so the GIMPLE_COND gets a valid
     comparison operand.  */
  cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
					  is_gimple_condexpr_for_cond,
					  NULL_TREE, false,
					  GSI_CONTINUE_LINKING);
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing fallthru edge becomes the
     FALSE edge of the new condition.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counting in RECORD: RECORD->size accumulates the estimated
   size of BB's statements, RECORD->time the size weighted by the
   block's execution count (IPA profile if usable, local scaled profile
   otherwise, unweighted as a last resort).  */

static void
gimple_account_profile_record (basic_block bb,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
       gsi_next_nondebug (&i))
    {
      record->size
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (profile_info)
	{
	  /* Feedback profile available: weight by the IPA count when
	     both entry and BB counts are usable.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
	      && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
	      && bb->count.ipa ().initialized_p ())
	    record->time
	      += estimate_num_insns (gsi_stmt (i),
				     &eni_time_weights)
		 * bb->count.ipa ().to_gcov_type ();
	}
      else if (bb->count.initialized_p ()
	       && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
	/* Guessed profile: scale by BB frequency relative to the
	   entry block.  */
	record->time
	  += estimate_num_insns
	       (gsi_stmt (i),
		&eni_time_weights)
	     * bb->count.to_sreal_scale
		 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
      else
	record->time
	  += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
    }
}
/* The GIMPLE implementation of the generic cfg_hooks interface.
   Field order must match struct cfg_hooks exactly.  */
9221 struct cfg_hooks gimple_cfg_hooks = {
9222 "gimple",
9223 gimple_verify_flow_info,
9224 gimple_dump_bb, /* dump_bb */
9225 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9226 create_bb, /* create_basic_block */
9227 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9228 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9229 gimple_can_remove_branch_p, /* can_remove_branch_p */
9230 remove_bb, /* delete_basic_block */
9231 gimple_split_block, /* split_block */
9232 gimple_move_block_after, /* move_block_after */
9233 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9234 gimple_merge_blocks, /* merge_blocks */
9235 gimple_predict_edge, /* predict_edge */
9236 gimple_predicted_by_p, /* predicted_by_p */
9237 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9238 gimple_duplicate_bb, /* duplicate_block */
9239 gimple_split_edge, /* split_edge */
9240 gimple_make_forwarder_block, /* make_forward_block */
9241 NULL, /* tidy_fallthru_edge */
9242 NULL, /* force_nonfallthru */
9243 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9244 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9245 gimple_flow_call_edges_add, /* flow_call_edges_add */
9246 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9247 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9248 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9249 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9250 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9251 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9252 flush_pending_stmts, /* flush_pending_stmts */
9253 gimple_empty_block_p, /* block_empty_p */
9254 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9255 gimple_account_profile_record, /* account_profile_record */
9259 /* Split all critical edges.  Split some extra (not necessarily critical) edges
9260 if FOR_EDGE_INSERTION_P is true.  Returns a TODO mask (currently 0).  */
9262 unsigned int
9263 split_critical_edges (bool for_edge_insertion_p /* = false */)
9265 basic_block bb;
9266 edge e;
9267 edge_iterator ei;
9269 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9270 expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
9271 mappings around the calls to split_edge.  */
9272 start_recording_case_labels ();
9273 FOR_ALL_BB_FN (bb, cfun)
9275 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split.  */
9277 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9278 split_edge (e);
9279 /* PRE inserts statements to edges and expects that
9280 since split_critical_edges was done beforehand, committing edge
9281 insertions will not split more edges.  In addition to critical
9282 edges we must split edges that have multiple successors and
9283 end by control flow statements, such as RESX.
9284 Go ahead and split them too.  This matches the logic in
9285 gimple_find_edge_insert_loc.  */
9286 else if (for_edge_insertion_p
9287 && (!single_pred_p (e->dest)
9288 || !gimple_seq_empty_p (phi_nodes (e->dest))
9289 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9290 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9291 && !(e->flags & EDGE_ABNORMAL))
9293 gimple_stmt_iterator gsi;
9295 gsi = gsi_last_bb (e->src);
/* Only split when E->src ends in a control-flow stmt other than a
   return / __builtin_return, i.e. when stmts cannot simply be
   appended to E->src.  */
9296 if (!gsi_end_p (gsi)
9297 && stmt_ends_bb_p (gsi_stmt (gsi))
9298 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9299 && !gimple_call_builtin_p (gsi_stmt (gsi),
9300 BUILT_IN_RETURN)))
9301 split_edge (e);
9305 end_recording_case_labels ();
9306 return 0;
9309 namespace {
9311 const pass_data pass_data_split_crit_edges =
9313 GIMPLE_PASS, /* type */
9314 "crited", /* name */
9315 OPTGROUP_NONE, /* optinfo_flags */
9316 TV_TREE_SPLIT_EDGES, /* tv_id */
9317 PROP_cfg, /* properties_required */
9318 PROP_no_crit_edges, /* properties_provided */
9319 0, /* properties_destroyed */
9320 0, /* todo_flags_start */
9321 0, /* todo_flags_finish */
/* Pass wrapper around split_critical_edges; after it runs the CFG
   provides PROP_no_crit_edges.  */
9324 class pass_split_crit_edges : public gimple_opt_pass
9326 public:
9327 pass_split_crit_edges (gcc::context *ctxt)
9328 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9331 /* opt_pass methods: */
9332 unsigned int execute (function *) final override
9334 return split_critical_edges ();
9337 opt_pass * clone () final override
9339 return new pass_split_crit_edges (m_ctxt);
9341 }; // class pass_split_crit_edges
9343 } // anon namespace
/* Factory used by the pass manager (see passes.def).  */
9345 gimple_opt_pass *
9346 make_pass_split_crit_edges (gcc::context *ctxt)
9348 return new pass_split_crit_edges (ctxt);
9352 /* Insert COND expression which is GIMPLE_COND after STMT
9353 in basic block BB with appropriate basic block split
9354 and creation of a new conditionally executed basic block.
9355 Update profile so the new bb is visited with probability PROB.
9356 Return created basic block.  */
9357 basic_block
9358 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9359 profile_probability prob)
9361 edge fall = split_block (bb, stmt);
9362 gimple_stmt_iterator iter = gsi_last_bb (bb);
9363 basic_block new_bb;
9365 /* Insert cond statement.  */
9366 gcc_assert (gimple_code (cond) == GIMPLE_COND);
/* BB may be empty after the split (e.g. STMT was a label); handle
   both the empty and non-empty cases.  */
9367 if (gsi_end_p (iter))
9368 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9369 else
9370 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9372 /* Create conditionally executed block.  */
9373 new_bb = create_empty_bb (bb);
9374 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9375 e->probability = prob;
9376 new_bb->count = e->count ();
9377 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9379 /* Fix edge for split bb.  */
9380 fall->flags = EDGE_FALSE_VALUE;
9381 fall->probability -= e->probability;
9383 /* Update dominance info.  */
9385 if (dom_info_available_p (CDI_DOMINATORS))
9386 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9387 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9390 /* Update loop info.  */
9391 if (current_loops)
9392 add_bb_to_loop (new_bb, bb->loop_father);
9394 return new_bb;
9399 /* Given a basic block B which ends with a conditional and has
9400 precisely two successors, determine which of the edges is taken if
9401 the conditional is true and which is taken if the conditional is
9402 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9404 void
9405 extract_true_false_edges_from_block (basic_block b,
9406 edge *true_edge,
9407 edge *false_edge)
9409 edge e = EDGE_SUCC (b, 0);
9411 if (e->flags & EDGE_TRUE_VALUE)
9413 *true_edge = e;
9414 *false_edge = EDGE_SUCC (b, 1);
9416 else
9418 *false_edge = e;
9419 *true_edge = EDGE_SUCC (b, 1);
9424 /* From a controlling predicate in the immediate dominator DOM of
9425 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9426 predicate evaluates to true and false and store them to
9427 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9428 they are non-NULL.  Returns true if the edges can be determined,
9429 else return false.  */
9431 bool
9432 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9433 edge *true_controlled_edge,
9434 edge *false_controlled_edge)
9436 basic_block bb = phiblock;
9437 edge true_edge, false_edge, tem;
9438 edge e0 = NULL, e1 = NULL;
9440 /* We have to verify that one edge into the PHI node is dominated
9441 by the true edge of the predicate block and the other edge
9442 dominated by the false edge.  This ensures that the PHI argument
9443 we are going to take is completely determined by the path we
9444 take from the predicate block.
9445 We can only use BB dominance checks below if the destination of
9446 the true/false edges are dominated by their edge, thus only
9447 have a single predecessor.  */
9448 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
/* Classify predecessor edge 0: does it come via the true or the
   false side of the predicate?  */
9449 tem = EDGE_PRED (bb, 0);
9450 if (tem == true_edge
9451 || (single_pred_p (true_edge->dest)
9452 && (tem->src == true_edge->dest
9453 || dominated_by_p (CDI_DOMINATORS,
9454 tem->src, true_edge->dest))))
9455 e0 = tem;
9456 else if (tem == false_edge
9457 || (single_pred_p (false_edge->dest)
9458 && (tem->src == false_edge->dest
9459 || dominated_by_p (CDI_DOMINATORS,
9460 tem->src, false_edge->dest))))
9461 e1 = tem;
9462 else
9463 return false;
/* Same classification for predecessor edge 1 (mirror of the above).  */
9464 tem = EDGE_PRED (bb, 1);
9465 if (tem == true_edge
9466 || (single_pred_p (true_edge->dest)
9467 && (tem->src == true_edge->dest
9468 || dominated_by_p (CDI_DOMINATORS,
9469 tem->src, true_edge->dest))))
9470 e0 = tem;
9471 else if (tem == false_edge
9472 || (single_pred_p (false_edge->dest)
9473 && (tem->src == false_edge->dest
9474 || dominated_by_p (CDI_DOMINATORS,
9475 tem->src, false_edge->dest))))
9476 e1 = tem;
9477 else
9478 return false;
/* Fail if both predecessors classified to the same side.  */
9479 if (!e0 || !e1)
9480 return false;
9482 if (true_controlled_edge)
9483 *true_controlled_edge = e0;
9484 if (false_controlled_edge)
9485 *false_controlled_edge = e1;
9487 return true;
9490 /* Generate a range test *LHS = (utype) INDEX - LOW and *RHS = HIGH - LOW
9491 such that the unsigned comparison *LHS <= *RHS holds iff INDEX is in the
range [LOW, HIGH].  Associated stmts are inserted before the last
stmt of BB.  */
9493 void
9494 generate_range_test (basic_block bb, tree index, tree low, tree high,
9495 tree *lhs, tree *rhs)
9497 tree type = TREE_TYPE (index);
/* range_check_type picks an unsigned type in which the subtraction
   below cannot overflow.  */
9498 tree utype = range_check_type (type);
9500 low = fold_convert (utype, low);
9501 high = fold_convert (utype, high);
9503 gimple_seq seq = NULL;
9504 index = gimple_convert (&seq, utype, index);
9505 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
/* HIGH - LOW folds to a constant when both bounds are constants.  */
9506 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9508 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9509 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9512 /* Return the basic block that belongs to label numbered INDEX
9513 of a switch statement. */
9515 basic_block
9516 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9518 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9521 /* Return the default basic block of a switch statement. */
9523 basic_block
9524 gimple_switch_default_bb (function *ifun, gswitch *gs)
9526 return gimple_switch_label_bb (ifun, gs, 0);
9529 /* Return the edge that belongs to label numbered INDEX
9530 of a switch statement. */
9532 edge
9533 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9535 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9538 /* Return the default edge of a switch statement. */
9540 edge
9541 gimple_switch_default_edge (function *ifun, gswitch *gs)
9543 return gimple_switch_edge (ifun, gs, 0);
9546 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9548 bool
9549 cond_only_block_p (basic_block bb)
9551 /* BB must have no executable statements. */
9552 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9553 if (phi_nodes (bb))
9554 return false;
9555 while (!gsi_end_p (gsi))
9557 gimple *stmt = gsi_stmt (gsi);
9558 if (is_gimple_debug (stmt))
9560 else if (gimple_code (stmt) == GIMPLE_NOP
9561 || gimple_code (stmt) == GIMPLE_PREDICT
9562 || gimple_code (stmt) == GIMPLE_COND)
9564 else
9565 return false;
9566 gsi_next (&gsi);
9568 return true;
9572 /* Emit return warnings.  */
9574 namespace {
9576 const pass_data pass_data_warn_function_return =
9578 GIMPLE_PASS, /* type */
9579 "*warn_function_return", /* name */
9580 OPTGROUP_NONE, /* optinfo_flags */
9581 TV_NONE, /* tv_id */
9582 PROP_cfg, /* properties_required */
9583 0, /* properties_provided */
9584 0, /* properties_destroyed */
9585 0, /* todo_flags_start */
9586 0, /* todo_flags_finish */
9589 class pass_warn_function_return : public gimple_opt_pass
9591 public:
9592 pass_warn_function_return (gcc::context *ctxt)
9593 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9596 /* opt_pass methods: */
9597 unsigned int execute (function *) final override;
9599 }; // class pass_warn_function_return
/* Warn about noreturn functions that return and about control reaching
   the end of a non-void function (-Wreturn-type).  */
9601 unsigned int
9602 pass_warn_function_return::execute (function *fun)
9604 location_t location;
9605 gimple *last;
9606 edge e;
9607 edge_iterator ei;
9609 if (!targetm.warn_func_return (fun->decl))
9610 return 0;
9612 /* If we have a path to EXIT, then we do return.  */
9613 if (TREE_THIS_VOLATILE (fun->decl)
9614 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9616 location = UNKNOWN_LOCATION;
/* Manual iterator advance: remove_edge invalidates the current edge,
   so ei_next is only called when the edge is kept.  */
9617 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9618 (e = ei_safe_edge (ei)); )
9620 last = last_stmt (e->src);
9621 if ((gimple_code (last) == GIMPLE_RETURN
9622 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9623 && location == UNKNOWN_LOCATION
9624 && ((location = LOCATION_LOCUS (gimple_location (last)))
9625 != UNKNOWN_LOCATION)
9626 && !optimize)
9627 break;
9628 /* When optimizing, replace return stmts in noreturn functions
9629 with __builtin_unreachable () call.  */
9630 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9632 location_t loc = gimple_location (last);
9633 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
9634 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9635 gsi_replace (&gsi, new_stmt, true);
9636 remove_edge (e);
9638 else
9639 ei_next (&ei);
9641 if (location == UNKNOWN_LOCATION)
9642 location = cfun->function_end_locus;
9643 warning_at (location, 0, "%<noreturn%> function does return");
9646 /* If we see "return;" in some basic block, then we do reach the end
9647 without returning a value.  */
9648 else if (warn_return_type > 0
9649 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9650 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9652 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9654 gimple *last = last_stmt (e->src);
9655 greturn *return_stmt = dyn_cast <greturn *> (last);
9656 if (return_stmt
9657 && gimple_return_retval (return_stmt) == NULL
9658 && !warning_suppressed_p (last, OPT_Wreturn_type))
9660 location = gimple_location (last);
9661 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9662 location = fun->function_end_locus;
9663 if (warning_at (location, OPT_Wreturn_type,
9664 "control reaches end of non-void function"))
9665 suppress_warning (fun->decl, OPT_Wreturn_type);
/* Warn at most once per function.  */
9666 break;
9669 /* The C++ FE turns fallthrough from the end of non-void function
9670 into __builtin_unreachable () call with BUILTINS_LOCATION.
9671 Recognize those as well as calls from ubsan_instrument_return.  */
9672 basic_block bb;
9673 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9674 FOR_EACH_BB_FN (bb, fun)
9675 if (EDGE_COUNT (bb->succs) == 0)
9677 gimple *last = last_stmt (bb);
9678 const enum built_in_function ubsan_missing_ret
9679 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9680 if (last
9681 && ((LOCATION_LOCUS (gimple_location (last))
9682 == BUILTINS_LOCATION
9683 && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
9684 || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
9685 || gimple_call_builtin_p (last, ubsan_missing_ret)))
/* Point the warning at the stmt preceding the synthetic call,
   falling back to the function end locus.  */
9687 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9688 gsi_prev_nondebug (&gsi);
9689 gimple *prev = gsi_stmt (gsi);
9690 if (prev == NULL)
9691 location = UNKNOWN_LOCATION;
9692 else
9693 location = gimple_location (prev);
9694 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9695 location = fun->function_end_locus;
9696 if (warning_at (location, OPT_Wreturn_type,
9697 "control reaches end of non-void function"))
9698 suppress_warning (fun->decl, OPT_Wreturn_type);
9699 break;
9703 return 0;
9706 } // anon namespace
/* Factory used by the pass manager.  */
9708 gimple_opt_pass *
9709 make_pass_warn_function_return (gcc::context *ctxt)
9711 return new pass_warn_function_return (ctxt);
9714 /* Walk a gimplified function and warn for functions whose return value is
9715 ignored and attribute((warn_unused_result)) is set.  This is done before
9716 inlining, so we don't have to worry about that.  Recurses into nested
GIMPLE containers (binds, tries, catches, EH filters).  */
9718 static void
9719 do_warn_unused_result (gimple_seq seq)
9721 tree fdecl, ftype;
9722 gimple_stmt_iterator i;
9724 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9726 gimple *g = gsi_stmt (i);
9728 switch (gimple_code (g))
9730 case GIMPLE_BIND:
9731 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9732 break;
9733 case GIMPLE_TRY:
9734 do_warn_unused_result (gimple_try_eval (g));
9735 do_warn_unused_result (gimple_try_cleanup (g));
9736 break;
9737 case GIMPLE_CATCH:
9738 do_warn_unused_result (gimple_catch_handler (
9739 as_a <gcatch *> (g)));
9740 break;
9741 case GIMPLE_EH_FILTER:
9742 do_warn_unused_result (gimple_eh_filter_failure (g));
9743 break;
9745 case GIMPLE_CALL:
/* A call whose result is used, or an internal call, never warns.  */
9746 if (gimple_call_lhs (g))
9747 break;
9748 if (gimple_call_internal_p (g))
9749 break;
9751 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9752 LHS.  All calls whose value is ignored should be
9753 represented like this.  Look for the attribute.  */
9754 fdecl = gimple_call_fndecl (g);
9755 ftype = gimple_call_fntype (g);
9757 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9759 location_t loc = gimple_location (g);
/* Indirect calls (no fndecl) get the decl-less wording.  */
9761 if (fdecl)
9762 warning_at (loc, OPT_Wunused_result,
9763 "ignoring return value of %qD "
9764 "declared with attribute %<warn_unused_result%>",
9765 fdecl);
9766 else
9767 warning_at (loc, OPT_Wunused_result,
9768 "ignoring return value of function "
9769 "declared with attribute %<warn_unused_result%>");
9771 break;
9773 default:
9774 /* Not a container, not a call, or a call whose value is used.  */
9775 break;
9780 namespace {
9782 const pass_data pass_data_warn_unused_result =
9784 GIMPLE_PASS, /* type */
9785 "*warn_unused_result", /* name */
9786 OPTGROUP_NONE, /* optinfo_flags */
9787 TV_NONE, /* tv_id */
9788 PROP_gimple_any, /* properties_required */
9789 0, /* properties_provided */
9790 0, /* properties_destroyed */
9791 0, /* todo_flags_start */
9792 0, /* todo_flags_finish */
/* Pass wrapper: runs do_warn_unused_result over the whole function
   body when -Wunused-result is enabled.  */
9795 class pass_warn_unused_result : public gimple_opt_pass
9797 public:
9798 pass_warn_unused_result (gcc::context *ctxt)
9799 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9802 /* opt_pass methods: */
9803 bool gate (function *) final override { return flag_warn_unused_result; }
9804 unsigned int execute (function *) final override
9806 do_warn_unused_result (gimple_body (current_function_decl));
9807 return 0;
9810 }; // class pass_warn_unused_result
9812 } // anon namespace
/* Factory used by the pass manager.  */
9814 gimple_opt_pass *
9815 make_pass_warn_unused_result (gcc::context *ctxt)
9817 return new pass_warn_unused_result (ctxt);
9820 /* Maybe remove stores to variables we marked write-only.
9821 Return true if a store was removed (GSI then points past the removed
stmt); SSA names made dead are recorded in DCE_SSA_NAMES for later
simple DCE.  */
9822 static bool
9823 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9824 bitmap dce_ssa_names)
9826 /* Keep access when store has side effect, i.e. in case when source
9827 is volatile.  */
9828 if (!gimple_store_p (stmt)
9829 || gimple_has_side_effects (stmt)
9830 || optimize_debug)
9831 return false;
9833 tree lhs = get_base_address (gimple_get_lhs (stmt));
/* Only static/external VAR_DECLs the varpool knows to be write-only.  */
9835 if (!VAR_P (lhs)
9836 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9837 || !varpool_node::get (lhs)->writeonly)
9838 return false;
9840 if (dump_file && (dump_flags & TDF_DETAILS))
9842 fprintf (dump_file, "Removing statement, writes"
9843 " to write only var:\n");
9844 print_gimple_stmt (dump_file, stmt, 0,
9845 TDF_VOPS|TDF_MEMSYMS);
9848 /* Mark ssa name defining to be checked for simple dce.  */
9849 if (gimple_assign_single_p (stmt))
9851 tree rhs = gimple_assign_rhs1 (stmt);
9852 if (TREE_CODE (rhs) == SSA_NAME
9853 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9854 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9856 unlink_stmt_vdef (stmt);
9857 gsi_remove (&gsi, true);
9858 release_defs (stmt);
9859 return true;
9862 /* IPA passes, compilation of earlier functions or inlining
9863 might have changed some properties, such as marked functions nothrow,
9864 pure, const or noreturn.
9865 Remove redundant edges and basic blocks, and create new ones if necessary.
Returns a TODO mask for the pass manager.  */
9867 unsigned int
9868 execute_fixup_cfg (void)
9870 basic_block bb;
9871 gimple_stmt_iterator gsi;
9872 int todo = 0;
9873 cgraph_node *node = cgraph_node::get (current_function_decl);
9874 /* Same scaling is also done by ipa_merge_profiles.  */
9875 profile_count num = node->count;
9876 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9877 bool scale = num.initialized_p () && !(num == den);
9878 auto_bitmap dce_ssa_names;
9880 if (scale)
9882 profile_count::adjust_for_ipa_scaling (&num, &den);
9883 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9884 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9885 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9888 FOR_EACH_BB_FN (bb, cfun)
9890 if (scale)
9891 bb->count = bb->count.apply_scale (num, den);
/* Manual gsi advance: removals below leave GSI at the next stmt.  */
9892 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9894 gimple *stmt = gsi_stmt (gsi);
9895 tree decl = is_gimple_call (stmt)
9896 ? gimple_call_fndecl (stmt)
9897 : NULL;
9898 if (decl)
9900 int flags = gimple_call_flags (stmt);
9901 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9903 if (gimple_in_ssa_p (cfun))
9905 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9906 update_stmt (stmt);
9909 if (flags & ECF_NORETURN
9910 && fixup_noreturn_call (stmt))
9911 todo |= TODO_cleanup_cfg;
9914 /* Remove stores to variables we marked write-only.  */
9915 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9917 todo |= TODO_update_ssa | TODO_cleanup_cfg;
/* GSI already points at the next stmt after the removal.  */
9918 continue;
9921 /* For calls we can simply remove LHS when it is known
9922 to be write-only.  */
9923 if (is_gimple_call (stmt)
9924 && gimple_get_lhs (stmt))
9926 tree lhs = get_base_address (gimple_get_lhs (stmt));
9928 if (VAR_P (lhs)
9929 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9930 && varpool_node::get (lhs)->writeonly)
9932 gimple_call_set_lhs (stmt, NULL);
9933 update_stmt (stmt);
9934 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9938 gsi_next (&gsi);
9940 if (gimple *last = last_stmt (bb))
9942 if (maybe_clean_eh_stmt (last)
9943 && gimple_purge_dead_eh_edges (bb))
9944 todo |= TODO_cleanup_cfg;
9945 if (gimple_purge_dead_abnormal_call_edges (bb))
9946 todo |= TODO_cleanup_cfg;
9949 /* If we have a basic block with no successors that does not
9950 end with a control statement or a noreturn call end it with
9951 a call to __builtin_unreachable.  This situation can occur
9952 when inlining a noreturn call that does in fact return.  */
9953 if (EDGE_COUNT (bb->succs) == 0)
9955 gimple *stmt = last_stmt (bb);
9956 if (!stmt
9957 || (!is_ctrl_stmt (stmt)
9958 && (!is_gimple_call (stmt)
9959 || !gimple_call_noreturn_p (stmt))))
9961 if (stmt && is_gimple_call (stmt))
9962 gimple_call_set_ctrl_altering (stmt, false);
9963 stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
9964 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9965 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
/* Before inlining, keep the call graph in sync with the new call.  */
9966 if (!cfun->after_inlining)
9967 if (tree fndecl = gimple_call_fndecl (stmt))
9969 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9970 node->create_edge (cgraph_node::get_create (fndecl),
9971 call_stmt, bb->count);
9976 if (scale)
9978 update_max_bb_count ();
9979 compute_function_frequency ();
9982 if (current_loops
9983 && (todo & TODO_cleanup_cfg))
9984 loops_state_set (LOOPS_NEED_FIXUP);
9986 simple_dce_from_worklist (dce_ssa_names);
9988 return todo;
9991 namespace {
9993 const pass_data pass_data_fixup_cfg =
9995 GIMPLE_PASS, /* type */
9996 "fixup_cfg", /* name */
9997 OPTGROUP_NONE, /* optinfo_flags */
9998 TV_NONE, /* tv_id */
9999 PROP_cfg, /* properties_required */
10000 0, /* properties_provided */
10001 0, /* properties_destroyed */
10002 0, /* todo_flags_start */
10003 0, /* todo_flags_finish */
/* Pass wrapper around execute_fixup_cfg.  */
10006 class pass_fixup_cfg : public gimple_opt_pass
10008 public:
10009 pass_fixup_cfg (gcc::context *ctxt)
10010 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
10013 /* opt_pass methods: */
10014 opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
10015 unsigned int execute (function *) final override
10017 return execute_fixup_cfg ();
10020 }; // class pass_fixup_cfg
10022 } // anon namespace
/* Factory used by the pass manager.  */
10024 gimple_opt_pass *
10025 make_pass_fixup_cfg (gcc::context *ctxt)
10027 return new pass_fixup_cfg (ctxt);
10030 /* Garbage collection support for edge_def.  */
10032 extern void gt_ggc_mx (tree&);
10033 extern void gt_ggc_mx (gimple *&);
10034 extern void gt_ggc_mx (rtx&);
10035 extern void gt_ggc_mx (basic_block&);
/* Mark an RTL insn reachable for GC; insns share rtx_def marking.  */
10037 static void
10038 gt_ggc_mx (rtx_insn *& x)
10040 if (x)
10041 gt_ggc_mx_rtx_def ((void *) x);
/* Mark all GC-managed objects reachable from edge E: its endpoints,
   pending stmts/insns (GIMPLE or RTL depending on the current IR),
   and the BLOCK of its goto locus.  */
10044 void
10045 gt_ggc_mx (edge_def *e)
10047 tree block = LOCATION_BLOCK (e->goto_locus);
10048 gt_ggc_mx (e->src);
10049 gt_ggc_mx (e->dest);
10050 if (current_ir_type () == IR_GIMPLE)
10051 gt_ggc_mx (e->insns.g);
10052 else
10053 gt_ggc_mx (e->insns.r);
10054 gt_ggc_mx (block);
10057 /* PCH support for edge_def.  */
10059 extern void gt_pch_nx (tree&);
10060 extern void gt_pch_nx (gimple *&);
10061 extern void gt_pch_nx (rtx&);
10062 extern void gt_pch_nx (basic_block&);
/* Note an RTL insn for PCH; insns share rtx_def handling.  */
10064 static void
10065 gt_pch_nx (rtx_insn *& x)
10067 if (x)
10068 gt_pch_nx_rtx_def ((void *) x);
/* Note all PCH-relevant objects reachable from edge E (mirrors the
   GC marking routine above).  */
10071 void
10072 gt_pch_nx (edge_def *e)
10074 tree block = LOCATION_BLOCK (e->goto_locus);
10075 gt_pch_nx (e->src);
10076 gt_pch_nx (e->dest);
10077 if (current_ir_type () == IR_GIMPLE)
10078 gt_pch_nx (e->insns.g);
10079 else
10080 gt_pch_nx (e->insns.r);
10081 gt_pch_nx (block);
/* Pointer-relocation variant for PCH: apply OP to each pointer field.  */
10084 void
10085 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
10087 tree block = LOCATION_BLOCK (e->goto_locus);
10088 op (&(e->src), NULL, cookie);
10089 op (&(e->dest), NULL, cookie);
10090 if (current_ir_type () == IR_GIMPLE)
10091 op (&(e->insns.g), NULL, cookie);
10092 else
10093 op (&(e->insns.r), NULL, cookie);
10094 op (&(block), &(block), cookie);
10097 #if CHECKING_P
10099 namespace selftest {
10101 /* Helper function for CFG selftests: create a dummy function decl
10102 and push it as cfun.  Returns the new FUNCTION_DECL; the caller must
pop_cfun () when done.  */
10104 static tree
10105 push_fndecl (const char *name)
10107 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
10108 /* FIXME: this uses input_location: */
10109 tree fndecl = build_fn_decl (name, fn_type);
10110 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
10111 NULL_TREE, integer_type_node);
10112 DECL_RESULT (fndecl) = retval;
10113 push_struct_function (fndecl);
10114 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10115 ASSERT_TRUE (fun != NULL);
10116 init_empty_tree_cfg_for_function (fun);
/* A fresh CFG has just ENTRY and EXIT, with no edges yet.  */
10117 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
10118 ASSERT_EQ (0, n_edges_for_fn (fun));
10119 return fndecl;
10122 /* These tests directly create CFGs.
10123 Compare with the static fns within tree-cfg.cc:
10124 - build_gimple_cfg
10125 - make_blocks: calls create_basic_block (seq, bb);
10126 - make_edges.  */
10128 /* Verify a simple cfg of the form:
10129 ENTRY -> A -> B -> C -> EXIT.  */
10131 static void
10132 test_linear_chain ()
10134 gimple_register_cfg_hooks ();
10136 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10137 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10139 /* Create some empty blocks.  */
10140 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10141 basic_block bb_b = create_empty_bb (bb_a);
10142 basic_block bb_c = create_empty_bb (bb_b);
/* ENTRY + EXIT + the three new blocks.  */
10144 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10145 ASSERT_EQ (0, n_edges_for_fn (fun));
10147 /* Create some edges: a simple linear chain of BBs.  */
10148 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10149 make_edge (bb_a, bb_b, 0);
10150 make_edge (bb_b, bb_c, 0);
10151 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10153 /* Verify the edges.  */
10154 ASSERT_EQ (4, n_edges_for_fn (fun));
10155 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10156 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10157 ASSERT_EQ (1, bb_a->preds->length ());
10158 ASSERT_EQ (1, bb_a->succs->length ());
10159 ASSERT_EQ (1, bb_b->preds->length ());
10160 ASSERT_EQ (1, bb_b->succs->length ());
10161 ASSERT_EQ (1, bb_c->preds->length ());
10162 ASSERT_EQ (1, bb_c->succs->length ());
10163 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10164 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10166 /* Verify the dominance information
10167 Each BB in our simple chain should be dominated by the one before
10168 it.  */
10169 calculate_dominance_info (CDI_DOMINATORS);
10170 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10171 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10172 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10173 ASSERT_EQ (1, dom_by_b.length ());
10174 ASSERT_EQ (bb_c, dom_by_b[0]);
10175 free_dominance_info (CDI_DOMINATORS);
10177 /* Similarly for post-dominance: each BB in our chain is post-dominated
10178 by the one after it.  */
10179 calculate_dominance_info (CDI_POST_DOMINATORS);
10180 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10181 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10182 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10183 ASSERT_EQ (1, postdom_by_b.length ());
10184 ASSERT_EQ (bb_a, postdom_by_b[0]);
10185 free_dominance_info (CDI_POST_DOMINATORS);
/* Undo push_fndecl's push_struct_function.  */
10187 pop_cfun ();
10190 /* Verify a simple CFG of the form:
10191 ENTRY
10195 /t \f
10201 EXIT.
(A branches to B on true and C on false; B and C rejoin at D.)  */
10203 static void
10204 test_diamond ()
10206 gimple_register_cfg_hooks ();
10208 tree fndecl = push_fndecl ("cfg_test_diamond");
10209 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10211 /* Create some empty blocks.  */
10212 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10213 basic_block bb_b = create_empty_bb (bb_a);
10214 basic_block bb_c = create_empty_bb (bb_a);
10215 basic_block bb_d = create_empty_bb (bb_b);
/* ENTRY + EXIT + the four new blocks.  */
10217 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10218 ASSERT_EQ (0, n_edges_for_fn (fun));
10220 /* Create the edges.  */
10221 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10222 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10223 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10224 make_edge (bb_b, bb_d, 0);
10225 make_edge (bb_c, bb_d, 0);
10226 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10228 /* Verify the edges.  */
10229 ASSERT_EQ (6, n_edges_for_fn (fun));
10230 ASSERT_EQ (1, bb_a->preds->length ());
10231 ASSERT_EQ (2, bb_a->succs->length ());
10232 ASSERT_EQ (1, bb_b->preds->length ());
10233 ASSERT_EQ (1, bb_b->succs->length ());
10234 ASSERT_EQ (1, bb_c->preds->length ());
10235 ASSERT_EQ (1, bb_c->succs->length ());
10236 ASSERT_EQ (2, bb_d->preds->length ());
10237 ASSERT_EQ (1, bb_d->succs->length ());
10239 /* Verify the dominance information.  */
10240 calculate_dominance_info (CDI_DOMINATORS);
10241 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10242 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10243 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10244 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10245 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
10246 dom_by_a.release ();
10247 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10248 ASSERT_EQ (0, dom_by_b.length ());
10249 dom_by_b.release ();
10250 free_dominance_info (CDI_DOMINATORS);
10252 /* Similarly for post-dominance.  */
10253 calculate_dominance_info (CDI_POST_DOMINATORS);
10254 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10255 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10256 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10257 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10258 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
10259 postdom_by_d.release ();
10260 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10261 ASSERT_EQ (0, postdom_by_b.length ());
10262 postdom_by_b.release ();
10263 free_dominance_info (CDI_POST_DOMINATORS);
/* Undo push_fndecl's push_struct_function.  */
10265 pop_cfun ();
10268 /* Verify that we can handle a CFG containing a "complete" aka
10269 fully-connected subgraph (where A B C D below all have edges
10270 pointing to each other node, also to themselves).
10271 e.g.:
10272 ENTRY EXIT
10278 A<--->B
10279 ^^ ^^
10280 | \ / |
10281 | X |
10282 | / \ |
10283 VV VV
10284 C<--->D
10287 static void
10288 test_fully_connected ()
10290 gimple_register_cfg_hooks ();
10292 tree fndecl = push_fndecl ("cfg_fully_connected");
10293 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10295 const int n = 4;
10297 /* Create some empty blocks. */
10298 auto_vec <basic_block> subgraph_nodes;
10299 for (int i = 0; i < n; i++)
10300 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10302 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10303 ASSERT_EQ (0, n_edges_for_fn (fun));
10305 /* Create the edges. */
10306 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10307 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10308 for (int i = 0; i < n; i++)
10309 for (int j = 0; j < n; j++)
10310 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10312 /* Verify the edges. */
10313 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10314 /* The first one is linked to ENTRY/EXIT as well as itself and
10315 everything else. */
10316 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10317 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10318 /* The other ones in the subgraph are linked to everything in
10319 the subgraph (including themselves). */
10320 for (int i = 1; i < n; i++)
10322 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10323 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10326 /* Verify the dominance information. */
10327 calculate_dominance_info (CDI_DOMINATORS);
10328 /* The initial block in the subgraph should be dominated by ENTRY. */
10329 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10330 get_immediate_dominator (CDI_DOMINATORS,
10331 subgraph_nodes[0]));
10332 /* Every other block in the subgraph should be dominated by the
10333 initial block. */
10334 for (int i = 1; i < n; i++)
10335 ASSERT_EQ (subgraph_nodes[0],
10336 get_immediate_dominator (CDI_DOMINATORS,
10337 subgraph_nodes[i]));
10338 free_dominance_info (CDI_DOMINATORS);
10340 /* Similarly for post-dominance. */
10341 calculate_dominance_info (CDI_POST_DOMINATORS);
10342 /* The initial block in the subgraph should be postdominated by EXIT. */
10343 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10344 get_immediate_dominator (CDI_POST_DOMINATORS,
10345 subgraph_nodes[0]));
10346 /* Every other block in the subgraph should be postdominated by the
10347 initial block, since that leads to EXIT. */
10348 for (int i = 1; i < n; i++)
10349 ASSERT_EQ (subgraph_nodes[0],
10350 get_immediate_dominator (CDI_POST_DOMINATORS,
10351 subgraph_nodes[i]));
10352 free_dominance_info (CDI_POST_DOMINATORS);
10354 pop_cfun ();
10357 /* Run all of the selftests within this file. */
10359 void
10360 tree_cfg_cc_tests ()
10362 test_linear_chain ();
10363 test_diamond ();
10364 test_fully_connected ();
10367 } // namespace selftest
/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */
10376 #endif /* CHECKING_P */