gccrs: add test case to show our query-type system is working
[official-gcc.git] / gcc / tree-cfg.cc
bloba9fcc7fd050f871437ef336ecfb8d6cc81280ee0
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2023 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
61 #include "gimplify.h"
62 #include "attribs.h"
63 #include "selftest.h"
64 #include "opts.h"
65 #include "asan.h"
66 #include "profile.h"
67 #include "sreal.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
101 /* CFG statistics. */
102 struct cfg_stats_d
104 long num_merged_labels;
107 static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map<tree, tree> *vars_map;
113 tree to_context;
116 /* Hash table to store last discriminator assigned for each locus. */
117 struct locus_discrim_map
119 int location_line;
120 int discriminator;
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
135 inline hashval_t
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
138 return item->location_line;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
144 inline bool
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
148 return a->location_line == b->location_line;
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
181 void
182 init_empty_tree_cfg_for_function (struct function *fn)
184 /* Initialize the basic block array. */
185 init_flow (fn);
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
205 void
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun);
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
218 static void
219 build_gimple_cfg (gimple_seq seq)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
226 init_empty_tree_cfg ();
228 make_blocks (seq);
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
250 make_edges ();
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
261 static void
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
314 static void
315 replace_loop_annotate (void)
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple *stmt;
321 for (auto loop : loops_list (cfun, 0))
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
330 /* Push the global flag_finite_loops state down to individual loops. */
331 loop->finite_p = flag_finite_loops;
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_unroll_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 case annot_expr_parallel_kind:
353 break;
354 default:
355 gcc_unreachable ();
358 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
359 stmt = gimple_build_assign (gimple_call_lhs (stmt),
360 gimple_call_arg (stmt, 0));
361 gsi_replace (&gsi, stmt, true);
366 static unsigned int
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
378 cleanup_tree_cfg ();
380 bb_to_omp_idx.release ();
382 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
383 replace_loop_annotate ();
384 return 0;
387 namespace {
389 const pass_data pass_data_build_cfg =
391 GIMPLE_PASS, /* type */
392 "cfg", /* name */
393 OPTGROUP_NONE, /* optinfo_flags */
394 TV_TREE_CFG, /* tv_id */
395 PROP_gimple_leh, /* properties_required */
396 ( PROP_cfg | PROP_loops ), /* properties_provided */
397 0, /* properties_destroyed */
398 0, /* todo_flags_start */
399 0, /* todo_flags_finish */
402 class pass_build_cfg : public gimple_opt_pass
404 public:
405 pass_build_cfg (gcc::context *ctxt)
406 : gimple_opt_pass (pass_data_build_cfg, ctxt)
409 /* opt_pass methods: */
410 unsigned int execute (function *) final override
412 return execute_build_cfg ();
415 }; // class pass_build_cfg
417 } // anon namespace
419 gimple_opt_pass *
420 make_pass_build_cfg (gcc::context *ctxt)
422 return new pass_build_cfg (ctxt);
426 /* Return true if T is a computed goto. */
428 bool
429 computed_goto_p (gimple *t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
435 /* Returns true if the sequence of statements STMTS only contains
436 a call to __builtin_unreachable (). */
438 bool
439 gimple_seq_unreachable_p (gimple_seq stmts)
441 if (stmts == NULL
442 /* Return false if -fsanitize=unreachable, we don't want to
443 optimize away those calls, but rather turn them into
444 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
445 later. */
446 || sanitize_flags_p (SANITIZE_UNREACHABLE))
447 return false;
449 gimple_stmt_iterator gsi = gsi_last (stmts);
451 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
452 return false;
454 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
456 gimple *stmt = gsi_stmt (gsi);
457 if (gimple_code (stmt) != GIMPLE_LABEL
458 && !is_gimple_debug (stmt)
459 && !gimple_clobber_p (stmt))
460 return false;
462 return true;
465 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
466 the other edge points to a bb with just __builtin_unreachable ().
467 I.e. return true for C->M edge in:
468 <bb C>:
470 if (something)
471 goto <bb N>;
472 else
473 goto <bb M>;
474 <bb N>:
475 __builtin_unreachable ();
476 <bb M>: */
478 bool
479 assert_unreachable_fallthru_edge_p (edge e)
481 basic_block pred_bb = e->src;
482 gimple *last = last_stmt (pred_bb);
483 if (last && gimple_code (last) == GIMPLE_COND)
485 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
486 if (other_bb == e->dest)
487 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
488 if (EDGE_COUNT (other_bb->succs) == 0)
489 return gimple_seq_unreachable_p (bb_seq (other_bb));
491 return false;
495 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
496 could alter control flow except via eh. We initialize the flag at
497 CFG build time and only ever clear it later. */
499 static void
500 gimple_call_initialize_ctrl_altering (gimple *stmt)
502 int flags = gimple_call_flags (stmt);
504 /* A call alters control flow if it can make an abnormal goto. */
505 if (call_can_make_abnormal_goto (stmt)
506 /* A call also alters control flow if it does not return. */
507 || flags & ECF_NORETURN
508 /* TM ending statements have backedges out of the transaction.
509 Return true so we split the basic block containing them.
510 Note that the TM_BUILTIN test is merely an optimization. */
511 || ((flags & ECF_TM_BUILTIN)
512 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
513 /* BUILT_IN_RETURN call is same as return statement. */
514 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
515 /* IFN_UNIQUE should be the last insn, to make checking for it
516 as cheap as possible. */
517 || (gimple_call_internal_p (stmt)
518 && gimple_call_internal_unique_p (stmt)))
519 gimple_call_set_ctrl_altering (stmt, true);
520 else
521 gimple_call_set_ctrl_altering (stmt, false);
525 /* Insert SEQ after BB and build a flowgraph. */
527 static basic_block
528 make_blocks_1 (gimple_seq seq, basic_block bb)
530 gimple_stmt_iterator i = gsi_start (seq);
531 gimple *stmt = NULL;
532 gimple *prev_stmt = NULL;
533 bool start_new_block = true;
534 bool first_stmt_of_seq = true;
536 while (!gsi_end_p (i))
538 /* PREV_STMT should only be set to a debug stmt if the debug
539 stmt is before nondebug stmts. Once stmt reaches a nondebug
540 nonlabel, prev_stmt will be set to it, so that
541 stmt_starts_bb_p will know to start a new block if a label is
542 found. However, if stmt was a label after debug stmts only,
543 keep the label in prev_stmt even if we find further debug
544 stmts, for there may be other labels after them, and they
545 should land in the same block. */
546 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
547 prev_stmt = stmt;
548 stmt = gsi_stmt (i);
550 if (stmt && is_gimple_call (stmt))
551 gimple_call_initialize_ctrl_altering (stmt);
553 /* If the statement starts a new basic block or if we have determined
554 in a previous pass that we need to create a new block for STMT, do
555 so now. */
556 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
558 if (!first_stmt_of_seq)
559 gsi_split_seq_before (&i, &seq);
560 bb = create_basic_block (seq, bb);
561 start_new_block = false;
562 prev_stmt = NULL;
565 /* Now add STMT to BB and create the subgraphs for special statement
566 codes. */
567 gimple_set_bb (stmt, bb);
569 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
570 next iteration. */
571 if (stmt_ends_bb_p (stmt))
573 /* If the stmt can make abnormal goto use a new temporary
574 for the assignment to the LHS. This makes sure the old value
575 of the LHS is available on the abnormal edge. Otherwise
576 we will end up with overlapping life-ranges for abnormal
577 SSA names. */
578 if (gimple_has_lhs (stmt)
579 && stmt_can_make_abnormal_goto (stmt)
580 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
582 tree lhs = gimple_get_lhs (stmt);
583 tree tmp = create_tmp_var (TREE_TYPE (lhs));
584 gimple *s = gimple_build_assign (lhs, tmp);
585 gimple_set_location (s, gimple_location (stmt));
586 gimple_set_block (s, gimple_block (stmt));
587 gimple_set_lhs (stmt, tmp);
588 gsi_insert_after (&i, s, GSI_SAME_STMT);
590 start_new_block = true;
593 gsi_next (&i);
594 first_stmt_of_seq = false;
596 return bb;
599 /* Build a flowgraph for the sequence of stmts SEQ. */
601 static void
602 make_blocks (gimple_seq seq)
604 /* Look for debug markers right before labels, and move the debug
605 stmts after the labels. Accepting labels among debug markers
606 adds no value, just complexity; if we wanted to annotate labels
607 with view numbers (so sequencing among markers would matter) or
608 somesuch, we're probably better off still moving the labels, but
609 adding other debug annotations in their original positions or
610 emitting nonbind or bind markers associated with the labels in
611 the original position of the labels.
613 Moving labels would probably be simpler, but we can't do that:
614 moving labels assigns label ids to them, and doing so because of
615 debug markers makes for -fcompare-debug and possibly even codegen
616 differences. So, we have to move the debug stmts instead. To
617 that end, we scan SEQ backwards, marking the position of the
618 latest (earliest we find) label, and moving debug stmts that are
619 not separated from it by nondebug nonlabel stmts after the
620 label. */
621 if (MAY_HAVE_DEBUG_MARKER_STMTS)
623 gimple_stmt_iterator label = gsi_none ();
625 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
627 gimple *stmt = gsi_stmt (i);
629 /* If this is the first label we encounter (latest in SEQ)
630 before nondebug stmts, record its position. */
631 if (is_a <glabel *> (stmt))
633 if (gsi_end_p (label))
634 label = i;
635 continue;
638 /* Without a recorded label position to move debug stmts to,
639 there's nothing to do. */
640 if (gsi_end_p (label))
641 continue;
643 /* Move the debug stmt at I after LABEL. */
644 if (is_gimple_debug (stmt))
646 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
647 /* As STMT is removed, I advances to the stmt after
648 STMT, so the gsi_prev in the for "increment"
649 expression gets us to the stmt we're to visit after
650 STMT. LABEL, however, would advance to the moved
651 stmt if we passed it to gsi_move_after, so pass it a
652 copy instead, so as to keep LABEL pointing to the
653 LABEL. */
654 gimple_stmt_iterator copy = label;
655 gsi_move_after (&i, &copy);
656 continue;
659 /* There aren't any (more?) debug stmts before label, so
660 there isn't anything else to move after it. */
661 label = gsi_none ();
665 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
668 /* Create and return a new empty basic block after bb AFTER. */
670 static basic_block
671 create_bb (void *h, void *e, basic_block after)
673 basic_block bb;
675 gcc_assert (!e);
677 /* Create and initialize a new basic block. Since alloc_block uses
678 GC allocation that clears memory to allocate a basic block, we do
679 not have to clear the newly allocated basic block here. */
680 bb = alloc_block ();
682 bb->index = last_basic_block_for_fn (cfun);
683 bb->flags = BB_NEW;
684 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
686 /* Add the new block to the linked list of blocks. */
687 link_block (bb, after);
689 /* Grow the basic block array if needed. */
690 if ((size_t) last_basic_block_for_fn (cfun)
691 == basic_block_info_for_fn (cfun)->length ())
692 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
693 last_basic_block_for_fn (cfun) + 1);
695 /* Add the newly created block to the array. */
696 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
698 n_basic_blocks_for_fn (cfun)++;
699 last_basic_block_for_fn (cfun)++;
701 return bb;
705 /*---------------------------------------------------------------------------
706 Edge creation
707 ---------------------------------------------------------------------------*/
709 /* If basic block BB has an abnormal edge to a basic block
710 containing IFN_ABNORMAL_DISPATCHER internal call, return
711 that the dispatcher's basic block, otherwise return NULL. */
713 basic_block
714 get_abnormal_succ_dispatcher (basic_block bb)
716 edge e;
717 edge_iterator ei;
719 FOR_EACH_EDGE (e, ei, bb->succs)
720 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
722 gimple_stmt_iterator gsi
723 = gsi_start_nondebug_after_labels_bb (e->dest);
724 gimple *g = gsi_stmt (gsi);
725 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
726 return e->dest;
728 return NULL;
731 /* Helper function for make_edges. Create a basic block with
732 with ABNORMAL_DISPATCHER internal call in it if needed, and
733 create abnormal edges from BBS to it and from it to FOR_BB
734 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
736 static void
737 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
738 auto_vec<basic_block> *bbs, bool computed_goto)
740 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
741 unsigned int idx = 0;
742 basic_block bb;
743 bool inner = false;
745 if (!bb_to_omp_idx.is_empty ())
747 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
748 if (bb_to_omp_idx[for_bb->index] != 0)
749 inner = true;
752 /* If the dispatcher has been created already, then there are basic
753 blocks with abnormal edges to it, so just make a new edge to
754 for_bb. */
755 if (*dispatcher == NULL)
757 /* Check if there are any basic blocks that need to have
758 abnormal edges to this dispatcher. If there are none, return
759 early. */
760 if (bb_to_omp_idx.is_empty ())
762 if (bbs->is_empty ())
763 return;
765 else
767 FOR_EACH_VEC_ELT (*bbs, idx, bb)
768 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
769 break;
770 if (bb == NULL)
771 return;
774 /* Create the dispatcher bb. */
775 *dispatcher = create_basic_block (NULL, for_bb);
776 if (computed_goto)
778 /* Factor computed gotos into a common computed goto site. Also
779 record the location of that site so that we can un-factor the
780 gotos after we have converted back to normal form. */
781 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
783 /* Create the destination of the factored goto. Each original
784 computed goto will put its desired destination into this
785 variable and jump to the label we create immediately below. */
786 tree var = create_tmp_var (ptr_type_node, "gotovar");
788 /* Build a label for the new block which will contain the
789 factored computed goto. */
790 tree factored_label_decl
791 = create_artificial_label (UNKNOWN_LOCATION);
792 gimple *factored_computed_goto_label
793 = gimple_build_label (factored_label_decl);
794 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
796 /* Build our new computed goto. */
797 gimple *factored_computed_goto = gimple_build_goto (var);
798 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
800 FOR_EACH_VEC_ELT (*bbs, idx, bb)
802 if (!bb_to_omp_idx.is_empty ()
803 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
804 continue;
806 gsi = gsi_last_bb (bb);
807 gimple *last = gsi_stmt (gsi);
809 gcc_assert (computed_goto_p (last));
811 /* Copy the original computed goto's destination into VAR. */
812 gimple *assignment
813 = gimple_build_assign (var, gimple_goto_dest (last));
814 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
816 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
817 e->goto_locus = gimple_location (last);
818 gsi_remove (&gsi, true);
821 else
823 tree arg = inner ? boolean_true_node : boolean_false_node;
824 gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
825 1, arg);
826 gimple_call_set_ctrl_altering (g, true);
827 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
828 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
830 /* Create predecessor edges of the dispatcher. */
831 FOR_EACH_VEC_ELT (*bbs, idx, bb)
833 if (!bb_to_omp_idx.is_empty ()
834 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
835 continue;
836 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
841 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
844 /* Creates outgoing edges for BB. Returns 1 when it ends with an
845 computed goto, returns 2 when it ends with a statement that
846 might return to this function via an nonlocal goto, otherwise
847 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
849 static int
850 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
852 gimple *last = last_stmt (bb);
853 bool fallthru = false;
854 int ret = 0;
856 if (!last)
857 return ret;
859 switch (gimple_code (last))
861 case GIMPLE_GOTO:
862 if (make_goto_expr_edges (bb))
863 ret = 1;
864 fallthru = false;
865 break;
866 case GIMPLE_RETURN:
868 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
869 e->goto_locus = gimple_location (last);
870 fallthru = false;
872 break;
873 case GIMPLE_COND:
874 make_cond_expr_edges (bb);
875 fallthru = false;
876 break;
877 case GIMPLE_SWITCH:
878 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
879 fallthru = false;
880 break;
881 case GIMPLE_RESX:
882 make_eh_edges (last);
883 fallthru = false;
884 break;
885 case GIMPLE_EH_DISPATCH:
886 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
887 break;
889 case GIMPLE_CALL:
890 /* If this function receives a nonlocal goto, then we need to
891 make edges from this call site to all the nonlocal goto
892 handlers. */
893 if (stmt_can_make_abnormal_goto (last))
894 ret = 2;
896 /* If this statement has reachable exception handlers, then
897 create abnormal edges to them. */
898 make_eh_edges (last);
900 /* BUILTIN_RETURN is really a return statement. */
901 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
903 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
904 fallthru = false;
906 /* Some calls are known not to return. */
907 else
908 fallthru = !gimple_call_noreturn_p (last);
909 break;
911 case GIMPLE_ASSIGN:
912 /* A GIMPLE_ASSIGN may throw internally and thus be considered
913 control-altering. */
914 if (is_ctrl_altering_stmt (last))
915 make_eh_edges (last);
916 fallthru = true;
917 break;
919 case GIMPLE_ASM:
920 make_gimple_asm_edges (bb);
921 fallthru = true;
922 break;
924 CASE_GIMPLE_OMP:
925 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
926 break;
928 case GIMPLE_TRANSACTION:
930 gtransaction *txn = as_a <gtransaction *> (last);
931 tree label1 = gimple_transaction_label_norm (txn);
932 tree label2 = gimple_transaction_label_uninst (txn);
934 if (label1)
935 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
936 if (label2)
937 make_edge (bb, label_to_block (cfun, label2),
938 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
940 tree label3 = gimple_transaction_label_over (txn);
941 if (gimple_transaction_subcode (txn)
942 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
943 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
945 fallthru = false;
947 break;
949 default:
950 gcc_assert (!stmt_ends_bb_p (last));
951 fallthru = true;
952 break;
955 if (fallthru)
956 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
958 return ret;
961 /* Join all the blocks in the flowgraph. */
963 static void
964 make_edges (void)
966 basic_block bb;
967 struct omp_region *cur_region = NULL;
968 auto_vec<basic_block> ab_edge_goto;
969 auto_vec<basic_block> ab_edge_call;
970 int cur_omp_region_idx = 0;
972 /* Create an edge from entry to the first block with executable
973 statements in it. */
974 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
975 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
976 EDGE_FALLTHRU);
978 /* Traverse the basic block array placing edges. */
979 FOR_EACH_BB_FN (bb, cfun)
981 int mer;
983 if (!bb_to_omp_idx.is_empty ())
984 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
986 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
987 if (mer == 1)
988 ab_edge_goto.safe_push (bb);
989 else if (mer == 2)
990 ab_edge_call.safe_push (bb);
992 if (cur_region && bb_to_omp_idx.is_empty ())
993 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
996 /* Computed gotos are hell to deal with, especially if there are
997 lots of them with a large number of destinations. So we factor
998 them to a common computed goto location before we build the
999 edge list. After we convert back to normal form, we will un-factor
1000 the computed gotos since factoring introduces an unwanted jump.
1001 For non-local gotos and abnormal edges from calls to calls that return
1002 twice or forced labels, factor the abnormal edges too, by having all
1003 abnormal edges from the calls go to a common artificial basic block
1004 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1005 basic block to all forced labels and calls returning twice.
1006 We do this per-OpenMP structured block, because those regions
1007 are guaranteed to be single entry single exit by the standard,
1008 so it is not allowed to enter or exit such regions abnormally this way,
1009 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1010 must not transfer control across SESE region boundaries. */
1011 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1013 gimple_stmt_iterator gsi;
1014 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1015 basic_block *dispatcher_bbs = dispatcher_bb_array;
1016 int count = n_basic_blocks_for_fn (cfun);
1018 if (!bb_to_omp_idx.is_empty ())
1019 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1021 FOR_EACH_BB_FN (bb, cfun)
1023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1025 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1026 tree target;
1028 if (!label_stmt)
1029 break;
1031 target = gimple_label_label (label_stmt);
1033 /* Make an edge to every label block that has been marked as a
1034 potential target for a computed goto or a non-local goto. */
1035 if (FORCED_LABEL (target))
1036 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1037 true);
1038 if (DECL_NONLOCAL (target))
1040 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1041 false);
1042 break;
1046 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1047 gsi_next_nondebug (&gsi);
1048 if (!gsi_end_p (gsi))
1050 /* Make an edge to every setjmp-like call. */
1051 gimple *call_stmt = gsi_stmt (gsi);
1052 if (is_gimple_call (call_stmt)
1053 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1054 || gimple_call_builtin_p (call_stmt,
1055 BUILT_IN_SETJMP_RECEIVER)))
1056 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1057 false);
1061 if (!bb_to_omp_idx.is_empty ())
1062 XDELETE (dispatcher_bbs);
1065 omp_free_regions ();
1068 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1069 needed. Returns true if new bbs were created.
1070 Note: This is transitional code, and should not be used for new code. We
1071 should be able to get rid of this by rewriting all target va-arg
1072 gimplification hooks to use an interface gimple_build_cond_value as described
1073 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1075 bool
1076 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1078 gimple *stmt = gsi_stmt (*gsi);
1079 basic_block bb = gimple_bb (stmt);
1080 basic_block lastbb, afterbb;
1081 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1082 edge e;
1083 lastbb = make_blocks_1 (seq, bb);
1084 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1085 return false;
1086 e = split_block (bb, stmt);
1087 /* Move e->dest to come after the new basic blocks. */
1088 afterbb = e->dest;
1089 unlink_block (afterbb);
1090 link_block (afterbb, lastbb);
1091 redirect_edge_succ (e, bb->next_bb);
1092 bb = bb->next_bb;
1093 while (bb != afterbb)
1095 struct omp_region *cur_region = NULL;
1096 profile_count cnt = profile_count::zero ();
1097 bool all = true;
1099 int cur_omp_region_idx = 0;
1100 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1101 gcc_assert (!mer && !cur_region);
1102 add_bb_to_loop (bb, afterbb->loop_father);
1104 edge e;
1105 edge_iterator ei;
1106 FOR_EACH_EDGE (e, ei, bb->preds)
1108 if (e->count ().initialized_p ())
1109 cnt += e->count ();
1110 else
1111 all = false;
1113 tree_guess_outgoing_edge_probabilities (bb);
1114 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1115 bb->count = cnt;
1117 bb = bb->next_bb;
1119 return true;
1122 /* Find the next available discriminator value for LOCUS. The
1123 discriminator distinguishes among several basic blocks that
1124 share a common locus, allowing for more accurate sample-based
1125 profiling. */
1127 static int
1128 next_discriminator_for_locus (int line)
1130 struct locus_discrim_map item;
1131 struct locus_discrim_map **slot;
1133 item.location_line = line;
1134 item.discriminator = 0;
1135 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1136 gcc_assert (slot);
1137 if (*slot == HTAB_EMPTY_ENTRY)
1139 *slot = XNEW (struct locus_discrim_map);
1140 gcc_assert (*slot);
1141 (*slot)->location_line = line;
1142 (*slot)->discriminator = 0;
1144 (*slot)->discriminator++;
1145 return (*slot)->discriminator;
1148 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1150 static bool
1151 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1153 expanded_location to;
1155 if (locus1 == locus2)
1156 return true;
1158 to = expand_location (locus2);
1160 if (from->line != to.line)
1161 return false;
1162 if (from->file == to.file)
1163 return true;
1164 return (from->file != NULL
1165 && to.file != NULL
1166 && filename_cmp (from->file, to.file) == 0);
1169 /* Assign a unique discriminator value to all statements in block bb that
1170 have the same line number as locus. */
1172 static void
1173 assign_discriminator (location_t locus, basic_block bb)
1175 gimple_stmt_iterator gsi;
1176 int discriminator;
1178 if (locus == UNKNOWN_LOCATION)
1179 return;
1181 expanded_location locus_e = expand_location (locus);
1183 discriminator = next_discriminator_for_locus (locus_e.line);
1185 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1187 gimple *stmt = gsi_stmt (gsi);
1188 location_t stmt_locus = gimple_location (stmt);
1189 if (same_line_p (locus, &locus_e, stmt_locus))
1190 gimple_set_location (stmt,
1191 location_with_discriminator (stmt_locus, discriminator));
1195 /* Assign discriminators to statement locations. */
1197 static void
1198 assign_discriminators (void)
1200 basic_block bb;
1202 FOR_EACH_BB_FN (bb, cfun)
1204 edge e;
1205 edge_iterator ei;
1206 gimple_stmt_iterator gsi;
1207 location_t curr_locus = UNKNOWN_LOCATION;
1208 expanded_location curr_locus_e = {};
1209 int curr_discr = 0;
1211 /* Traverse the basic block, if two function calls within a basic block
1212 are mapped to the same line, assign a new discriminator because a call
1213 stmt could be a split point of a basic block. */
1214 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1216 gimple *stmt = gsi_stmt (gsi);
1218 if (curr_locus == UNKNOWN_LOCATION)
1220 curr_locus = gimple_location (stmt);
1221 curr_locus_e = expand_location (curr_locus);
1223 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1225 curr_locus = gimple_location (stmt);
1226 curr_locus_e = expand_location (curr_locus);
1227 curr_discr = 0;
1229 else if (curr_discr != 0)
1231 location_t loc = gimple_location (stmt);
1232 location_t dloc = location_with_discriminator (loc, curr_discr);
1233 gimple_set_location (stmt, dloc);
1235 /* Allocate a new discriminator for CALL stmt. */
1236 if (gimple_code (stmt) == GIMPLE_CALL)
1237 curr_discr = next_discriminator_for_locus (curr_locus);
1240 gimple *last = last_stmt (bb);
1241 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1242 if (locus == UNKNOWN_LOCATION)
1243 continue;
1245 expanded_location locus_e = expand_location (locus);
1247 FOR_EACH_EDGE (e, ei, bb->succs)
1249 gimple *first = first_non_label_stmt (e->dest);
1250 gimple *last = last_stmt (e->dest);
1252 gimple *stmt_on_same_line = NULL;
1253 if (first && same_line_p (locus, &locus_e,
1254 gimple_location (first)))
1255 stmt_on_same_line = first;
1256 else if (last && same_line_p (locus, &locus_e,
1257 gimple_location (last)))
1258 stmt_on_same_line = last;
1260 if (stmt_on_same_line)
1262 if (has_discriminator (gimple_location (stmt_on_same_line))
1263 && !has_discriminator (locus))
1264 assign_discriminator (locus, bb);
1265 else
1266 assign_discriminator (locus, e->dest);
1272 /* Create the edges for a GIMPLE_COND starting at block BB. */
1274 static void
1275 make_cond_expr_edges (basic_block bb)
1277 gcond *entry = as_a <gcond *> (last_stmt (bb));
1278 gimple *then_stmt, *else_stmt;
1279 basic_block then_bb, else_bb;
1280 tree then_label, else_label;
1281 edge e;
1283 gcc_assert (entry);
1284 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1286 /* Entry basic blocks for each component. */
1287 then_label = gimple_cond_true_label (entry);
1288 else_label = gimple_cond_false_label (entry);
1289 then_bb = label_to_block (cfun, then_label);
1290 else_bb = label_to_block (cfun, else_label);
1291 then_stmt = first_stmt (then_bb);
1292 else_stmt = first_stmt (else_bb);
1294 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1295 e->goto_locus = gimple_location (then_stmt);
1296 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1297 if (e)
1298 e->goto_locus = gimple_location (else_stmt);
1300 /* We do not need the labels anymore. */
1301 gimple_cond_set_true_label (entry, NULL_TREE);
1302 gimple_cond_set_false_label (entry, NULL_TREE);
1306 /* Called for each element in the hash table (P) as we delete the
1307 edge to cases hash table.
1309 Clear all the CASE_CHAINs to prevent problems with copying of
1310 SWITCH_EXPRs and structure sharing rules, then free the hash table
1311 element. */
1313 bool
1314 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1316 tree t, next;
1318 for (t = value; t; t = next)
1320 next = CASE_CHAIN (t);
1321 CASE_CHAIN (t) = NULL;
1324 return true;
1327 /* Start recording information mapping edges to case labels. */
1329 void
1330 start_recording_case_labels (void)
1332 gcc_assert (edge_to_cases == NULL);
1333 edge_to_cases = new hash_map<edge, tree>;
1334 touched_switch_bbs = BITMAP_ALLOC (NULL);
1337 /* Return nonzero if we are recording information for case labels. */
1339 static bool
1340 recording_case_labels_p (void)
1342 return (edge_to_cases != NULL);
1345 /* Stop recording information mapping edges to case labels and
1346 remove any information we have recorded. */
1347 void
1348 end_recording_case_labels (void)
1350 bitmap_iterator bi;
1351 unsigned i;
1352 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1353 delete edge_to_cases;
1354 edge_to_cases = NULL;
1355 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1357 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1358 if (bb)
1360 gimple *stmt = last_stmt (bb);
1361 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1362 group_case_labels_stmt (as_a <gswitch *> (stmt));
1365 BITMAP_FREE (touched_switch_bbs);
1368 /* If we are inside a {start,end}_recording_cases block, then return
1369 a chain of CASE_LABEL_EXPRs from T which reference E.
1371 Otherwise return NULL. */
1373 tree
1374 get_cases_for_edge (edge e, gswitch *t)
1376 tree *slot;
1377 size_t i, n;
1379 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1380 chains available. Return NULL so the caller can detect this case. */
1381 if (!recording_case_labels_p ())
1382 return NULL;
1384 slot = edge_to_cases->get (e);
1385 if (slot)
1386 return *slot;
1388 /* If we did not find E in the hash table, then this must be the first
1389 time we have been queried for information about E & T. Add all the
1390 elements from T to the hash table then perform the query again. */
1392 n = gimple_switch_num_labels (t);
1393 for (i = 0; i < n; i++)
1395 tree elt = gimple_switch_label (t, i);
1396 tree lab = CASE_LABEL (elt);
1397 basic_block label_bb = label_to_block (cfun, lab);
1398 edge this_edge = find_edge (e->src, label_bb);
1400 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1401 a new chain. */
1402 tree &s = edge_to_cases->get_or_insert (this_edge);
1403 CASE_CHAIN (elt) = s;
1404 s = elt;
1407 return *edge_to_cases->get (e);
1410 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1412 static void
1413 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1415 size_t i, n;
1417 n = gimple_switch_num_labels (entry);
1419 for (i = 0; i < n; ++i)
1421 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1422 make_edge (bb, label_bb, 0);
1427 /* Return the basic block holding label DEST. */
1429 basic_block
1430 label_to_block (struct function *ifun, tree dest)
1432 int uid = LABEL_DECL_UID (dest);
1434 /* We would die hard when faced by an undefined label. Emit a label to
1435 the very first basic block. This will hopefully make even the dataflow
1436 and undefined variable warnings quite right. */
1437 if (seen_error () && uid < 0)
1439 gimple_stmt_iterator gsi =
1440 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1441 gimple *stmt;
1443 stmt = gimple_build_label (dest);
1444 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1445 uid = LABEL_DECL_UID (dest);
1447 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1448 return NULL;
1449 return (*ifun->cfg->x_label_to_block_map)[uid];
1452 /* Create edges for a goto statement at block BB. Returns true
1453 if abnormal edges should be created. */
1455 static bool
1456 make_goto_expr_edges (basic_block bb)
1458 gimple_stmt_iterator last = gsi_last_bb (bb);
1459 gimple *goto_t = gsi_stmt (last);
1461 /* A simple GOTO creates normal edges. */
1462 if (simple_goto_p (goto_t))
1464 tree dest = gimple_goto_dest (goto_t);
1465 basic_block label_bb = label_to_block (cfun, dest);
1466 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1467 e->goto_locus = gimple_location (goto_t);
1468 gsi_remove (&last, true);
1469 return false;
1472 /* A computed GOTO creates abnormal edges. */
1473 return true;
1476 /* Create edges for an asm statement with labels at block BB. */
1478 static void
1479 make_gimple_asm_edges (basic_block bb)
1481 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1482 int i, n = gimple_asm_nlabels (stmt);
1484 for (i = 0; i < n; ++i)
1486 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1487 basic_block label_bb = label_to_block (cfun, label);
1488 make_edge (bb, label_bb, 0);
1492 /*---------------------------------------------------------------------------
1493 Flowgraph analysis
1494 ---------------------------------------------------------------------------*/
1496 /* Cleanup useless labels in basic blocks. This is something we wish
1497 to do early because it allows us to group case labels before creating
1498 the edges for the CFG, and it speeds up block statement iterators in
1499 all passes later on.
1500 We rerun this pass after CFG is created, to get rid of the labels that
1501 are no longer referenced. After then we do not run it any more, since
1502 (almost) no new labels should be created. */
1504 /* A map from basic block index to the leading label of that block. */
1505 struct label_record
1507 /* The label. */
1508 tree label;
1510 /* True if the label is referenced from somewhere. */
1511 bool used;
1514 /* Given LABEL return the first label in the same basic block. */
1516 static tree
1517 main_block_label (tree label, label_record *label_for_bb)
1519 basic_block bb = label_to_block (cfun, label);
1520 tree main_label = label_for_bb[bb->index].label;
1522 /* label_to_block possibly inserted undefined label into the chain. */
1523 if (!main_label)
1525 label_for_bb[bb->index].label = label;
1526 main_label = label;
1529 label_for_bb[bb->index].used = true;
1530 return main_label;
1533 /* Clean up redundant labels within the exception tree. */
1535 static void
1536 cleanup_dead_labels_eh (label_record *label_for_bb)
1538 eh_landing_pad lp;
1539 eh_region r;
1540 tree lab;
1541 int i;
1543 if (cfun->eh == NULL)
1544 return;
1546 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1547 if (lp && lp->post_landing_pad)
1549 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1550 if (lab != lp->post_landing_pad)
1552 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1553 lp->post_landing_pad = lab;
1554 EH_LANDING_PAD_NR (lab) = lp->index;
1558 FOR_ALL_EH_REGION (r)
1559 switch (r->type)
1561 case ERT_CLEANUP:
1562 case ERT_MUST_NOT_THROW:
1563 break;
1565 case ERT_TRY:
1567 eh_catch c;
1568 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1570 lab = c->label;
1571 if (lab)
1572 c->label = main_block_label (lab, label_for_bb);
1575 break;
1577 case ERT_ALLOWED_EXCEPTIONS:
1578 lab = r->u.allowed.label;
1579 if (lab)
1580 r->u.allowed.label = main_block_label (lab, label_for_bb);
1581 break;
1586 /* Cleanup redundant labels. This is a three-step process:
1587 1) Find the leading label for each block.
1588 2) Redirect all references to labels to the leading labels.
1589 3) Cleanup all useless labels. */
1591 void
1592 cleanup_dead_labels (void)
1594 basic_block bb;
1595 label_record *label_for_bb = XCNEWVEC (struct label_record,
1596 last_basic_block_for_fn (cfun));
1598 /* Find a suitable label for each block. We use the first user-defined
1599 label if there is one, or otherwise just the first label we see. */
1600 FOR_EACH_BB_FN (bb, cfun)
1602 gimple_stmt_iterator i;
1604 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1606 tree label;
1607 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1609 if (!label_stmt)
1610 break;
1612 label = gimple_label_label (label_stmt);
1614 /* If we have not yet seen a label for the current block,
1615 remember this one and see if there are more labels. */
1616 if (!label_for_bb[bb->index].label)
1618 label_for_bb[bb->index].label = label;
1619 continue;
1622 /* If we did see a label for the current block already, but it
1623 is an artificially created label, replace it if the current
1624 label is a user defined label. */
1625 if (!DECL_ARTIFICIAL (label)
1626 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1628 label_for_bb[bb->index].label = label;
1629 break;
1634 /* Now redirect all jumps/branches to the selected label.
1635 First do so for each block ending in a control statement. */
1636 FOR_EACH_BB_FN (bb, cfun)
1638 gimple *stmt = last_stmt (bb);
1639 tree label, new_label;
1641 if (!stmt)
1642 continue;
1644 switch (gimple_code (stmt))
1646 case GIMPLE_COND:
1648 gcond *cond_stmt = as_a <gcond *> (stmt);
1649 label = gimple_cond_true_label (cond_stmt);
1650 if (label)
1652 new_label = main_block_label (label, label_for_bb);
1653 if (new_label != label)
1654 gimple_cond_set_true_label (cond_stmt, new_label);
1657 label = gimple_cond_false_label (cond_stmt);
1658 if (label)
1660 new_label = main_block_label (label, label_for_bb);
1661 if (new_label != label)
1662 gimple_cond_set_false_label (cond_stmt, new_label);
1665 break;
1667 case GIMPLE_SWITCH:
1669 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1670 size_t i, n = gimple_switch_num_labels (switch_stmt);
1672 /* Replace all destination labels. */
1673 for (i = 0; i < n; ++i)
1675 tree case_label = gimple_switch_label (switch_stmt, i);
1676 label = CASE_LABEL (case_label);
1677 new_label = main_block_label (label, label_for_bb);
1678 if (new_label != label)
1679 CASE_LABEL (case_label) = new_label;
1681 break;
1684 case GIMPLE_ASM:
1686 gasm *asm_stmt = as_a <gasm *> (stmt);
1687 int i, n = gimple_asm_nlabels (asm_stmt);
1689 for (i = 0; i < n; ++i)
1691 tree cons = gimple_asm_label_op (asm_stmt, i);
1692 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1693 TREE_VALUE (cons) = label;
1695 break;
1698 /* We have to handle gotos until they're removed, and we don't
1699 remove them until after we've created the CFG edges. */
1700 case GIMPLE_GOTO:
1701 if (!computed_goto_p (stmt))
1703 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1704 label = gimple_goto_dest (goto_stmt);
1705 new_label = main_block_label (label, label_for_bb);
1706 if (new_label != label)
1707 gimple_goto_set_dest (goto_stmt, new_label);
1709 break;
1711 case GIMPLE_TRANSACTION:
1713 gtransaction *txn = as_a <gtransaction *> (stmt);
1715 label = gimple_transaction_label_norm (txn);
1716 if (label)
1718 new_label = main_block_label (label, label_for_bb);
1719 if (new_label != label)
1720 gimple_transaction_set_label_norm (txn, new_label);
1723 label = gimple_transaction_label_uninst (txn);
1724 if (label)
1726 new_label = main_block_label (label, label_for_bb);
1727 if (new_label != label)
1728 gimple_transaction_set_label_uninst (txn, new_label);
1731 label = gimple_transaction_label_over (txn);
1732 if (label)
1734 new_label = main_block_label (label, label_for_bb);
1735 if (new_label != label)
1736 gimple_transaction_set_label_over (txn, new_label);
1739 break;
1741 default:
1742 break;
1746 /* Do the same for the exception region tree labels. */
1747 cleanup_dead_labels_eh (label_for_bb);
1749 /* Finally, purge dead labels. All user-defined labels and labels that
1750 can be the target of non-local gotos and labels which have their
1751 address taken are preserved. */
1752 FOR_EACH_BB_FN (bb, cfun)
1754 gimple_stmt_iterator i;
1755 tree label_for_this_bb = label_for_bb[bb->index].label;
1757 if (!label_for_this_bb)
1758 continue;
1760 /* If the main label of the block is unused, we may still remove it. */
1761 if (!label_for_bb[bb->index].used)
1762 label_for_this_bb = NULL;
1764 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1766 tree label;
1767 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1769 if (!label_stmt)
1770 break;
1772 label = gimple_label_label (label_stmt);
1774 if (label == label_for_this_bb
1775 || !DECL_ARTIFICIAL (label)
1776 || DECL_NONLOCAL (label)
1777 || FORCED_LABEL (label))
1778 gsi_next (&i);
1779 else
1781 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1782 gsi_remove (&i, true);
1787 free (label_for_bb);
1790 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1791 the ones jumping to the same label.
1792 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1794 bool
1795 group_case_labels_stmt (gswitch *stmt)
1797 int old_size = gimple_switch_num_labels (stmt);
1798 int i, next_index, new_size;
1799 basic_block default_bb = NULL;
1800 hash_set<tree> *removed_labels = NULL;
1802 default_bb = gimple_switch_default_bb (cfun, stmt);
1804 /* Look for possible opportunities to merge cases. */
1805 new_size = i = 1;
1806 while (i < old_size)
1808 tree base_case, base_high;
1809 basic_block base_bb;
1811 base_case = gimple_switch_label (stmt, i);
1813 gcc_assert (base_case);
1814 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1816 /* Discard cases that have the same destination as the default case or
1817 whose destination blocks have already been removed as unreachable. */
1818 if (base_bb == NULL
1819 || base_bb == default_bb
1820 || (removed_labels
1821 && removed_labels->contains (CASE_LABEL (base_case))))
1823 i++;
1824 continue;
1827 base_high = CASE_HIGH (base_case)
1828 ? CASE_HIGH (base_case)
1829 : CASE_LOW (base_case);
1830 next_index = i + 1;
1832 /* Try to merge case labels. Break out when we reach the end
1833 of the label vector or when we cannot merge the next case
1834 label with the current one. */
1835 while (next_index < old_size)
1837 tree merge_case = gimple_switch_label (stmt, next_index);
1838 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1839 wide_int bhp1 = wi::to_wide (base_high) + 1;
1841 /* Merge the cases if they jump to the same place,
1842 and their ranges are consecutive. */
1843 if (merge_bb == base_bb
1844 && (removed_labels == NULL
1845 || !removed_labels->contains (CASE_LABEL (merge_case)))
1846 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1848 base_high
1849 = (CASE_HIGH (merge_case)
1850 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1851 CASE_HIGH (base_case) = base_high;
1852 next_index++;
1854 else
1855 break;
1858 /* Discard cases that have an unreachable destination block. */
1859 if (EDGE_COUNT (base_bb->succs) == 0
1860 && gimple_seq_unreachable_p (bb_seq (base_bb))
1861 /* Don't optimize this if __builtin_unreachable () is the
1862 implicitly added one by the C++ FE too early, before
1863 -Wreturn-type can be diagnosed. We'll optimize it later
1864 during switchconv pass or any other cfg cleanup. */
1865 && (gimple_in_ssa_p (cfun)
1866 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1867 != BUILTINS_LOCATION)))
1869 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1870 if (base_edge != NULL)
1872 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1873 !gsi_end_p (gsi); gsi_next (&gsi))
1874 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1876 if (FORCED_LABEL (gimple_label_label (stmt))
1877 || DECL_NONLOCAL (gimple_label_label (stmt)))
1879 /* Forced/non-local labels aren't going to be removed,
1880 but they will be moved to some neighbouring basic
1881 block. If some later case label refers to one of
1882 those labels, we should throw that case away rather
1883 than keeping it around and refering to some random
1884 other basic block without an edge to it. */
1885 if (removed_labels == NULL)
1886 removed_labels = new hash_set<tree>;
1887 removed_labels->add (gimple_label_label (stmt));
1890 else
1891 break;
1892 remove_edge_and_dominated_blocks (base_edge);
1894 i = next_index;
1895 continue;
1898 if (new_size < i)
1899 gimple_switch_set_label (stmt, new_size,
1900 gimple_switch_label (stmt, i));
1901 i = next_index;
1902 new_size++;
1905 gcc_assert (new_size <= old_size);
1907 if (new_size < old_size)
1908 gimple_switch_set_num_labels (stmt, new_size);
1910 delete removed_labels;
1911 return new_size < old_size;
1914 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1915 and scan the sorted vector of cases. Combine the ones jumping to the
1916 same label. */
1918 bool
1919 group_case_labels (void)
1921 basic_block bb;
1922 bool changed = false;
1924 FOR_EACH_BB_FN (bb, cfun)
1926 gimple *stmt = last_stmt (bb);
1927 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1928 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1931 return changed;
1934 /* Checks whether we can merge block B into block A. */
1936 static bool
1937 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1939 gimple *stmt;
1941 if (!single_succ_p (a))
1942 return false;
1944 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1945 return false;
1947 if (single_succ (a) != b)
1948 return false;
1950 if (!single_pred_p (b))
1951 return false;
1953 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1954 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1955 return false;
1957 /* If A ends by a statement causing exceptions or something similar, we
1958 cannot merge the blocks. */
1959 stmt = last_stmt (a);
1960 if (stmt && stmt_ends_bb_p (stmt))
1961 return false;
1963 /* Do not allow a block with only a non-local label to be merged. */
1964 if (stmt)
1965 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1966 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1967 return false;
1969 /* Examine the labels at the beginning of B. */
1970 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1971 gsi_next (&gsi))
1973 tree lab;
1974 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1975 if (!label_stmt)
1976 break;
1977 lab = gimple_label_label (label_stmt);
1979 /* Do not remove user forced labels or for -O0 any user labels. */
1980 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1981 return false;
1984 /* Protect simple loop latches. We only want to avoid merging
1985 the latch with the loop header or with a block in another
1986 loop in this case. */
1987 if (current_loops
1988 && b->loop_father->latch == b
1989 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1990 && (b->loop_father->header == a
1991 || b->loop_father != a->loop_father))
1992 return false;
1994 /* It must be possible to eliminate all phi nodes in B. If ssa form
1995 is not up-to-date and a name-mapping is registered, we cannot eliminate
1996 any phis. Symbols marked for renaming are never a problem though. */
1997 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1998 gsi_next (&gsi))
2000 gphi *phi = gsi.phi ();
2001 /* Technically only new names matter. */
2002 if (name_registered_for_update_p (PHI_RESULT (phi)))
2003 return false;
2006 /* When not optimizing, don't merge if we'd lose goto_locus. */
2007 if (!optimize
2008 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2010 location_t goto_locus = single_succ_edge (a)->goto_locus;
2011 gimple_stmt_iterator prev, next;
2012 prev = gsi_last_nondebug_bb (a);
2013 next = gsi_after_labels (b);
2014 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2015 gsi_next_nondebug (&next);
2016 if ((gsi_end_p (prev)
2017 || gimple_location (gsi_stmt (prev)) != goto_locus)
2018 && (gsi_end_p (next)
2019 || gimple_location (gsi_stmt (next)) != goto_locus))
2020 return false;
2023 return true;
2026 /* Replaces all uses of NAME by VAL. */
2028 void
2029 replace_uses_by (tree name, tree val)
2031 imm_use_iterator imm_iter;
2032 use_operand_p use;
2033 gimple *stmt;
2034 edge e;
2036 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2038 /* Mark the block if we change the last stmt in it. */
2039 if (cfgcleanup_altered_bbs
2040 && stmt_ends_bb_p (stmt))
2041 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2043 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2045 replace_exp (use, val);
2047 if (gimple_code (stmt) == GIMPLE_PHI)
2049 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2050 PHI_ARG_INDEX_FROM_USE (use));
2051 if (e->flags & EDGE_ABNORMAL
2052 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2054 /* This can only occur for virtual operands, since
2055 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2056 would prevent replacement. */
2057 gcc_checking_assert (virtual_operand_p (name));
2058 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2063 if (gimple_code (stmt) != GIMPLE_PHI)
2065 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2066 gimple *orig_stmt = stmt;
2067 size_t i;
2069 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2070 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2071 only change sth from non-invariant to invariant, and only
2072 when propagating constants. */
2073 if (is_gimple_min_invariant (val))
2074 for (i = 0; i < gimple_num_ops (stmt); i++)
2076 tree op = gimple_op (stmt, i);
2077 /* Operands may be empty here. For example, the labels
2078 of a GIMPLE_COND are nulled out following the creation
2079 of the corresponding CFG edges. */
2080 if (op && TREE_CODE (op) == ADDR_EXPR)
2081 recompute_tree_invariant_for_addr_expr (op);
2084 if (fold_stmt (&gsi))
2085 stmt = gsi_stmt (gsi);
2087 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2088 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2090 update_stmt (stmt);
2094 gcc_checking_assert (has_zero_uses (name));
2096 /* Also update the trees stored in loop structures. */
2097 if (current_loops)
2099 for (auto loop : loops_list (cfun, 0))
2100 substitute_in_loop_info (loop, name, val);
2104 /* Merge block B into block A. */
2106 static void
2107 gimple_merge_blocks (basic_block a, basic_block b)
2109 gimple_stmt_iterator last, gsi;
2110 gphi_iterator psi;
2112 if (dump_file)
2113 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2115 /* Remove all single-valued PHI nodes from block B of the form
2116 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2117 gsi = gsi_last_bb (a);
2118 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2120 gimple *phi = gsi_stmt (psi);
2121 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2122 gimple *copy;
2123 bool may_replace_uses = (virtual_operand_p (def)
2124 || may_propagate_copy (def, use));
2126 /* In case we maintain loop closed ssa form, do not propagate arguments
2127 of loop exit phi nodes. */
2128 if (current_loops
2129 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2130 && !virtual_operand_p (def)
2131 && TREE_CODE (use) == SSA_NAME
2132 && a->loop_father != b->loop_father)
2133 may_replace_uses = false;
2135 if (!may_replace_uses)
2137 gcc_assert (!virtual_operand_p (def));
2139 /* Note that just emitting the copies is fine -- there is no problem
2140 with ordering of phi nodes. This is because A is the single
2141 predecessor of B, therefore results of the phi nodes cannot
2142 appear as arguments of the phi nodes. */
2143 copy = gimple_build_assign (def, use);
2144 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2145 remove_phi_node (&psi, false);
2147 else
2149 /* If we deal with a PHI for virtual operands, we can simply
2150 propagate these without fussing with folding or updating
2151 the stmt. */
2152 if (virtual_operand_p (def))
2154 imm_use_iterator iter;
2155 use_operand_p use_p;
2156 gimple *stmt;
2158 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2159 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2160 SET_USE (use_p, use);
2162 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2163 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2165 else
2166 replace_uses_by (def, use);
2168 remove_phi_node (&psi, true);
2172 /* Ensure that B follows A. */
2173 move_block_after (b, a);
2175 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2176 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2178 /* Remove labels from B and set gimple_bb to A for other statements. */
2179 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2181 gimple *stmt = gsi_stmt (gsi);
2182 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2184 tree label = gimple_label_label (label_stmt);
2185 int lp_nr;
2187 gsi_remove (&gsi, false);
2189 /* Now that we can thread computed gotos, we might have
2190 a situation where we have a forced label in block B
2191 However, the label at the start of block B might still be
2192 used in other ways (think about the runtime checking for
2193 Fortran assigned gotos). So we cannot just delete the
2194 label. Instead we move the label to the start of block A. */
2195 if (FORCED_LABEL (label))
2197 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2198 tree first_label = NULL_TREE;
2199 if (!gsi_end_p (dest_gsi))
2200 if (glabel *first_label_stmt
2201 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2202 first_label = gimple_label_label (first_label_stmt);
2203 if (first_label
2204 && (DECL_NONLOCAL (first_label)
2205 || EH_LANDING_PAD_NR (first_label) != 0))
2206 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2207 else
2208 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2210 /* Other user labels keep around in a form of a debug stmt. */
2211 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2213 gimple *dbg = gimple_build_debug_bind (label,
2214 integer_zero_node,
2215 stmt);
2216 gimple_debug_bind_reset_value (dbg);
2217 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2220 lp_nr = EH_LANDING_PAD_NR (label);
2221 if (lp_nr)
2223 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2224 lp->post_landing_pad = NULL;
2227 else
2229 gimple_set_bb (stmt, a);
2230 gsi_next (&gsi);
2234 /* When merging two BBs, if their counts are different, the larger count
2235 is selected as the new bb count. This is to handle inconsistent
2236 profiles. */
2237 if (a->loop_father == b->loop_father)
2239 a->count = a->count.merge (b->count);
2242 /* Merge the sequences. */
2243 last = gsi_last_bb (a);
2244 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2245 set_bb_seq (b, NULL);
2247 if (cfgcleanup_altered_bbs)
2248 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2252 /* Return the one of two successors of BB that is not reachable by a
2253 complex edge, if there is one. Else, return BB. We use
2254 this in optimizations that use post-dominators for their heuristics,
2255 to catch the cases in C++ where function calls are involved. */
2257 basic_block
2258 single_noncomplex_succ (basic_block bb)
2260 edge e0, e1;
2261 if (EDGE_COUNT (bb->succs) != 2)
2262 return bb;
2264 e0 = EDGE_SUCC (bb, 0);
2265 e1 = EDGE_SUCC (bb, 1);
2266 if (e0->flags & EDGE_COMPLEX)
2267 return e1->dest;
2268 if (e1->flags & EDGE_COMPLEX)
2269 return e0->dest;
2271 return bb;
2274 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2276 void
2277 notice_special_calls (gcall *call)
2279 int flags = gimple_call_flags (call);
2281 if (flags & ECF_MAY_BE_ALLOCA)
2282 cfun->calls_alloca = true;
2283 if (flags & ECF_RETURNS_TWICE)
2284 cfun->calls_setjmp = true;
2288 /* Clear flags set by notice_special_calls. Used by dead code removal
2289 to update the flags. */
2291 void
2292 clear_special_calls (void)
2294 cfun->calls_alloca = false;
2295 cfun->calls_setjmp = false;
2298 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2300 static void
2301 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2303 /* Since this block is no longer reachable, we can just delete all
2304 of its PHI nodes. */
2305 remove_phi_nodes (bb);
2307 /* Remove edges to BB's successors. */
2308 while (EDGE_COUNT (bb->succs) > 0)
2309 remove_edge (EDGE_SUCC (bb, 0));
2313 /* Remove statements of basic block BB. */
2315 static void
2316 remove_bb (basic_block bb)
2318 gimple_stmt_iterator i;
2320 if (dump_file)
2322 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2323 if (dump_flags & TDF_DETAILS)
2325 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2326 fprintf (dump_file, "\n");
2330 if (current_loops)
2332 class loop *loop = bb->loop_father;
2334 /* If a loop gets removed, clean up the information associated
2335 with it. */
2336 if (loop->latch == bb
2337 || loop->header == bb)
2338 free_numbers_of_iterations_estimates (loop);
2341 /* Remove all the instructions in the block. */
2342 if (bb_seq (bb) != NULL)
2344 /* Walk backwards so as to get a chance to substitute all
2345 released DEFs into debug stmts. See
2346 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2347 details. */
2348 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2350 gimple *stmt = gsi_stmt (i);
2351 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2352 if (label_stmt
2353 && (FORCED_LABEL (gimple_label_label (label_stmt))
2354 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2356 basic_block new_bb;
2357 gimple_stmt_iterator new_gsi;
2359 /* A non-reachable non-local label may still be referenced.
2360 But it no longer needs to carry the extra semantics of
2361 non-locality. */
2362 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2364 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2365 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2368 new_bb = bb->prev_bb;
2369 /* Don't move any labels into ENTRY block. */
2370 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2372 new_bb = single_succ (new_bb);
2373 gcc_assert (new_bb != bb);
2375 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2376 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2377 || (bb_to_omp_idx[bb->index]
2378 != bb_to_omp_idx[new_bb->index])))
2380 /* During cfg pass make sure to put orphaned labels
2381 into the right OMP region. */
2382 unsigned int i;
2383 int idx;
2384 new_bb = NULL;
2385 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2386 if (i >= NUM_FIXED_BLOCKS
2387 && idx == bb_to_omp_idx[bb->index]
2388 && i != (unsigned) bb->index)
2390 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2391 break;
2393 if (new_bb == NULL)
2395 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2396 gcc_assert (new_bb != bb);
2399 new_gsi = gsi_after_labels (new_bb);
2400 gsi_remove (&i, false);
2401 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2403 else
2405 /* Release SSA definitions. */
2406 release_defs (stmt);
2407 gsi_remove (&i, true);
2410 if (gsi_end_p (i))
2411 i = gsi_last_bb (bb);
2412 else
2413 gsi_prev (&i);
2417 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2418 bb_to_omp_idx[bb->index] = -1;
2419 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2420 bb->il.gimple.seq = NULL;
2421 bb->il.gimple.phi_nodes = NULL;
2425 /* Given a basic block BB and a value VAL for use in the final statement
2426 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2427 the edge that will be taken out of the block.
2428 If VAL is NULL_TREE, then the current value of the final statement's
2429 predicate or index is used.
2430 If the value does not match a unique edge, NULL is returned. */
2432 edge
2433 find_taken_edge (basic_block bb, tree val)
2435 gimple *stmt;
2437 stmt = last_stmt (bb);
2439 /* Handle ENTRY and EXIT. */
2440 if (!stmt)
2441 return NULL;
2443 if (gimple_code (stmt) == GIMPLE_COND)
2444 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2446 if (gimple_code (stmt) == GIMPLE_SWITCH)
2447 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2449 if (computed_goto_p (stmt))
2451 /* Only optimize if the argument is a label, if the argument is
2452 not a label then we cannot construct a proper CFG.
2454 It may be the case that we only need to allow the LABEL_REF to
2455 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2456 appear inside a LABEL_EXPR just to be safe. */
2457 if (val
2458 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2459 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2460 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2463 /* Otherwise we only know the taken successor edge if it's unique. */
2464 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2467 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2468 statement, determine which of the outgoing edges will be taken out of the
2469 block. Return NULL if either edge may be taken. */
2471 static edge
2472 find_taken_edge_computed_goto (basic_block bb, tree val)
2474 basic_block dest;
2475 edge e = NULL;
2477 dest = label_to_block (cfun, val);
2478 if (dest)
2479 e = find_edge (bb, dest);
2481 /* It's possible for find_edge to return NULL here on invalid code
2482 that abuses the labels-as-values extension (e.g. code that attempts to
2483 jump *between* functions via stored labels-as-values; PR 84136).
2484 If so, then we simply return that NULL for the edge.
2485 We don't currently have a way of detecting such invalid code, so we
2486 can't assert that it was the case when a NULL edge occurs here. */
2488 return e;
2491 /* Given COND_STMT and a constant value VAL for use as the predicate,
2492 determine which of the two edges will be taken out of
2493 the statement's block. Return NULL if either edge may be taken.
2494 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2495 is used. */
2497 static edge
2498 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2500 edge true_edge, false_edge;
2502 if (val == NULL_TREE)
2504 /* Use the current value of the predicate. */
2505 if (gimple_cond_true_p (cond_stmt))
2506 val = integer_one_node;
2507 else if (gimple_cond_false_p (cond_stmt))
2508 val = integer_zero_node;
2509 else
2510 return NULL;
2512 else if (TREE_CODE (val) != INTEGER_CST)
2513 return NULL;
2515 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2516 &true_edge, &false_edge);
2518 return (integer_zerop (val) ? false_edge : true_edge);
2521 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2522 which edge will be taken out of the statement's block. Return NULL if any
2523 edge may be taken.
2524 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2525 is used. */
2527 edge
2528 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2530 basic_block dest_bb;
2531 edge e;
2532 tree taken_case;
2534 if (gimple_switch_num_labels (switch_stmt) == 1)
2535 taken_case = gimple_switch_default_label (switch_stmt);
2536 else
2538 if (val == NULL_TREE)
2539 val = gimple_switch_index (switch_stmt);
2540 if (TREE_CODE (val) != INTEGER_CST)
2541 return NULL;
2542 else
2543 taken_case = find_case_label_for_value (switch_stmt, val);
2545 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2547 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2548 gcc_assert (e);
2549 return e;
2553 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2554 We can make optimal use here of the fact that the case labels are
2555 sorted: We can do a binary search for a case matching VAL. */
2557 tree
2558 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2560 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2561 tree default_case = gimple_switch_default_label (switch_stmt);
2563 for (low = 0, high = n; high - low > 1; )
2565 size_t i = (high + low) / 2;
2566 tree t = gimple_switch_label (switch_stmt, i);
2567 int cmp;
2569 /* Cache the result of comparing CASE_LOW and val. */
2570 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2572 if (cmp > 0)
2573 high = i;
2574 else
2575 low = i;
2577 if (CASE_HIGH (t) == NULL)
2579 /* A singe-valued case label. */
2580 if (cmp == 0)
2581 return t;
2583 else
2585 /* A case range. We can only handle integer ranges. */
2586 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2587 return t;
2591 return default_case;
2595 /* Dump a basic block on stderr. */
2597 void
2598 gimple_debug_bb (basic_block bb)
2600 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2604 /* Dump basic block with index N on stderr. */
2606 basic_block
2607 gimple_debug_bb_n (int n)
2609 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2610 return BASIC_BLOCK_FOR_FN (cfun, n);
2614 /* Dump the CFG on stderr.
2616 FLAGS are the same used by the tree dumping functions
2617 (see TDF_* in dumpfile.h). */
2619 void
2620 gimple_debug_cfg (dump_flags_t flags)
2622 gimple_dump_cfg (stderr, flags);
2626 /* Dump the program showing basic block boundaries on the given FILE.
2628 FLAGS are the same used by the tree dumping functions (see TDF_* in
2629 tree.h). */
2631 void
2632 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2634 if (flags & TDF_DETAILS)
2636 dump_function_header (file, current_function_decl, flags);
2637 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2638 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2639 last_basic_block_for_fn (cfun));
2641 brief_dump_cfg (file, flags);
2642 fprintf (file, "\n");
2645 if (flags & TDF_STATS)
2646 dump_cfg_stats (file);
2648 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2652 /* Dump CFG statistics on FILE. */
2654 void
2655 dump_cfg_stats (FILE *file)
2657 static long max_num_merged_labels = 0;
2658 unsigned long size, total = 0;
2659 long num_edges;
2660 basic_block bb;
2661 const char * const fmt_str = "%-30s%-13s%12s\n";
2662 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2663 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2664 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2665 const char *funcname = current_function_name ();
2667 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2669 fprintf (file, "---------------------------------------------------------\n");
2670 fprintf (file, fmt_str, "", " Number of ", "Memory");
2671 fprintf (file, fmt_str, "", " instances ", "used ");
2672 fprintf (file, "---------------------------------------------------------\n");
2674 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2675 total += size;
2676 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2677 SIZE_AMOUNT (size));
2679 num_edges = 0;
2680 FOR_EACH_BB_FN (bb, cfun)
2681 num_edges += EDGE_COUNT (bb->succs);
2682 size = num_edges * sizeof (class edge_def);
2683 total += size;
2684 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2686 fprintf (file, "---------------------------------------------------------\n");
2687 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2688 SIZE_AMOUNT (total));
2689 fprintf (file, "---------------------------------------------------------\n");
2690 fprintf (file, "\n");
2692 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2693 max_num_merged_labels = cfg_stats.num_merged_labels;
2695 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2696 cfg_stats.num_merged_labels, max_num_merged_labels);
2698 fprintf (file, "\n");
2702 /* Dump CFG statistics on stderr. Keep extern so that it's always
2703 linked in the final executable. */
2705 DEBUG_FUNCTION void
2706 debug_cfg_stats (void)
2708 dump_cfg_stats (stderr);
2711 /*---------------------------------------------------------------------------
2712 Miscellaneous helpers
2713 ---------------------------------------------------------------------------*/
2715 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2716 flow. Transfers of control flow associated with EH are excluded. */
2718 static bool
2719 call_can_make_abnormal_goto (gimple *t)
2721 /* If the function has no non-local labels, then a call cannot make an
2722 abnormal transfer of control. */
2723 if (!cfun->has_nonlocal_label
2724 && !cfun->calls_setjmp)
2725 return false;
2727 /* Likewise if the call has no side effects. */
2728 if (!gimple_has_side_effects (t))
2729 return false;
2731 /* Likewise if the called function is leaf. */
2732 if (gimple_call_flags (t) & ECF_LEAF)
2733 return false;
2735 return true;
2739 /* Return true if T can make an abnormal transfer of control flow.
2740 Transfers of control flow associated with EH are excluded. */
2742 bool
2743 stmt_can_make_abnormal_goto (gimple *t)
2745 if (computed_goto_p (t))
2746 return true;
2747 if (is_gimple_call (t))
2748 return call_can_make_abnormal_goto (t);
2749 return false;
2753 /* Return true if T represents a stmt that always transfers control. */
2755 bool
2756 is_ctrl_stmt (gimple *t)
2758 switch (gimple_code (t))
2760 case GIMPLE_COND:
2761 case GIMPLE_SWITCH:
2762 case GIMPLE_GOTO:
2763 case GIMPLE_RETURN:
2764 case GIMPLE_RESX:
2765 return true;
2766 default:
2767 return false;
2772 /* Return true if T is a statement that may alter the flow of control
2773 (e.g., a call to a non-returning function). */
2775 bool
2776 is_ctrl_altering_stmt (gimple *t)
2778 gcc_assert (t);
2780 switch (gimple_code (t))
2782 case GIMPLE_CALL:
2783 /* Per stmt call flag indicates whether the call could alter
2784 controlflow. */
2785 if (gimple_call_ctrl_altering_p (t))
2786 return true;
2787 break;
2789 case GIMPLE_EH_DISPATCH:
2790 /* EH_DISPATCH branches to the individual catch handlers at
2791 this level of a try or allowed-exceptions region. It can
2792 fallthru to the next statement as well. */
2793 return true;
2795 case GIMPLE_ASM:
2796 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2797 return true;
2798 break;
2800 CASE_GIMPLE_OMP:
2801 /* OpenMP directives alter control flow. */
2802 return true;
2804 case GIMPLE_TRANSACTION:
2805 /* A transaction start alters control flow. */
2806 return true;
2808 default:
2809 break;
2812 /* If a statement can throw, it alters control flow. */
2813 return stmt_can_throw_internal (cfun, t);
2817 /* Return true if T is a simple local goto. */
2819 bool
2820 simple_goto_p (gimple *t)
2822 return (gimple_code (t) == GIMPLE_GOTO
2823 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2827 /* Return true if STMT should start a new basic block. PREV_STMT is
2828 the statement preceding STMT. It is used when STMT is a label or a
2829 case label. Labels should only start a new basic block if their
2830 previous statement wasn't a label. Otherwise, sequence of labels
2831 would generate unnecessary basic blocks that only contain a single
2832 label. */
2834 static inline bool
2835 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2837 if (stmt == NULL)
2838 return false;
2840 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2841 any nondebug stmts in the block. We don't want to start another
2842 block in this case: the debug stmt will already have started the
2843 one STMT would start if we weren't outputting debug stmts. */
2844 if (prev_stmt && is_gimple_debug (prev_stmt))
2845 return false;
2847 /* Labels start a new basic block only if the preceding statement
2848 wasn't a label of the same type. This prevents the creation of
2849 consecutive blocks that have nothing but a single label. */
2850 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2852 /* Nonlocal and computed GOTO targets always start a new block. */
2853 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2854 || FORCED_LABEL (gimple_label_label (label_stmt)))
2855 return true;
2857 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2859 if (DECL_NONLOCAL (gimple_label_label (plabel))
2860 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2861 return true;
2863 cfg_stats.num_merged_labels++;
2864 return false;
2866 else
2867 return true;
2869 else if (gimple_code (stmt) == GIMPLE_CALL)
2871 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2872 /* setjmp acts similar to a nonlocal GOTO target and thus should
2873 start a new block. */
2874 return true;
2875 if (gimple_call_internal_p (stmt, IFN_PHI)
2876 && prev_stmt
2877 && gimple_code (prev_stmt) != GIMPLE_LABEL
2878 && (gimple_code (prev_stmt) != GIMPLE_CALL
2879 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2880 /* PHI nodes start a new block unless preceeded by a label
2881 or another PHI. */
2882 return true;
2885 return false;
2889 /* Return true if T should end a basic block. */
2891 bool
2892 stmt_ends_bb_p (gimple *t)
2894 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2897 /* Remove block annotations and other data structures. */
2899 void
2900 delete_tree_cfg_annotations (struct function *fn)
2902 vec_free (label_to_block_map_for_fn (fn));
2905 /* Return the virtual phi in BB. */
2907 gphi *
2908 get_virtual_phi (basic_block bb)
2910 for (gphi_iterator gsi = gsi_start_phis (bb);
2911 !gsi_end_p (gsi);
2912 gsi_next (&gsi))
2914 gphi *phi = gsi.phi ();
2916 if (virtual_operand_p (PHI_RESULT (phi)))
2917 return phi;
2920 return NULL;
2923 /* Return the first statement in basic block BB. */
2925 gimple *
2926 first_stmt (basic_block bb)
2928 gimple_stmt_iterator i = gsi_start_bb (bb);
2929 gimple *stmt = NULL;
2931 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2933 gsi_next (&i);
2934 stmt = NULL;
2936 return stmt;
2939 /* Return the first non-label statement in basic block BB. */
2941 static gimple *
2942 first_non_label_stmt (basic_block bb)
2944 gimple_stmt_iterator i = gsi_start_bb (bb);
2945 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2946 gsi_next (&i);
2947 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2950 /* Return the last statement in basic block BB. */
2952 gimple *
2953 last_stmt (basic_block bb)
2955 gimple_stmt_iterator i = gsi_last_bb (bb);
2956 gimple *stmt = NULL;
2958 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2960 gsi_prev (&i);
2961 stmt = NULL;
2963 return stmt;
2966 /* Return the last statement of an otherwise empty block. Return NULL
2967 if the block is totally empty, or if it contains more than one
2968 statement. */
2970 gimple *
2971 last_and_only_stmt (basic_block bb)
2973 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2974 gimple *last, *prev;
2976 if (gsi_end_p (i))
2977 return NULL;
2979 last = gsi_stmt (i);
2980 gsi_prev_nondebug (&i);
2981 if (gsi_end_p (i))
2982 return last;
2984 /* Empty statements should no longer appear in the instruction stream.
2985 Everything that might have appeared before should be deleted by
2986 remove_useless_stmts, and the optimizers should just gsi_remove
2987 instead of smashing with build_empty_stmt.
2989 Thus the only thing that should appear here in a block containing
2990 one executable statement is a label. */
2991 prev = gsi_stmt (i);
2992 if (gimple_code (prev) == GIMPLE_LABEL)
2993 return last;
2994 else
2995 return NULL;
2998 /* Returns the basic block after which the new basic block created
2999 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3000 near its "logical" location. This is of most help to humans looking
3001 at debugging dumps. */
3003 basic_block
3004 split_edge_bb_loc (edge edge_in)
3006 basic_block dest = edge_in->dest;
3007 basic_block dest_prev = dest->prev_bb;
3009 if (dest_prev)
3011 edge e = find_edge (dest_prev, dest);
3012 if (e && !(e->flags & EDGE_COMPLEX))
3013 return edge_in->src;
3015 return dest_prev;
3018 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3019 Abort on abnormal edges. */
3021 static basic_block
3022 gimple_split_edge (edge edge_in)
3024 basic_block new_bb, after_bb, dest;
3025 edge new_edge, e;
3027 /* Abnormal edges cannot be split. */
3028 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3030 dest = edge_in->dest;
3032 after_bb = split_edge_bb_loc (edge_in);
3034 new_bb = create_empty_bb (after_bb);
3035 new_bb->count = edge_in->count ();
3037 /* We want to avoid re-allocating PHIs when we first
3038 add the fallthru edge from new_bb to dest but we also
3039 want to avoid changing PHI argument order when
3040 first redirecting edge_in away from dest. The former
3041 avoids changing PHI argument order by adding them
3042 last and then the redirection swapping it back into
3043 place by means of unordered remove.
3044 So hack around things by temporarily removing all PHIs
3045 from the destination during the edge redirection and then
3046 making sure the edges stay in order. */
3047 gimple_seq saved_phis = phi_nodes (dest);
3048 unsigned old_dest_idx = edge_in->dest_idx;
3049 set_phi_nodes (dest, NULL);
3050 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3051 e = redirect_edge_and_branch (edge_in, new_bb);
3052 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3053 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3054 dest->il.gimple.phi_nodes = saved_phis;
3056 return new_bb;
3060 /* Verify properties of the address expression T whose base should be
3061 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3063 static bool
3064 verify_address (tree t, bool verify_addressable)
3066 bool old_constant;
3067 bool old_side_effects;
3068 bool new_constant;
3069 bool new_side_effects;
3071 old_constant = TREE_CONSTANT (t);
3072 old_side_effects = TREE_SIDE_EFFECTS (t);
3074 recompute_tree_invariant_for_addr_expr (t);
3075 new_side_effects = TREE_SIDE_EFFECTS (t);
3076 new_constant = TREE_CONSTANT (t);
3078 if (old_constant != new_constant)
3080 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3081 return true;
3083 if (old_side_effects != new_side_effects)
3085 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3086 return true;
3089 tree base = TREE_OPERAND (t, 0);
3090 while (handled_component_p (base))
3091 base = TREE_OPERAND (base, 0);
3093 if (!(VAR_P (base)
3094 || TREE_CODE (base) == PARM_DECL
3095 || TREE_CODE (base) == RESULT_DECL))
3096 return false;
3098 if (verify_addressable && !TREE_ADDRESSABLE (base))
3100 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3101 return true;
3104 return false;
3108 /* Verify if EXPR is a valid GIMPLE reference expression. If
3109 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3110 if there is an error, otherwise false. */
3112 static bool
3113 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3115 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3117 if (TREE_CODE (expr) == REALPART_EXPR
3118 || TREE_CODE (expr) == IMAGPART_EXPR
3119 || TREE_CODE (expr) == BIT_FIELD_REF)
3121 tree op = TREE_OPERAND (expr, 0);
3122 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3124 error ("non-scalar %qs", code_name);
3125 return true;
3128 if (TREE_CODE (expr) == BIT_FIELD_REF)
3130 tree t1 = TREE_OPERAND (expr, 1);
3131 tree t2 = TREE_OPERAND (expr, 2);
3132 poly_uint64 size, bitpos;
3133 if (!poly_int_tree_p (t1, &size)
3134 || !poly_int_tree_p (t2, &bitpos)
3135 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3136 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3138 error ("invalid position or size operand to %qs", code_name);
3139 return true;
3141 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3142 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3144 error ("integral result type precision does not match "
3145 "field size of %qs", code_name);
3146 return true;
3148 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3149 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3150 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3151 size))
3153 error ("mode size of non-integral result does not "
3154 "match field size of %qs",
3155 code_name);
3156 return true;
3158 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3159 && !type_has_mode_precision_p (TREE_TYPE (op)))
3161 error ("%qs of non-mode-precision operand", code_name);
3162 return true;
3164 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3165 && maybe_gt (size + bitpos,
3166 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3168 error ("position plus size exceeds size of referenced object in "
3169 "%qs", code_name);
3170 return true;
3174 if ((TREE_CODE (expr) == REALPART_EXPR
3175 || TREE_CODE (expr) == IMAGPART_EXPR)
3176 && !useless_type_conversion_p (TREE_TYPE (expr),
3177 TREE_TYPE (TREE_TYPE (op))))
3179 error ("type mismatch in %qs reference", code_name);
3180 debug_generic_stmt (TREE_TYPE (expr));
3181 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3182 return true;
3184 expr = op;
3187 while (handled_component_p (expr))
3189 code_name = get_tree_code_name (TREE_CODE (expr));
3191 if (TREE_CODE (expr) == REALPART_EXPR
3192 || TREE_CODE (expr) == IMAGPART_EXPR
3193 || TREE_CODE (expr) == BIT_FIELD_REF)
3195 error ("non-top-level %qs", code_name);
3196 return true;
3199 tree op = TREE_OPERAND (expr, 0);
3201 if (TREE_CODE (expr) == ARRAY_REF
3202 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3204 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3205 || (TREE_OPERAND (expr, 2)
3206 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3207 || (TREE_OPERAND (expr, 3)
3208 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3210 error ("invalid operands to %qs", code_name);
3211 debug_generic_stmt (expr);
3212 return true;
3216 /* Verify if the reference array element types are compatible. */
3217 if (TREE_CODE (expr) == ARRAY_REF
3218 && !useless_type_conversion_p (TREE_TYPE (expr),
3219 TREE_TYPE (TREE_TYPE (op))))
3221 error ("type mismatch in %qs", code_name);
3222 debug_generic_stmt (TREE_TYPE (expr));
3223 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3224 return true;
3226 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3227 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3228 TREE_TYPE (TREE_TYPE (op))))
3230 error ("type mismatch in %qs", code_name);
3231 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3232 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3233 return true;
3236 if (TREE_CODE (expr) == COMPONENT_REF)
3238 if (TREE_OPERAND (expr, 2)
3239 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3241 error ("invalid %qs offset operator", code_name);
3242 return true;
3244 if (!useless_type_conversion_p (TREE_TYPE (expr),
3245 TREE_TYPE (TREE_OPERAND (expr, 1))))
3247 error ("type mismatch in %qs", code_name);
3248 debug_generic_stmt (TREE_TYPE (expr));
3249 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3250 return true;
3254 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3256 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3257 that their operand is not an SSA name or an invariant when
3258 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3259 bug). Otherwise there is nothing to verify, gross mismatches at
3260 most invoke undefined behavior. */
3261 if (require_lvalue
3262 && (TREE_CODE (op) == SSA_NAME
3263 || is_gimple_min_invariant (op)))
3265 error ("conversion of %qs on the left hand side of %qs",
3266 get_tree_code_name (TREE_CODE (op)), code_name);
3267 debug_generic_stmt (expr);
3268 return true;
3270 else if (TREE_CODE (op) == SSA_NAME
3271 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3273 error ("conversion of register to a different size in %qs",
3274 code_name);
3275 debug_generic_stmt (expr);
3276 return true;
3278 else if (!handled_component_p (op))
3279 return false;
3282 expr = op;
3285 code_name = get_tree_code_name (TREE_CODE (expr));
3287 if (TREE_CODE (expr) == MEM_REF)
3289 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3290 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3291 && verify_address (TREE_OPERAND (expr, 0), false)))
3293 error ("invalid address operand in %qs", code_name);
3294 debug_generic_stmt (expr);
3295 return true;
3297 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3298 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3300 error ("invalid offset operand in %qs", code_name);
3301 debug_generic_stmt (expr);
3302 return true;
3304 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3305 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3307 error ("invalid clique in %qs", code_name);
3308 debug_generic_stmt (expr);
3309 return true;
3312 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3314 if (!TMR_BASE (expr)
3315 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3316 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3317 && verify_address (TMR_BASE (expr), false)))
3319 error ("invalid address operand in %qs", code_name);
3320 return true;
3322 if (!TMR_OFFSET (expr)
3323 || !poly_int_tree_p (TMR_OFFSET (expr))
3324 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3326 error ("invalid offset operand in %qs", code_name);
3327 debug_generic_stmt (expr);
3328 return true;
3330 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3331 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3333 error ("invalid clique in %qs", code_name);
3334 debug_generic_stmt (expr);
3335 return true;
3338 else if (TREE_CODE (expr) == INDIRECT_REF)
3340 error ("%qs in gimple IL", code_name);
3341 debug_generic_stmt (expr);
3342 return true;
3345 if (!require_lvalue
3346 && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3347 return false;
3349 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3350 return false;
3352 if (TREE_CODE (expr) != TARGET_MEM_REF
3353 && TREE_CODE (expr) != MEM_REF)
3355 error ("invalid expression for min lvalue");
3356 return true;
3359 return false;
3362 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3363 list of pointer-to types that is trivially convertible to DEST. */
3365 static bool
3366 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3368 tree src;
3370 if (!TYPE_POINTER_TO (src_obj))
3371 return true;
3373 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3374 if (useless_type_conversion_p (dest, src))
3375 return true;
3377 return false;
3380 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3381 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3383 static bool
3384 valid_fixed_convert_types_p (tree type1, tree type2)
3386 return (FIXED_POINT_TYPE_P (type1)
3387 && (INTEGRAL_TYPE_P (type2)
3388 || SCALAR_FLOAT_TYPE_P (type2)
3389 || FIXED_POINT_TYPE_P (type2)));
3392 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3393 is a problem, otherwise false. */
3395 static bool
3396 verify_gimple_call (gcall *stmt)
3398 tree fn = gimple_call_fn (stmt);
3399 tree fntype, fndecl;
3400 unsigned i;
3402 if (gimple_call_internal_p (stmt))
3404 if (fn)
3406 error ("gimple call has two targets");
3407 debug_generic_stmt (fn);
3408 return true;
3411 else
3413 if (!fn)
3415 error ("gimple call has no target");
3416 return true;
3420 if (fn && !is_gimple_call_addr (fn))
3422 error ("invalid function in gimple call");
3423 debug_generic_stmt (fn);
3424 return true;
3427 if (fn
3428 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3429 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3430 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3432 error ("non-function in gimple call");
3433 return true;
3436 fndecl = gimple_call_fndecl (stmt);
3437 if (fndecl
3438 && TREE_CODE (fndecl) == FUNCTION_DECL
3439 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3440 && !DECL_PURE_P (fndecl)
3441 && !TREE_READONLY (fndecl))
3443 error ("invalid pure const state for function");
3444 return true;
3447 tree lhs = gimple_call_lhs (stmt);
3448 if (lhs
3449 && (!is_gimple_reg (lhs)
3450 && (!is_gimple_lvalue (lhs)
3451 || verify_types_in_gimple_reference
3452 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3453 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3455 error ("invalid LHS in gimple call");
3456 return true;
3459 if (gimple_call_ctrl_altering_p (stmt)
3460 && gimple_call_noreturn_p (stmt)
3461 && should_remove_lhs_p (lhs))
3463 error ("LHS in %<noreturn%> call");
3464 return true;
3467 fntype = gimple_call_fntype (stmt);
3468 if (fntype
3469 && lhs
3470 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3471 /* ??? At least C++ misses conversions at assignments from
3472 void * call results.
3473 For now simply allow arbitrary pointer type conversions. */
3474 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3475 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3477 error ("invalid conversion in gimple call");
3478 debug_generic_stmt (TREE_TYPE (lhs));
3479 debug_generic_stmt (TREE_TYPE (fntype));
3480 return true;
3483 if (gimple_call_chain (stmt)
3484 && !is_gimple_val (gimple_call_chain (stmt)))
3486 error ("invalid static chain in gimple call");
3487 debug_generic_stmt (gimple_call_chain (stmt));
3488 return true;
3491 /* If there is a static chain argument, the call should either be
3492 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3493 if (gimple_call_chain (stmt)
3494 && fndecl
3495 && !DECL_STATIC_CHAIN (fndecl))
3497 error ("static chain with function that doesn%'t use one");
3498 return true;
3501 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3503 switch (DECL_FUNCTION_CODE (fndecl))
3505 case BUILT_IN_UNREACHABLE:
3506 case BUILT_IN_UNREACHABLE_TRAP:
3507 case BUILT_IN_TRAP:
3508 if (gimple_call_num_args (stmt) > 0)
3510 /* Built-in unreachable with parameters might not be caught by
3511 undefined behavior sanitizer. Front-ends do check users do not
3512 call them that way but we also produce calls to
3513 __builtin_unreachable internally, for example when IPA figures
3514 out a call cannot happen in a legal program. In such cases,
3515 we must make sure arguments are stripped off. */
3516 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3517 "with arguments");
3518 return true;
3520 break;
3521 default:
3522 break;
3526 /* For a call to .DEFERRED_INIT,
3527 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3528 we should guarantee that when the 1st argument is a constant, it should
3529 be the same as the size of the LHS. */
3531 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3533 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3534 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3536 if (TREE_CODE (lhs) == SSA_NAME)
3537 lhs = SSA_NAME_VAR (lhs);
3539 poly_uint64 size_from_arg0, size_from_lhs;
3540 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3541 &size_from_arg0);
3542 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3543 &size_from_lhs);
3544 if (is_constant_size_arg0 && is_constant_size_lhs)
3545 if (maybe_ne (size_from_arg0, size_from_lhs))
3547 error ("%<DEFERRED_INIT%> calls should have same "
3548 "constant size for the first argument and LHS");
3549 return true;
3553 /* ??? The C frontend passes unpromoted arguments in case it
3554 didn't see a function declaration before the call. So for now
3555 leave the call arguments mostly unverified. Once we gimplify
3556 unit-at-a-time we have a chance to fix this. */
3557 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3559 tree arg = gimple_call_arg (stmt, i);
3560 if ((is_gimple_reg_type (TREE_TYPE (arg))
3561 && !is_gimple_val (arg))
3562 || (!is_gimple_reg_type (TREE_TYPE (arg))
3563 && !is_gimple_lvalue (arg)))
3565 error ("invalid argument to gimple call");
3566 debug_generic_expr (arg);
3567 return true;
3569 if (!is_gimple_reg (arg))
3571 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3572 arg = TREE_OPERAND (arg, 0);
3573 if (verify_types_in_gimple_reference (arg, false))
3574 return true;
3578 return false;
3581 /* Verifies the gimple comparison with the result type TYPE and
3582 the operands OP0 and OP1, comparison code is CODE. */
3584 static bool
3585 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3587 tree op0_type = TREE_TYPE (op0);
3588 tree op1_type = TREE_TYPE (op1);
3590 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3592 error ("invalid operands in gimple comparison");
3593 return true;
3596 /* For comparisons we do not have the operations type as the
3597 effective type the comparison is carried out in. Instead
3598 we require that either the first operand is trivially
3599 convertible into the second, or the other way around. */
3600 if (!useless_type_conversion_p (op0_type, op1_type)
3601 && !useless_type_conversion_p (op1_type, op0_type))
3603 error ("mismatching comparison operand types");
3604 debug_generic_expr (op0_type);
3605 debug_generic_expr (op1_type);
3606 return true;
3609 /* The resulting type of a comparison may be an effective boolean type. */
3610 if (INTEGRAL_TYPE_P (type)
3611 && (TREE_CODE (type) == BOOLEAN_TYPE
3612 || TYPE_PRECISION (type) == 1))
3614 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3615 || TREE_CODE (op1_type) == VECTOR_TYPE)
3616 && code != EQ_EXPR && code != NE_EXPR
3617 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3618 && !VECTOR_INTEGER_TYPE_P (op0_type))
3620 error ("unsupported operation or type for vector comparison"
3621 " returning a boolean");
3622 debug_generic_expr (op0_type);
3623 debug_generic_expr (op1_type);
3624 return true;
3627 /* Or a boolean vector type with the same element count
3628 as the comparison operand types. */
3629 else if (TREE_CODE (type) == VECTOR_TYPE
3630 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3632 if (TREE_CODE (op0_type) != VECTOR_TYPE
3633 || TREE_CODE (op1_type) != VECTOR_TYPE)
3635 error ("non-vector operands in vector comparison");
3636 debug_generic_expr (op0_type);
3637 debug_generic_expr (op1_type);
3638 return true;
3641 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3642 TYPE_VECTOR_SUBPARTS (op0_type)))
3644 error ("invalid vector comparison resulting type");
3645 debug_generic_expr (type);
3646 return true;
3649 else
3651 error ("bogus comparison result type");
3652 debug_generic_expr (type);
3653 return true;
3656 return false;
3659 /* Verify a gimple assignment statement STMT with an unary rhs.
3660 Returns true if anything is wrong. */
3662 static bool
3663 verify_gimple_assign_unary (gassign *stmt)
3665 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3666 tree lhs = gimple_assign_lhs (stmt);
3667 tree lhs_type = TREE_TYPE (lhs);
3668 tree rhs1 = gimple_assign_rhs1 (stmt);
3669 tree rhs1_type = TREE_TYPE (rhs1);
3671 if (!is_gimple_reg (lhs))
3673 error ("non-register as LHS of unary operation");
3674 return true;
3677 if (!is_gimple_val (rhs1))
3679 error ("invalid operand in unary operation");
3680 return true;
3683 const char* const code_name = get_tree_code_name (rhs_code);
3685 /* First handle conversions. */
3686 switch (rhs_code)
3688 CASE_CONVERT:
3690 /* Allow conversions between vectors with the same number of elements,
3691 provided that the conversion is OK for the element types too. */
3692 if (VECTOR_TYPE_P (lhs_type)
3693 && VECTOR_TYPE_P (rhs1_type)
3694 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3695 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3697 lhs_type = TREE_TYPE (lhs_type);
3698 rhs1_type = TREE_TYPE (rhs1_type);
3700 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3702 error ("invalid vector types in nop conversion");
3703 debug_generic_expr (lhs_type);
3704 debug_generic_expr (rhs1_type);
3705 return true;
3708 /* Allow conversions from pointer type to integral type only if
3709 there is no sign or zero extension involved.
3710 For targets were the precision of ptrofftype doesn't match that
3711 of pointers we allow conversions to types where
3712 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3713 if ((POINTER_TYPE_P (lhs_type)
3714 && INTEGRAL_TYPE_P (rhs1_type))
3715 || (POINTER_TYPE_P (rhs1_type)
3716 && INTEGRAL_TYPE_P (lhs_type)
3717 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3718 #if defined(POINTERS_EXTEND_UNSIGNED)
3719 || (TYPE_MODE (rhs1_type) == ptr_mode
3720 && (TYPE_PRECISION (lhs_type)
3721 == BITS_PER_WORD /* word_mode */
3722 || (TYPE_PRECISION (lhs_type)
3723 == GET_MODE_PRECISION (Pmode))))
3724 #endif
3726 return false;
3728 /* Allow conversion from integral to offset type and vice versa. */
3729 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3730 && INTEGRAL_TYPE_P (rhs1_type))
3731 || (INTEGRAL_TYPE_P (lhs_type)
3732 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3733 return false;
3735 /* Otherwise assert we are converting between types of the
3736 same kind. */
3737 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3739 error ("invalid types in nop conversion");
3740 debug_generic_expr (lhs_type);
3741 debug_generic_expr (rhs1_type);
3742 return true;
3745 return false;
3748 case ADDR_SPACE_CONVERT_EXPR:
3750 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3751 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3752 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3754 error ("invalid types in address space conversion");
3755 debug_generic_expr (lhs_type);
3756 debug_generic_expr (rhs1_type);
3757 return true;
3760 return false;
3763 case FIXED_CONVERT_EXPR:
3765 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3766 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3768 error ("invalid types in fixed-point conversion");
3769 debug_generic_expr (lhs_type);
3770 debug_generic_expr (rhs1_type);
3771 return true;
3774 return false;
3777 case FLOAT_EXPR:
3779 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3780 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3781 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3783 error ("invalid types in conversion to floating-point");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 return true;
3789 return false;
3792 case FIX_TRUNC_EXPR:
3794 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3795 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3796 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3798 error ("invalid types in conversion to integer");
3799 debug_generic_expr (lhs_type);
3800 debug_generic_expr (rhs1_type);
3801 return true;
3804 return false;
3807 case VEC_UNPACK_HI_EXPR:
3808 case VEC_UNPACK_LO_EXPR:
3809 case VEC_UNPACK_FLOAT_HI_EXPR:
3810 case VEC_UNPACK_FLOAT_LO_EXPR:
3811 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3812 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3813 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3814 || TREE_CODE (lhs_type) != VECTOR_TYPE
3815 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3816 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3817 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3818 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3819 || ((rhs_code == VEC_UNPACK_HI_EXPR
3820 || rhs_code == VEC_UNPACK_LO_EXPR)
3821 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3822 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3823 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3824 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3825 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3826 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3827 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3828 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3829 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3830 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3831 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3832 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3833 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3834 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3835 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3836 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3838 error ("type mismatch in %qs expression", code_name);
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs1_type);
3841 return true;
3844 return false;
3846 case NEGATE_EXPR:
3847 case ABS_EXPR:
3848 case BIT_NOT_EXPR:
3849 case PAREN_EXPR:
3850 case CONJ_EXPR:
3851 /* Disallow pointer and offset types for many of the unary gimple. */
3852 if (POINTER_TYPE_P (lhs_type)
3853 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3855 error ("invalid types for %qs", code_name);
3856 debug_generic_expr (lhs_type);
3857 debug_generic_expr (rhs1_type);
3858 return true;
3860 break;
3862 case ABSU_EXPR:
3863 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3864 || !TYPE_UNSIGNED (lhs_type)
3865 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3866 || TYPE_UNSIGNED (rhs1_type)
3867 || element_precision (lhs_type) != element_precision (rhs1_type))
3869 error ("invalid types for %qs", code_name);
3870 debug_generic_expr (lhs_type);
3871 debug_generic_expr (rhs1_type);
3872 return true;
3874 return false;
3876 case VEC_DUPLICATE_EXPR:
3877 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3878 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3880 error ("%qs should be from a scalar to a like vector", code_name);
3881 debug_generic_expr (lhs_type);
3882 debug_generic_expr (rhs1_type);
3883 return true;
3885 return false;
3887 default:
3888 gcc_unreachable ();
3891 /* For the remaining codes assert there is no conversion involved. */
3892 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3894 error ("non-trivial conversion in unary operation");
3895 debug_generic_expr (lhs_type);
3896 debug_generic_expr (rhs1_type);
3897 return true;
3900 return false;
3903 /* Verify a gimple assignment statement STMT with a binary rhs.
3904 Returns true if anything is wrong. */
3906 static bool
3907 verify_gimple_assign_binary (gassign *stmt)
3909 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3910 tree lhs = gimple_assign_lhs (stmt);
3911 tree lhs_type = TREE_TYPE (lhs);
3912 tree rhs1 = gimple_assign_rhs1 (stmt);
3913 tree rhs1_type = TREE_TYPE (rhs1);
3914 tree rhs2 = gimple_assign_rhs2 (stmt);
3915 tree rhs2_type = TREE_TYPE (rhs2);
3917 if (!is_gimple_reg (lhs))
3919 error ("non-register as LHS of binary operation");
3920 return true;
3923 if (!is_gimple_val (rhs1)
3924 || !is_gimple_val (rhs2))
3926 error ("invalid operands in binary operation");
3927 return true;
3930 const char* const code_name = get_tree_code_name (rhs_code);
3932 /* First handle operations that involve different types. */
3933 switch (rhs_code)
3935 case COMPLEX_EXPR:
3937 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3938 || !(INTEGRAL_TYPE_P (rhs1_type)
3939 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3940 || !(INTEGRAL_TYPE_P (rhs2_type)
3941 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3943 error ("type mismatch in %qs", code_name);
3944 debug_generic_expr (lhs_type);
3945 debug_generic_expr (rhs1_type);
3946 debug_generic_expr (rhs2_type);
3947 return true;
3950 return false;
3953 case LSHIFT_EXPR:
3954 case RSHIFT_EXPR:
3955 case LROTATE_EXPR:
3956 case RROTATE_EXPR:
3958 /* Shifts and rotates are ok on integral types, fixed point
3959 types and integer vector types. */
3960 if ((!INTEGRAL_TYPE_P (rhs1_type)
3961 && !FIXED_POINT_TYPE_P (rhs1_type)
3962 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3963 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3964 || (!INTEGRAL_TYPE_P (rhs2_type)
3965 /* Vector shifts of vectors are also ok. */
3966 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3967 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3968 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3969 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3970 || !useless_type_conversion_p (lhs_type, rhs1_type))
3972 error ("type mismatch in %qs", code_name);
3973 debug_generic_expr (lhs_type);
3974 debug_generic_expr (rhs1_type);
3975 debug_generic_expr (rhs2_type);
3976 return true;
3979 return false;
3982 case WIDEN_LSHIFT_EXPR:
3984 if (!INTEGRAL_TYPE_P (lhs_type)
3985 || !INTEGRAL_TYPE_P (rhs1_type)
3986 || TREE_CODE (rhs2) != INTEGER_CST
3987 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3989 error ("type mismatch in %qs", code_name);
3990 debug_generic_expr (lhs_type);
3991 debug_generic_expr (rhs1_type);
3992 debug_generic_expr (rhs2_type);
3993 return true;
3996 return false;
3999 case VEC_WIDEN_LSHIFT_HI_EXPR:
4000 case VEC_WIDEN_LSHIFT_LO_EXPR:
4002 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4003 || TREE_CODE (lhs_type) != VECTOR_TYPE
4004 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4005 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4006 || TREE_CODE (rhs2) != INTEGER_CST
4007 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4008 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4010 error ("type mismatch in %qs", code_name);
4011 debug_generic_expr (lhs_type);
4012 debug_generic_expr (rhs1_type);
4013 debug_generic_expr (rhs2_type);
4014 return true;
4017 return false;
4020 case WIDEN_PLUS_EXPR:
4021 case WIDEN_MINUS_EXPR:
4022 case PLUS_EXPR:
4023 case MINUS_EXPR:
4025 tree lhs_etype = lhs_type;
4026 tree rhs1_etype = rhs1_type;
4027 tree rhs2_etype = rhs2_type;
4028 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
4030 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4031 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4033 error ("invalid non-vector operands to %qs", code_name);
4034 return true;
4036 lhs_etype = TREE_TYPE (lhs_type);
4037 rhs1_etype = TREE_TYPE (rhs1_type);
4038 rhs2_etype = TREE_TYPE (rhs2_type);
4040 if (POINTER_TYPE_P (lhs_etype)
4041 || POINTER_TYPE_P (rhs1_etype)
4042 || POINTER_TYPE_P (rhs2_etype))
4044 error ("invalid (pointer) operands %qs", code_name);
4045 return true;
4048 /* Continue with generic binary expression handling. */
4049 break;
4052 case POINTER_PLUS_EXPR:
4054 if (!POINTER_TYPE_P (rhs1_type)
4055 || !useless_type_conversion_p (lhs_type, rhs1_type)
4056 || !ptrofftype_p (rhs2_type))
4058 error ("type mismatch in %qs", code_name);
4059 debug_generic_stmt (lhs_type);
4060 debug_generic_stmt (rhs1_type);
4061 debug_generic_stmt (rhs2_type);
4062 return true;
4065 return false;
4068 case POINTER_DIFF_EXPR:
4070 if (!POINTER_TYPE_P (rhs1_type)
4071 || !POINTER_TYPE_P (rhs2_type)
4072 /* Because we special-case pointers to void we allow difference
4073 of arbitrary pointers with the same mode. */
4074 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4075 || !INTEGRAL_TYPE_P (lhs_type)
4076 || TYPE_UNSIGNED (lhs_type)
4077 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4079 error ("type mismatch in %qs", code_name);
4080 debug_generic_stmt (lhs_type);
4081 debug_generic_stmt (rhs1_type);
4082 debug_generic_stmt (rhs2_type);
4083 return true;
4086 return false;
4089 case TRUTH_ANDIF_EXPR:
4090 case TRUTH_ORIF_EXPR:
4091 case TRUTH_AND_EXPR:
4092 case TRUTH_OR_EXPR:
4093 case TRUTH_XOR_EXPR:
4095 gcc_unreachable ();
4097 case LT_EXPR:
4098 case LE_EXPR:
4099 case GT_EXPR:
4100 case GE_EXPR:
4101 case EQ_EXPR:
4102 case NE_EXPR:
4103 case UNORDERED_EXPR:
4104 case ORDERED_EXPR:
4105 case UNLT_EXPR:
4106 case UNLE_EXPR:
4107 case UNGT_EXPR:
4108 case UNGE_EXPR:
4109 case UNEQ_EXPR:
4110 case LTGT_EXPR:
4111 /* Comparisons are also binary, but the result type is not
4112 connected to the operand types. */
4113 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4115 case WIDEN_MULT_EXPR:
4116 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4117 return true;
4118 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4119 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4121 case WIDEN_SUM_EXPR:
4123 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4124 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4125 && ((!INTEGRAL_TYPE_P (rhs1_type)
4126 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4127 || (!INTEGRAL_TYPE_P (lhs_type)
4128 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4129 || !useless_type_conversion_p (lhs_type, rhs2_type)
4130 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4131 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4133 error ("type mismatch in %qs", code_name);
4134 debug_generic_expr (lhs_type);
4135 debug_generic_expr (rhs1_type);
4136 debug_generic_expr (rhs2_type);
4137 return true;
4139 return false;
4142 case VEC_WIDEN_MINUS_HI_EXPR:
4143 case VEC_WIDEN_MINUS_LO_EXPR:
4144 case VEC_WIDEN_PLUS_HI_EXPR:
4145 case VEC_WIDEN_PLUS_LO_EXPR:
4146 case VEC_WIDEN_MULT_HI_EXPR:
4147 case VEC_WIDEN_MULT_LO_EXPR:
4148 case VEC_WIDEN_MULT_EVEN_EXPR:
4149 case VEC_WIDEN_MULT_ODD_EXPR:
4151 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4152 || TREE_CODE (lhs_type) != VECTOR_TYPE
4153 || !types_compatible_p (rhs1_type, rhs2_type)
4154 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4155 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4157 error ("type mismatch in %qs", code_name);
4158 debug_generic_expr (lhs_type);
4159 debug_generic_expr (rhs1_type);
4160 debug_generic_expr (rhs2_type);
4161 return true;
4163 return false;
4166 case VEC_PACK_TRUNC_EXPR:
4167 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4168 vector boolean types. */
4169 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4170 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4171 && types_compatible_p (rhs1_type, rhs2_type)
4172 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4173 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4174 return false;
4176 /* Fallthru. */
4177 case VEC_PACK_SAT_EXPR:
4178 case VEC_PACK_FIX_TRUNC_EXPR:
4180 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4181 || TREE_CODE (lhs_type) != VECTOR_TYPE
4182 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4183 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4184 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4185 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4186 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4187 || !types_compatible_p (rhs1_type, rhs2_type)
4188 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4189 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4190 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4191 TYPE_VECTOR_SUBPARTS (lhs_type)))
4193 error ("type mismatch in %qs", code_name);
4194 debug_generic_expr (lhs_type);
4195 debug_generic_expr (rhs1_type);
4196 debug_generic_expr (rhs2_type);
4197 return true;
4200 return false;
4203 case VEC_PACK_FLOAT_EXPR:
4204 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4205 || TREE_CODE (lhs_type) != VECTOR_TYPE
4206 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4207 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4208 || !types_compatible_p (rhs1_type, rhs2_type)
4209 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4210 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4211 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4212 TYPE_VECTOR_SUBPARTS (lhs_type)))
4214 error ("type mismatch in %qs", code_name);
4215 debug_generic_expr (lhs_type);
4216 debug_generic_expr (rhs1_type);
4217 debug_generic_expr (rhs2_type);
4218 return true;
4221 return false;
4223 case MULT_EXPR:
4224 case MULT_HIGHPART_EXPR:
4225 case TRUNC_DIV_EXPR:
4226 case CEIL_DIV_EXPR:
4227 case FLOOR_DIV_EXPR:
4228 case ROUND_DIV_EXPR:
4229 case TRUNC_MOD_EXPR:
4230 case CEIL_MOD_EXPR:
4231 case FLOOR_MOD_EXPR:
4232 case ROUND_MOD_EXPR:
4233 case RDIV_EXPR:
4234 case EXACT_DIV_EXPR:
4235 case BIT_IOR_EXPR:
4236 case BIT_XOR_EXPR:
4237 /* Disallow pointer and offset types for many of the binary gimple. */
4238 if (POINTER_TYPE_P (lhs_type)
4239 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4241 error ("invalid types for %qs", code_name);
4242 debug_generic_expr (lhs_type);
4243 debug_generic_expr (rhs1_type);
4244 debug_generic_expr (rhs2_type);
4245 return true;
4247 /* Continue with generic binary expression handling. */
4248 break;
4250 case MIN_EXPR:
4251 case MAX_EXPR:
4252 /* Continue with generic binary expression handling. */
4253 break;
4255 case BIT_AND_EXPR:
4256 if (POINTER_TYPE_P (lhs_type)
4257 && TREE_CODE (rhs2) == INTEGER_CST)
4258 break;
4259 /* Disallow pointer and offset types for many of the binary gimple. */
4260 if (POINTER_TYPE_P (lhs_type)
4261 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4263 error ("invalid types for %qs", code_name);
4264 debug_generic_expr (lhs_type);
4265 debug_generic_expr (rhs1_type);
4266 debug_generic_expr (rhs2_type);
4267 return true;
4269 /* Continue with generic binary expression handling. */
4270 break;
4272 case VEC_SERIES_EXPR:
4273 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4275 error ("type mismatch in %qs", code_name);
4276 debug_generic_expr (rhs1_type);
4277 debug_generic_expr (rhs2_type);
4278 return true;
4280 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4281 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4283 error ("vector type expected in %qs", code_name);
4284 debug_generic_expr (lhs_type);
4285 return true;
4287 return false;
4289 default:
4290 gcc_unreachable ();
4293 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4294 || !useless_type_conversion_p (lhs_type, rhs2_type))
4296 error ("type mismatch in binary expression");
4297 debug_generic_stmt (lhs_type);
4298 debug_generic_stmt (rhs1_type);
4299 debug_generic_stmt (rhs2_type);
4300 return true;
4303 return false;
4306 /* Verify a gimple assignment statement STMT with a ternary rhs.
4307 Returns true if anything is wrong. */
4309 static bool
4310 verify_gimple_assign_ternary (gassign *stmt)
4312 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4313 tree lhs = gimple_assign_lhs (stmt);
4314 tree lhs_type = TREE_TYPE (lhs);
4315 tree rhs1 = gimple_assign_rhs1 (stmt);
4316 tree rhs1_type = TREE_TYPE (rhs1);
4317 tree rhs2 = gimple_assign_rhs2 (stmt);
4318 tree rhs2_type = TREE_TYPE (rhs2);
4319 tree rhs3 = gimple_assign_rhs3 (stmt);
4320 tree rhs3_type = TREE_TYPE (rhs3);
4322 if (!is_gimple_reg (lhs))
4324 error ("non-register as LHS of ternary operation");
4325 return true;
4328 if (!is_gimple_val (rhs1)
4329 || !is_gimple_val (rhs2)
4330 || !is_gimple_val (rhs3))
4332 error ("invalid operands in ternary operation");
4333 return true;
4336 const char* const code_name = get_tree_code_name (rhs_code);
4338 /* First handle operations that involve different types. */
4339 switch (rhs_code)
4341 case WIDEN_MULT_PLUS_EXPR:
4342 case WIDEN_MULT_MINUS_EXPR:
4343 if ((!INTEGRAL_TYPE_P (rhs1_type)
4344 && !FIXED_POINT_TYPE_P (rhs1_type))
4345 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4346 || !useless_type_conversion_p (lhs_type, rhs3_type)
4347 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4348 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4350 error ("type mismatch in %qs", code_name);
4351 debug_generic_expr (lhs_type);
4352 debug_generic_expr (rhs1_type);
4353 debug_generic_expr (rhs2_type);
4354 debug_generic_expr (rhs3_type);
4355 return true;
4357 break;
4359 case VEC_COND_EXPR:
4360 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4361 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4362 TYPE_VECTOR_SUBPARTS (lhs_type)))
4364 error ("the first argument of a %qs must be of a "
4365 "boolean vector type of the same number of elements "
4366 "as the result", code_name);
4367 debug_generic_expr (lhs_type);
4368 debug_generic_expr (rhs1_type);
4369 return true;
4371 /* Fallthrough. */
4372 case COND_EXPR:
4373 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4374 || !useless_type_conversion_p (lhs_type, rhs3_type))
4376 error ("type mismatch in %qs", code_name);
4377 debug_generic_expr (lhs_type);
4378 debug_generic_expr (rhs2_type);
4379 debug_generic_expr (rhs3_type);
4380 return true;
4382 break;
4384 case VEC_PERM_EXPR:
4385 /* If permute is constant, then we allow for lhs and rhs
4386 to have different vector types, provided:
4387 (1) lhs, rhs1, rhs2 have same element type.
4388 (2) rhs3 vector is constant and has integer element type.
4389 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4391 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4392 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4393 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4394 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4396 error ("vector types expected in %qs", code_name);
4397 debug_generic_expr (lhs_type);
4398 debug_generic_expr (rhs1_type);
4399 debug_generic_expr (rhs2_type);
4400 debug_generic_expr (rhs3_type);
4401 return true;
4404 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4405 as long as lhs, rhs1 and rhs2 have same element type. */
4406 if (TREE_CONSTANT (rhs3)
4407 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4408 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4409 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4410 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4412 error ("type mismatch in %qs", code_name);
4413 debug_generic_expr (lhs_type);
4414 debug_generic_expr (rhs1_type);
4415 debug_generic_expr (rhs2_type);
4416 debug_generic_expr (rhs3_type);
4417 return true;
4420 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4421 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4422 TYPE_VECTOR_SUBPARTS (rhs2_type))
4423 || (!TREE_CONSTANT(rhs3)
4424 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4425 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4426 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4427 TYPE_VECTOR_SUBPARTS (lhs_type)))
4429 error ("vectors with different element number found in %qs",
4430 code_name);
4431 debug_generic_expr (lhs_type);
4432 debug_generic_expr (rhs1_type);
4433 debug_generic_expr (rhs2_type);
4434 debug_generic_expr (rhs3_type);
4435 return true;
4438 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4439 || (TREE_CODE (rhs3) != VECTOR_CST
4440 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4441 (TREE_TYPE (rhs3_type)))
4442 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4443 (TREE_TYPE (rhs1_type))))))
4445 error ("invalid mask type in %qs", code_name);
4446 debug_generic_expr (lhs_type);
4447 debug_generic_expr (rhs1_type);
4448 debug_generic_expr (rhs2_type);
4449 debug_generic_expr (rhs3_type);
4450 return true;
4453 return false;
4455 case SAD_EXPR:
4456 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4457 || !useless_type_conversion_p (lhs_type, rhs3_type)
4458 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4459 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4461 error ("type mismatch in %qs", code_name);
4462 debug_generic_expr (lhs_type);
4463 debug_generic_expr (rhs1_type);
4464 debug_generic_expr (rhs2_type);
4465 debug_generic_expr (rhs3_type);
4466 return true;
4469 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4470 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4471 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4473 error ("vector types expected in %qs", code_name);
4474 debug_generic_expr (lhs_type);
4475 debug_generic_expr (rhs1_type);
4476 debug_generic_expr (rhs2_type);
4477 debug_generic_expr (rhs3_type);
4478 return true;
4481 return false;
4483 case BIT_INSERT_EXPR:
4484 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4486 error ("type mismatch in %qs", code_name);
4487 debug_generic_expr (lhs_type);
4488 debug_generic_expr (rhs1_type);
4489 return true;
4491 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4492 && INTEGRAL_TYPE_P (rhs2_type))
4493 /* Vector element insert. */
4494 || (VECTOR_TYPE_P (rhs1_type)
4495 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4496 /* Aligned sub-vector insert. */
4497 || (VECTOR_TYPE_P (rhs1_type)
4498 && VECTOR_TYPE_P (rhs2_type)
4499 && types_compatible_p (TREE_TYPE (rhs1_type),
4500 TREE_TYPE (rhs2_type))
4501 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4502 TYPE_VECTOR_SUBPARTS (rhs2_type))
4503 && multiple_p (wi::to_poly_offset (rhs3),
4504 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4506 error ("not allowed type combination in %qs", code_name);
4507 debug_generic_expr (rhs1_type);
4508 debug_generic_expr (rhs2_type);
4509 return true;
4511 if (! tree_fits_uhwi_p (rhs3)
4512 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4513 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4515 error ("invalid position or size in %qs", code_name);
4516 return true;
4518 if (INTEGRAL_TYPE_P (rhs1_type)
4519 && !type_has_mode_precision_p (rhs1_type))
4521 error ("%qs into non-mode-precision operand", code_name);
4522 return true;
4524 if (INTEGRAL_TYPE_P (rhs1_type))
4526 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4527 if (bitpos >= TYPE_PRECISION (rhs1_type)
4528 || (bitpos + TYPE_PRECISION (rhs2_type)
4529 > TYPE_PRECISION (rhs1_type)))
4531 error ("insertion out of range in %qs", code_name);
4532 return true;
4535 else if (VECTOR_TYPE_P (rhs1_type))
4537 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4538 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4539 if (bitpos % bitsize != 0)
4541 error ("%qs not at element boundary", code_name);
4542 return true;
4545 return false;
4547 case DOT_PROD_EXPR:
4549 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4550 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4551 && ((!INTEGRAL_TYPE_P (rhs1_type)
4552 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4553 || (!INTEGRAL_TYPE_P (lhs_type)
4554 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4555 /* rhs1_type and rhs2_type may differ in sign. */
4556 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4557 || !useless_type_conversion_p (lhs_type, rhs3_type)
4558 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4559 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4561 error ("type mismatch in %qs", code_name);
4562 debug_generic_expr (lhs_type);
4563 debug_generic_expr (rhs1_type);
4564 debug_generic_expr (rhs2_type);
4565 return true;
4567 return false;
4570 case REALIGN_LOAD_EXPR:
4571 /* FIXME. */
4572 return false;
4574 default:
4575 gcc_unreachable ();
4577 return false;
4580 /* Verify a gimple assignment statement STMT with a single rhs.
4581 Returns true if anything is wrong. */
4583 static bool
4584 verify_gimple_assign_single (gassign *stmt)
4586 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4587 tree lhs = gimple_assign_lhs (stmt);
4588 tree lhs_type = TREE_TYPE (lhs);
4589 tree rhs1 = gimple_assign_rhs1 (stmt);
4590 tree rhs1_type = TREE_TYPE (rhs1);
4591 bool res = false;
4593 const char* const code_name = get_tree_code_name (rhs_code);
4595 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4597 error ("non-trivial conversion in %qs", code_name);
4598 debug_generic_expr (lhs_type);
4599 debug_generic_expr (rhs1_type);
4600 return true;
4603 if (gimple_clobber_p (stmt)
4604 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4606 error ("%qs LHS in clobber statement",
4607 get_tree_code_name (TREE_CODE (lhs)));
4608 debug_generic_expr (lhs);
4609 return true;
4612 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4614 error ("%qs LHS in assignment statement",
4615 get_tree_code_name (TREE_CODE (lhs)));
4616 debug_generic_expr (lhs);
4617 return true;
4620 if (handled_component_p (lhs)
4621 || TREE_CODE (lhs) == MEM_REF
4622 || TREE_CODE (lhs) == TARGET_MEM_REF)
4623 res |= verify_types_in_gimple_reference (lhs, true);
4625 /* Special codes we cannot handle via their class. */
4626 switch (rhs_code)
4628 case ADDR_EXPR:
4630 tree op = TREE_OPERAND (rhs1, 0);
4631 if (!is_gimple_addressable (op))
4633 error ("invalid operand in %qs", code_name);
4634 return true;
4637 /* Technically there is no longer a need for matching types, but
4638 gimple hygiene asks for this check. In LTO we can end up
4639 combining incompatible units and thus end up with addresses
4640 of globals that change their type to a common one. */
4641 if (!in_lto_p
4642 && !types_compatible_p (TREE_TYPE (op),
4643 TREE_TYPE (TREE_TYPE (rhs1)))
4644 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4645 TREE_TYPE (op)))
4647 error ("type mismatch in %qs", code_name);
4648 debug_generic_stmt (TREE_TYPE (rhs1));
4649 debug_generic_stmt (TREE_TYPE (op));
4650 return true;
4653 return (verify_address (rhs1, true)
4654 || verify_types_in_gimple_reference (op, true));
4657 /* tcc_reference */
4658 case INDIRECT_REF:
4659 error ("%qs in gimple IL", code_name);
4660 return true;
4662 case COMPONENT_REF:
4663 case BIT_FIELD_REF:
4664 case ARRAY_REF:
4665 case ARRAY_RANGE_REF:
4666 case VIEW_CONVERT_EXPR:
4667 case REALPART_EXPR:
4668 case IMAGPART_EXPR:
4669 case TARGET_MEM_REF:
4670 case MEM_REF:
4671 if (!is_gimple_reg (lhs)
4672 && is_gimple_reg_type (TREE_TYPE (lhs)))
4674 error ("invalid RHS for gimple memory store: %qs", code_name);
4675 debug_generic_stmt (lhs);
4676 debug_generic_stmt (rhs1);
4677 return true;
4679 return res || verify_types_in_gimple_reference (rhs1, false);
4681 /* tcc_constant */
4682 case SSA_NAME:
4683 case INTEGER_CST:
4684 case REAL_CST:
4685 case FIXED_CST:
4686 case COMPLEX_CST:
4687 case VECTOR_CST:
4688 case STRING_CST:
4689 return res;
4691 /* tcc_declaration */
4692 case CONST_DECL:
4693 return res;
4694 case VAR_DECL:
4695 case PARM_DECL:
4696 if (!is_gimple_reg (lhs)
4697 && !is_gimple_reg (rhs1)
4698 && is_gimple_reg_type (TREE_TYPE (lhs)))
4700 error ("invalid RHS for gimple memory store: %qs", code_name);
4701 debug_generic_stmt (lhs);
4702 debug_generic_stmt (rhs1);
4703 return true;
4705 return res;
4707 case CONSTRUCTOR:
4708 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4710 unsigned int i;
4711 tree elt_i, elt_v, elt_t = NULL_TREE;
4713 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4714 return res;
4715 /* For vector CONSTRUCTORs we require that either it is empty
4716 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4717 (then the element count must be correct to cover the whole
4718 outer vector and index must be NULL on all elements, or it is
4719 a CONSTRUCTOR of scalar elements, where we as an exception allow
4720 smaller number of elements (assuming zero filling) and
4721 consecutive indexes as compared to NULL indexes (such
4722 CONSTRUCTORs can appear in the IL from FEs). */
4723 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4725 if (elt_t == NULL_TREE)
4727 elt_t = TREE_TYPE (elt_v);
4728 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4730 tree elt_t = TREE_TYPE (elt_v);
4731 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4732 TREE_TYPE (elt_t)))
4734 error ("incorrect type of vector %qs elements",
4735 code_name);
4736 debug_generic_stmt (rhs1);
4737 return true;
4739 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4740 * TYPE_VECTOR_SUBPARTS (elt_t),
4741 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4743 error ("incorrect number of vector %qs elements",
4744 code_name);
4745 debug_generic_stmt (rhs1);
4746 return true;
4749 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4750 elt_t))
4752 error ("incorrect type of vector %qs elements",
4753 code_name);
4754 debug_generic_stmt (rhs1);
4755 return true;
4757 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4758 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4760 error ("incorrect number of vector %qs elements",
4761 code_name);
4762 debug_generic_stmt (rhs1);
4763 return true;
4766 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4768 error ("incorrect type of vector CONSTRUCTOR elements");
4769 debug_generic_stmt (rhs1);
4770 return true;
4772 if (elt_i != NULL_TREE
4773 && (TREE_CODE (elt_t) == VECTOR_TYPE
4774 || TREE_CODE (elt_i) != INTEGER_CST
4775 || compare_tree_int (elt_i, i) != 0))
4777 error ("vector %qs with non-NULL element index",
4778 code_name);
4779 debug_generic_stmt (rhs1);
4780 return true;
4782 if (!is_gimple_val (elt_v))
4784 error ("vector %qs element is not a GIMPLE value",
4785 code_name);
4786 debug_generic_stmt (rhs1);
4787 return true;
4791 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4793 error ("non-vector %qs with elements", code_name);
4794 debug_generic_stmt (rhs1);
4795 return true;
4797 return res;
4799 case WITH_SIZE_EXPR:
4800 error ("%qs RHS in assignment statement",
4801 get_tree_code_name (rhs_code));
4802 debug_generic_expr (rhs1);
4803 return true;
4805 case OBJ_TYPE_REF:
4806 /* FIXME. */
4807 return res;
4809 default:;
4812 return res;
4815 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4816 is a problem, otherwise false. */
4818 static bool
4819 verify_gimple_assign (gassign *stmt)
4821 switch (gimple_assign_rhs_class (stmt))
4823 case GIMPLE_SINGLE_RHS:
4824 return verify_gimple_assign_single (stmt);
4826 case GIMPLE_UNARY_RHS:
4827 return verify_gimple_assign_unary (stmt);
4829 case GIMPLE_BINARY_RHS:
4830 return verify_gimple_assign_binary (stmt);
4832 case GIMPLE_TERNARY_RHS:
4833 return verify_gimple_assign_ternary (stmt);
4835 default:
4836 gcc_unreachable ();
4840 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4841 is a problem, otherwise false. */
4843 static bool
4844 verify_gimple_return (greturn *stmt)
4846 tree op = gimple_return_retval (stmt);
4847 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4849 /* We cannot test for present return values as we do not fix up missing
4850 return values from the original source. */
4851 if (op == NULL)
4852 return false;
4854 if (!is_gimple_val (op)
4855 && TREE_CODE (op) != RESULT_DECL)
4857 error ("invalid operand in return statement");
4858 debug_generic_stmt (op);
4859 return true;
4862 if ((TREE_CODE (op) == RESULT_DECL
4863 && DECL_BY_REFERENCE (op))
4864 || (TREE_CODE (op) == SSA_NAME
4865 && SSA_NAME_VAR (op)
4866 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4867 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4868 op = TREE_TYPE (op);
4870 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4872 error ("invalid conversion in return statement");
4873 debug_generic_stmt (restype);
4874 debug_generic_stmt (TREE_TYPE (op));
4875 return true;
4878 return false;
4882 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4883 is a problem, otherwise false. */
4885 static bool
4886 verify_gimple_goto (ggoto *stmt)
4888 tree dest = gimple_goto_dest (stmt);
4890 /* ??? We have two canonical forms of direct goto destinations, a
4891 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4892 if (TREE_CODE (dest) != LABEL_DECL
4893 && (!is_gimple_val (dest)
4894 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4896 error ("goto destination is neither a label nor a pointer");
4897 return true;
4900 return false;
4903 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4904 is a problem, otherwise false. */
4906 static bool
4907 verify_gimple_switch (gswitch *stmt)
4909 unsigned int i, n;
4910 tree elt, prev_upper_bound = NULL_TREE;
4911 tree index_type, elt_type = NULL_TREE;
4913 if (!is_gimple_val (gimple_switch_index (stmt)))
4915 error ("invalid operand to switch statement");
4916 debug_generic_stmt (gimple_switch_index (stmt));
4917 return true;
4920 index_type = TREE_TYPE (gimple_switch_index (stmt));
4921 if (! INTEGRAL_TYPE_P (index_type))
4923 error ("non-integral type switch statement");
4924 debug_generic_expr (index_type);
4925 return true;
4928 elt = gimple_switch_label (stmt, 0);
4929 if (CASE_LOW (elt) != NULL_TREE
4930 || CASE_HIGH (elt) != NULL_TREE
4931 || CASE_CHAIN (elt) != NULL_TREE)
4933 error ("invalid default case label in switch statement");
4934 debug_generic_expr (elt);
4935 return true;
4938 n = gimple_switch_num_labels (stmt);
4939 for (i = 1; i < n; i++)
4941 elt = gimple_switch_label (stmt, i);
4943 if (CASE_CHAIN (elt))
4945 error ("invalid %<CASE_CHAIN%>");
4946 debug_generic_expr (elt);
4947 return true;
4949 if (! CASE_LOW (elt))
4951 error ("invalid case label in switch statement");
4952 debug_generic_expr (elt);
4953 return true;
4955 if (CASE_HIGH (elt)
4956 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4958 error ("invalid case range in switch statement");
4959 debug_generic_expr (elt);
4960 return true;
4963 if (! elt_type)
4965 elt_type = TREE_TYPE (CASE_LOW (elt));
4966 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4968 error ("type precision mismatch in switch statement");
4969 return true;
4972 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4973 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4975 error ("type mismatch for case label in switch statement");
4976 debug_generic_expr (elt);
4977 return true;
4980 if (prev_upper_bound)
4982 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4984 error ("case labels not sorted in switch statement");
4985 return true;
4989 prev_upper_bound = CASE_HIGH (elt);
4990 if (! prev_upper_bound)
4991 prev_upper_bound = CASE_LOW (elt);
4994 return false;
4997 /* Verify a gimple debug statement STMT.
4998 Returns true if anything is wrong. */
5000 static bool
5001 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5003 /* There isn't much that could be wrong in a gimple debug stmt. A
5004 gimple debug bind stmt, for example, maps a tree, that's usually
5005 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5006 component or member of an aggregate type, to another tree, that
5007 can be an arbitrary expression. These stmts expand into debug
5008 insns, and are converted to debug notes by var-tracking.cc. */
5009 return false;
5012 /* Verify a gimple label statement STMT.
5013 Returns true if anything is wrong. */
5015 static bool
5016 verify_gimple_label (glabel *stmt)
5018 tree decl = gimple_label_label (stmt);
5019 int uid;
5020 bool err = false;
5022 if (TREE_CODE (decl) != LABEL_DECL)
5023 return true;
5024 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5025 && DECL_CONTEXT (decl) != current_function_decl)
5027 error ("label context is not the current function declaration");
5028 err |= true;
5031 uid = LABEL_DECL_UID (decl);
5032 if (cfun->cfg
5033 && (uid == -1
5034 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5036 error ("incorrect entry in %<label_to_block_map%>");
5037 err |= true;
5040 uid = EH_LANDING_PAD_NR (decl);
5041 if (uid)
5043 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5044 if (decl != lp->post_landing_pad)
5046 error ("incorrect setting of landing pad number");
5047 err |= true;
5051 return err;
5054 /* Verify a gimple cond statement STMT.
5055 Returns true if anything is wrong. */
5057 static bool
5058 verify_gimple_cond (gcond *stmt)
5060 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5062 error ("invalid comparison code in gimple cond");
5063 return true;
5065 if (!(!gimple_cond_true_label (stmt)
5066 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5067 || !(!gimple_cond_false_label (stmt)
5068 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5070 error ("invalid labels in gimple cond");
5071 return true;
5074 return verify_gimple_comparison (boolean_type_node,
5075 gimple_cond_lhs (stmt),
5076 gimple_cond_rhs (stmt),
5077 gimple_cond_code (stmt));
5080 /* Verify the GIMPLE statement STMT. Returns true if there is an
5081 error, otherwise false. */
5083 static bool
5084 verify_gimple_stmt (gimple *stmt)
5086 switch (gimple_code (stmt))
5088 case GIMPLE_ASSIGN:
5089 return verify_gimple_assign (as_a <gassign *> (stmt));
5091 case GIMPLE_LABEL:
5092 return verify_gimple_label (as_a <glabel *> (stmt));
5094 case GIMPLE_CALL:
5095 return verify_gimple_call (as_a <gcall *> (stmt));
5097 case GIMPLE_COND:
5098 return verify_gimple_cond (as_a <gcond *> (stmt));
5100 case GIMPLE_GOTO:
5101 return verify_gimple_goto (as_a <ggoto *> (stmt));
5103 case GIMPLE_SWITCH:
5104 return verify_gimple_switch (as_a <gswitch *> (stmt));
5106 case GIMPLE_RETURN:
5107 return verify_gimple_return (as_a <greturn *> (stmt));
5109 case GIMPLE_ASM:
5110 return false;
5112 case GIMPLE_TRANSACTION:
5113 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5115 /* Tuples that do not have tree operands. */
5116 case GIMPLE_NOP:
5117 case GIMPLE_PREDICT:
5118 case GIMPLE_RESX:
5119 case GIMPLE_EH_DISPATCH:
5120 case GIMPLE_EH_MUST_NOT_THROW:
5121 return false;
5123 CASE_GIMPLE_OMP:
5124 /* OpenMP directives are validated by the FE and never operated
5125 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5126 non-gimple expressions when the main index variable has had
5127 its address taken. This does not affect the loop itself
5128 because the header of an GIMPLE_OMP_FOR is merely used to determine
5129 how to setup the parallel iteration. */
5130 return false;
5132 case GIMPLE_ASSUME:
5133 return false;
5135 case GIMPLE_DEBUG:
5136 return verify_gimple_debug (stmt);
5138 default:
5139 gcc_unreachable ();
5143 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5144 and false otherwise. */
5146 static bool
5147 verify_gimple_phi (gphi *phi)
5149 bool err = false;
5150 unsigned i;
5151 tree phi_result = gimple_phi_result (phi);
5152 bool virtual_p;
5154 if (!phi_result)
5156 error ("invalid %<PHI%> result");
5157 return true;
5160 virtual_p = virtual_operand_p (phi_result);
5161 if (TREE_CODE (phi_result) != SSA_NAME
5162 || (virtual_p
5163 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5165 error ("invalid %<PHI%> result");
5166 err = true;
5169 for (i = 0; i < gimple_phi_num_args (phi); i++)
5171 tree t = gimple_phi_arg_def (phi, i);
5173 if (!t)
5175 error ("missing %<PHI%> def");
5176 err |= true;
5177 continue;
5179 /* Addressable variables do have SSA_NAMEs but they
5180 are not considered gimple values. */
5181 else if ((TREE_CODE (t) == SSA_NAME
5182 && virtual_p != virtual_operand_p (t))
5183 || (virtual_p
5184 && (TREE_CODE (t) != SSA_NAME
5185 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5186 || (!virtual_p
5187 && !is_gimple_val (t)))
5189 error ("invalid %<PHI%> argument");
5190 debug_generic_expr (t);
5191 err |= true;
5193 #ifdef ENABLE_TYPES_CHECKING
5194 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5196 error ("incompatible types in %<PHI%> argument %u", i);
5197 debug_generic_stmt (TREE_TYPE (phi_result));
5198 debug_generic_stmt (TREE_TYPE (t));
5199 err |= true;
5201 #endif
5204 return err;
5207 /* Verify the GIMPLE statements inside the sequence STMTS. */
5209 static bool
5210 verify_gimple_in_seq_2 (gimple_seq stmts)
5212 gimple_stmt_iterator ittr;
5213 bool err = false;
5215 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5217 gimple *stmt = gsi_stmt (ittr);
5219 switch (gimple_code (stmt))
5221 case GIMPLE_BIND:
5222 err |= verify_gimple_in_seq_2 (
5223 gimple_bind_body (as_a <gbind *> (stmt)));
5224 break;
5226 case GIMPLE_TRY:
5227 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5228 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5229 break;
5231 case GIMPLE_EH_FILTER:
5232 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5233 break;
5235 case GIMPLE_EH_ELSE:
5237 geh_else *eh_else = as_a <geh_else *> (stmt);
5238 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5239 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5241 break;
5243 case GIMPLE_CATCH:
5244 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5245 as_a <gcatch *> (stmt)));
5246 break;
5248 case GIMPLE_ASSUME:
5249 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5250 break;
5252 case GIMPLE_TRANSACTION:
5253 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5254 break;
5256 default:
5258 bool err2 = verify_gimple_stmt (stmt);
5259 if (err2)
5260 debug_gimple_stmt (stmt);
5261 err |= err2;
5266 return err;
5269 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5270 is a problem, otherwise false. */
5272 static bool
5273 verify_gimple_transaction (gtransaction *stmt)
5275 tree lab;
5277 lab = gimple_transaction_label_norm (stmt);
5278 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5279 return true;
5280 lab = gimple_transaction_label_uninst (stmt);
5281 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5282 return true;
5283 lab = gimple_transaction_label_over (stmt);
5284 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5285 return true;
5287 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5291 /* Verify the GIMPLE statements inside the statement list STMTS. */
5293 DEBUG_FUNCTION bool
5294 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5296 timevar_push (TV_TREE_STMT_VERIFY);
5297 bool res = verify_gimple_in_seq_2 (stmts);
5298 if (res && ice)
5299 internal_error ("%<verify_gimple%> failed");
5300 timevar_pop (TV_TREE_STMT_VERIFY);
5301 return res;
5304 /* Return true when the T can be shared. */
5306 static bool
5307 tree_node_can_be_shared (tree t)
5309 if (IS_TYPE_OR_DECL_P (t)
5310 || TREE_CODE (t) == SSA_NAME
5311 || TREE_CODE (t) == IDENTIFIER_NODE
5312 || TREE_CODE (t) == CASE_LABEL_EXPR
5313 || is_gimple_min_invariant (t))
5314 return true;
5316 if (t == error_mark_node)
5317 return true;
5319 return false;
5322 /* Called via walk_tree. Verify tree sharing. */
5324 static tree
5325 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5327 hash_set<void *> *visited = (hash_set<void *> *) data;
5329 if (tree_node_can_be_shared (*tp))
5331 *walk_subtrees = false;
5332 return NULL;
5335 if (visited->add (*tp))
5336 return *tp;
5338 return NULL;
5341 /* Called via walk_gimple_stmt. Verify tree sharing. */
5343 static tree
5344 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5346 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5347 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5350 static bool eh_error_found;
5351 bool
5352 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5353 hash_set<gimple *> *visited)
5355 if (!visited->contains (stmt))
5357 error ("dead statement in EH table");
5358 debug_gimple_stmt (stmt);
5359 eh_error_found = true;
5361 return true;
5364 /* Verify if the location LOCs block is in BLOCKS. */
5366 static bool
5367 verify_location (hash_set<tree> *blocks, location_t loc)
5369 tree block = LOCATION_BLOCK (loc);
5370 if (block != NULL_TREE
5371 && !blocks->contains (block))
5373 error ("location references block not in block tree");
5374 return true;
5376 if (block != NULL_TREE)
5377 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5378 return false;
5381 /* Called via walk_tree. Verify that expressions have no blocks. */
5383 static tree
5384 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5386 if (!EXPR_P (*tp))
5388 *walk_subtrees = false;
5389 return NULL;
5392 location_t loc = EXPR_LOCATION (*tp);
5393 if (LOCATION_BLOCK (loc) != NULL)
5394 return *tp;
5396 return NULL;
5399 /* Called via walk_tree. Verify locations of expressions. */
5401 static tree
5402 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5404 hash_set<tree> *blocks = (hash_set<tree> *) data;
5405 tree t = *tp;
5407 /* ??? This doesn't really belong here but there's no good place to
5408 stick this remainder of old verify_expr. */
5409 /* ??? This barfs on debug stmts which contain binds to vars with
5410 different function context. */
5411 #if 0
5412 if (VAR_P (t)
5413 || TREE_CODE (t) == PARM_DECL
5414 || TREE_CODE (t) == RESULT_DECL)
5416 tree context = decl_function_context (t);
5417 if (context != cfun->decl
5418 && !SCOPE_FILE_SCOPE_P (context)
5419 && !TREE_STATIC (t)
5420 && !DECL_EXTERNAL (t))
5422 error ("local declaration from a different function");
5423 return t;
5426 #endif
5428 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5430 tree x = DECL_DEBUG_EXPR (t);
5431 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5432 if (addr)
5433 return addr;
5435 if ((VAR_P (t)
5436 || TREE_CODE (t) == PARM_DECL
5437 || TREE_CODE (t) == RESULT_DECL)
5438 && DECL_HAS_VALUE_EXPR_P (t))
5440 tree x = DECL_VALUE_EXPR (t);
5441 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5442 if (addr)
5443 return addr;
5446 if (!EXPR_P (t))
5448 *walk_subtrees = false;
5449 return NULL;
5452 location_t loc = EXPR_LOCATION (t);
5453 if (verify_location (blocks, loc))
5454 return t;
5456 return NULL;
5459 /* Called via walk_gimple_op. Verify locations of expressions. */
5461 static tree
5462 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5464 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5465 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5468 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5470 static void
5471 collect_subblocks (hash_set<tree> *blocks, tree block)
5473 tree t;
5474 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5476 blocks->add (t);
5477 collect_subblocks (blocks, t);
5481 /* Disable warnings about missing quoting in GCC diagnostics for
5482 the verification errors. Their format strings don't follow
5483 GCC diagnostic conventions and trigger an ICE in the end. */
5484 #if __GNUC__ >= 10
5485 # pragma GCC diagnostic push
5486 # pragma GCC diagnostic ignored "-Wformat-diag"
5487 #endif
5489 /* Verify the GIMPLE statements in the CFG of FN. */
5491 DEBUG_FUNCTION bool
5492 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5494 basic_block bb;
5495 bool err = false;
5497 timevar_push (TV_TREE_STMT_VERIFY);
5498 hash_set<void *> visited;
5499 hash_set<gimple *> visited_throwing_stmts;
5501 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5502 hash_set<tree> blocks;
5503 if (DECL_INITIAL (fn->decl))
5505 blocks.add (DECL_INITIAL (fn->decl));
5506 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5509 FOR_EACH_BB_FN (bb, fn)
5511 gimple_stmt_iterator gsi;
5512 edge_iterator ei;
5513 edge e;
5515 for (gphi_iterator gpi = gsi_start_phis (bb);
5516 !gsi_end_p (gpi);
5517 gsi_next (&gpi))
5519 gphi *phi = gpi.phi ();
5520 bool err2 = false;
5521 unsigned i;
5523 if (gimple_bb (phi) != bb)
5525 error ("gimple_bb (phi) is set to a wrong basic block");
5526 err2 = true;
5529 err2 |= verify_gimple_phi (phi);
5531 /* Only PHI arguments have locations. */
5532 if (gimple_location (phi) != UNKNOWN_LOCATION)
5534 error ("PHI node with location");
5535 err2 = true;
5538 for (i = 0; i < gimple_phi_num_args (phi); i++)
5540 tree arg = gimple_phi_arg_def (phi, i);
5541 tree addr = walk_tree (&arg, verify_node_sharing_1,
5542 &visited, NULL);
5543 if (addr)
5545 error ("incorrect sharing of tree nodes");
5546 debug_generic_expr (addr);
5547 err2 |= true;
5549 location_t loc = gimple_phi_arg_location (phi, i);
5550 if (virtual_operand_p (gimple_phi_result (phi))
5551 && loc != UNKNOWN_LOCATION)
5553 error ("virtual PHI with argument locations");
5554 err2 = true;
5556 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5557 if (addr)
5559 debug_generic_expr (addr);
5560 err2 = true;
5562 err2 |= verify_location (&blocks, loc);
5565 if (err2)
5566 debug_gimple_stmt (phi);
5567 err |= err2;
5570 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5572 gimple *stmt = gsi_stmt (gsi);
5573 bool err2 = false;
5574 struct walk_stmt_info wi;
5575 tree addr;
5576 int lp_nr;
5578 if (gimple_bb (stmt) != bb)
5580 error ("gimple_bb (stmt) is set to a wrong basic block");
5581 err2 = true;
5584 err2 |= verify_gimple_stmt (stmt);
5585 err2 |= verify_location (&blocks, gimple_location (stmt));
5587 memset (&wi, 0, sizeof (wi));
5588 wi.info = (void *) &visited;
5589 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5590 if (addr)
5592 error ("incorrect sharing of tree nodes");
5593 debug_generic_expr (addr);
5594 err2 |= true;
5597 memset (&wi, 0, sizeof (wi));
5598 wi.info = (void *) &blocks;
5599 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5600 if (addr)
5602 debug_generic_expr (addr);
5603 err2 |= true;
5606 /* If the statement is marked as part of an EH region, then it is
5607 expected that the statement could throw. Verify that when we
5608 have optimizations that simplify statements such that we prove
5609 that they cannot throw, that we update other data structures
5610 to match. */
5611 lp_nr = lookup_stmt_eh_lp (stmt);
5612 if (lp_nr != 0)
5613 visited_throwing_stmts.add (stmt);
5614 if (lp_nr > 0)
5616 if (!stmt_could_throw_p (cfun, stmt))
5618 if (verify_nothrow)
5620 error ("statement marked for throw, but doesn%'t");
5621 err2 |= true;
5624 else if (!gsi_one_before_end_p (gsi))
5626 error ("statement marked for throw in middle of block");
5627 err2 |= true;
5631 if (err2)
5632 debug_gimple_stmt (stmt);
5633 err |= err2;
5636 FOR_EACH_EDGE (e, ei, bb->succs)
5637 if (e->goto_locus != UNKNOWN_LOCATION)
5638 err |= verify_location (&blocks, e->goto_locus);
5641 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5642 eh_error_found = false;
5643 if (eh_table)
5644 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5645 (&visited_throwing_stmts);
5647 if (ice && (err || eh_error_found))
5648 internal_error ("verify_gimple failed");
5650 verify_histograms ();
5651 timevar_pop (TV_TREE_STMT_VERIFY);
5653 return (err || eh_error_found);
5657 /* Verifies that the flow information is OK. */
5659 static int
5660 gimple_verify_flow_info (void)
5662 int err = 0;
5663 basic_block bb;
5664 gimple_stmt_iterator gsi;
5665 gimple *stmt;
5666 edge e;
5667 edge_iterator ei;
5669 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5670 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5672 error ("ENTRY_BLOCK has IL associated with it");
5673 err = 1;
5676 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5677 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5679 error ("EXIT_BLOCK has IL associated with it");
5680 err = 1;
5683 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5684 if (e->flags & EDGE_FALLTHRU)
5686 error ("fallthru to exit from bb %d", e->src->index);
5687 err = 1;
5690 FOR_EACH_BB_FN (bb, cfun)
5692 bool found_ctrl_stmt = false;
5694 stmt = NULL;
5696 /* Skip labels on the start of basic block. */
5697 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5699 tree label;
5700 gimple *prev_stmt = stmt;
5702 stmt = gsi_stmt (gsi);
5704 if (gimple_code (stmt) != GIMPLE_LABEL)
5705 break;
5707 label = gimple_label_label (as_a <glabel *> (stmt));
5708 if (prev_stmt && DECL_NONLOCAL (label))
5710 error ("nonlocal label %qD is not first in a sequence "
5711 "of labels in bb %d", label, bb->index);
5712 err = 1;
5715 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5717 error ("EH landing pad label %qD is not first in a sequence "
5718 "of labels in bb %d", label, bb->index);
5719 err = 1;
5722 if (label_to_block (cfun, label) != bb)
5724 error ("label %qD to block does not match in bb %d",
5725 label, bb->index);
5726 err = 1;
5729 if (decl_function_context (label) != current_function_decl)
5731 error ("label %qD has incorrect context in bb %d",
5732 label, bb->index);
5733 err = 1;
5737 /* Verify that body of basic block BB is free of control flow. */
5738 bool seen_nondebug_stmt = false;
5739 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5741 gimple *stmt = gsi_stmt (gsi);
5743 if (found_ctrl_stmt)
5745 error ("control flow in the middle of basic block %d",
5746 bb->index);
5747 err = 1;
5750 if (stmt_ends_bb_p (stmt))
5751 found_ctrl_stmt = true;
5753 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5755 error ("label %qD in the middle of basic block %d",
5756 gimple_label_label (label_stmt), bb->index);
5757 err = 1;
5760 /* Check that no statements appear between a returns_twice call
5761 and its associated abnormal edge. */
5762 if (gimple_code (stmt) == GIMPLE_CALL
5763 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5765 const char *misplaced = NULL;
5766 /* TM is an exception: it points abnormal edges just after the
5767 call that starts a transaction, i.e. it must end the BB. */
5768 if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5770 if (single_succ_p (bb)
5771 && bb_has_abnormal_pred (single_succ (bb))
5772 && !gsi_one_nondebug_before_end_p (gsi))
5773 misplaced = "not last";
5775 else
5777 if (seen_nondebug_stmt
5778 && bb_has_abnormal_pred (bb))
5779 misplaced = "not first";
5781 if (misplaced)
5783 error ("returns_twice call is %s in basic block %d",
5784 misplaced, bb->index);
5785 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5786 err = 1;
5789 if (!is_gimple_debug (stmt))
5790 seen_nondebug_stmt = true;
5793 gsi = gsi_last_nondebug_bb (bb);
5794 if (gsi_end_p (gsi))
5795 continue;
5797 stmt = gsi_stmt (gsi);
5799 if (gimple_code (stmt) == GIMPLE_LABEL)
5800 continue;
5802 err |= verify_eh_edges (stmt);
5804 if (is_ctrl_stmt (stmt))
5806 FOR_EACH_EDGE (e, ei, bb->succs)
5807 if (e->flags & EDGE_FALLTHRU)
5809 error ("fallthru edge after a control statement in bb %d",
5810 bb->index);
5811 err = 1;
5815 if (gimple_code (stmt) != GIMPLE_COND)
5817 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5818 after anything else but if statement. */
5819 FOR_EACH_EDGE (e, ei, bb->succs)
5820 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5822 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5823 bb->index);
5824 err = 1;
5828 switch (gimple_code (stmt))
5830 case GIMPLE_COND:
5832 edge true_edge;
5833 edge false_edge;
5835 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5837 if (!true_edge
5838 || !false_edge
5839 || !(true_edge->flags & EDGE_TRUE_VALUE)
5840 || !(false_edge->flags & EDGE_FALSE_VALUE)
5841 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5842 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5843 || EDGE_COUNT (bb->succs) >= 3)
5845 error ("wrong outgoing edge flags at end of bb %d",
5846 bb->index);
5847 err = 1;
5850 break;
5852 case GIMPLE_GOTO:
5853 if (simple_goto_p (stmt))
5855 error ("explicit goto at end of bb %d", bb->index);
5856 err = 1;
5858 else
5860 /* FIXME. We should double check that the labels in the
5861 destination blocks have their address taken. */
5862 FOR_EACH_EDGE (e, ei, bb->succs)
5863 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5864 | EDGE_FALSE_VALUE))
5865 || !(e->flags & EDGE_ABNORMAL))
5867 error ("wrong outgoing edge flags at end of bb %d",
5868 bb->index);
5869 err = 1;
5872 break;
5874 case GIMPLE_CALL:
5875 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5876 break;
5877 /* fallthru */
5878 case GIMPLE_RETURN:
5879 if (!single_succ_p (bb)
5880 || (single_succ_edge (bb)->flags
5881 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5882 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5884 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5885 err = 1;
5887 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5889 error ("return edge does not point to exit in bb %d",
5890 bb->index);
5891 err = 1;
5893 break;
5895 case GIMPLE_SWITCH:
5897 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5898 tree prev;
5899 edge e;
5900 size_t i, n;
5902 n = gimple_switch_num_labels (switch_stmt);
5904 /* Mark all the destination basic blocks. */
5905 for (i = 0; i < n; ++i)
5907 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5908 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5909 label_bb->aux = (void *)1;
5912 /* Verify that the case labels are sorted. */
5913 prev = gimple_switch_label (switch_stmt, 0);
5914 for (i = 1; i < n; ++i)
5916 tree c = gimple_switch_label (switch_stmt, i);
5917 if (!CASE_LOW (c))
5919 error ("found default case not at the start of "
5920 "case vector");
5921 err = 1;
5922 continue;
5924 if (CASE_LOW (prev)
5925 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5927 error ("case labels not sorted: ");
5928 print_generic_expr (stderr, prev);
5929 fprintf (stderr," is greater than ");
5930 print_generic_expr (stderr, c);
5931 fprintf (stderr," but comes before it.\n");
5932 err = 1;
5934 prev = c;
5936 /* VRP will remove the default case if it can prove it will
5937 never be executed. So do not verify there always exists
5938 a default case here. */
5940 FOR_EACH_EDGE (e, ei, bb->succs)
5942 if (!e->dest->aux)
5944 error ("extra outgoing edge %d->%d",
5945 bb->index, e->dest->index);
5946 err = 1;
5949 e->dest->aux = (void *)2;
5950 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5951 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5953 error ("wrong outgoing edge flags at end of bb %d",
5954 bb->index);
5955 err = 1;
5959 /* Check that we have all of them. */
5960 for (i = 0; i < n; ++i)
5962 basic_block label_bb = gimple_switch_label_bb (cfun,
5963 switch_stmt, i);
5965 if (label_bb->aux != (void *)2)
5967 error ("missing edge %i->%i", bb->index, label_bb->index);
5968 err = 1;
5972 FOR_EACH_EDGE (e, ei, bb->succs)
5973 e->dest->aux = (void *)0;
5975 break;
5977 case GIMPLE_EH_DISPATCH:
5978 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5979 break;
5981 default:
5982 break;
5986 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5987 verify_dominators (CDI_DOMINATORS);
5989 return err;
5992 #if __GNUC__ >= 10
5993 # pragma GCC diagnostic pop
5994 #endif
5996 /* Updates phi nodes after creating a forwarder block joined
5997 by edge FALLTHRU. */
5999 static void
6000 gimple_make_forwarder_block (edge fallthru)
6002 edge e;
6003 edge_iterator ei;
6004 basic_block dummy, bb;
6005 tree var;
6006 gphi_iterator gsi;
6007 bool forward_location_p;
6009 dummy = fallthru->src;
6010 bb = fallthru->dest;
6012 if (single_pred_p (bb))
6013 return;
6015 /* We can forward location info if we have only one predecessor. */
6016 forward_location_p = single_pred_p (dummy);
6018 /* If we redirected a branch we must create new PHI nodes at the
6019 start of BB. */
6020 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6022 gphi *phi, *new_phi;
6024 phi = gsi.phi ();
6025 var = gimple_phi_result (phi);
6026 new_phi = create_phi_node (var, bb);
6027 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6028 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6029 forward_location_p
6030 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6033 /* Add the arguments we have stored on edges. */
6034 FOR_EACH_EDGE (e, ei, bb->preds)
6036 if (e == fallthru)
6037 continue;
6039 flush_pending_stmts (e);
6044 /* Return a non-special label in the head of basic block BLOCK.
6045 Create one if it doesn't exist. */
6047 tree
6048 gimple_block_label (basic_block bb)
6050 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6051 bool first = true;
6052 tree label;
6053 glabel *stmt;
6055 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6057 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6058 if (!stmt)
6059 break;
6060 label = gimple_label_label (stmt);
6061 if (!DECL_NONLOCAL (label))
6063 if (!first)
6064 gsi_move_before (&i, &s);
6065 return label;
6069 label = create_artificial_label (UNKNOWN_LOCATION);
6070 stmt = gimple_build_label (label);
6071 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6072 return label;
6076 /* Attempt to perform edge redirection by replacing a possibly complex
6077 jump instruction by a goto or by removing the jump completely.
6078 This can apply only if all edges now point to the same block. The
6079 parameters and return values are equivalent to
6080 redirect_edge_and_branch. */
6082 static edge
6083 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6085 basic_block src = e->src;
6086 gimple_stmt_iterator i;
6087 gimple *stmt;
6089 /* We can replace or remove a complex jump only when we have exactly
6090 two edges. */
6091 if (EDGE_COUNT (src->succs) != 2
6092 /* Verify that all targets will be TARGET. Specifically, the
6093 edge that is not E must also go to TARGET. */
6094 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6095 return NULL;
6097 i = gsi_last_bb (src);
6098 if (gsi_end_p (i))
6099 return NULL;
6101 stmt = gsi_stmt (i);
6103 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6105 gsi_remove (&i, true);
6106 e = ssa_redirect_edge (e, target);
6107 e->flags = EDGE_FALLTHRU;
6108 return e;
6111 return NULL;
6115 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6116 edge representing the redirected branch. */
6118 static edge
6119 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6121 basic_block bb = e->src;
6122 gimple_stmt_iterator gsi;
6123 edge ret;
6124 gimple *stmt;
6126 if (e->flags & EDGE_ABNORMAL)
6127 return NULL;
6129 if (e->dest == dest)
6130 return NULL;
6132 if (e->flags & EDGE_EH)
6133 return redirect_eh_edge (e, dest);
6135 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6137 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6138 if (ret)
6139 return ret;
6142 gsi = gsi_last_nondebug_bb (bb);
6143 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6145 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6147 case GIMPLE_COND:
6148 /* For COND_EXPR, we only need to redirect the edge. */
6149 break;
6151 case GIMPLE_GOTO:
6152 /* No non-abnormal edges should lead from a non-simple goto, and
6153 simple ones should be represented implicitly. */
6154 gcc_unreachable ();
6156 case GIMPLE_SWITCH:
6158 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6159 tree label = gimple_block_label (dest);
6160 tree cases = get_cases_for_edge (e, switch_stmt);
6162 /* If we have a list of cases associated with E, then use it
6163 as it's a lot faster than walking the entire case vector. */
6164 if (cases)
6166 edge e2 = find_edge (e->src, dest);
6167 tree last, first;
6169 first = cases;
6170 while (cases)
6172 last = cases;
6173 CASE_LABEL (cases) = label;
6174 cases = CASE_CHAIN (cases);
6177 /* If there was already an edge in the CFG, then we need
6178 to move all the cases associated with E to E2. */
6179 if (e2)
6181 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6183 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6184 CASE_CHAIN (cases2) = first;
6186 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6188 else
6190 size_t i, n = gimple_switch_num_labels (switch_stmt);
6192 for (i = 0; i < n; i++)
6194 tree elt = gimple_switch_label (switch_stmt, i);
6195 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6196 CASE_LABEL (elt) = label;
6200 break;
6202 case GIMPLE_ASM:
6204 gasm *asm_stmt = as_a <gasm *> (stmt);
6205 int i, n = gimple_asm_nlabels (asm_stmt);
6206 tree label = NULL;
6208 for (i = 0; i < n; ++i)
6210 tree cons = gimple_asm_label_op (asm_stmt, i);
6211 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6213 if (!label)
6214 label = gimple_block_label (dest);
6215 TREE_VALUE (cons) = label;
6219 /* If we didn't find any label matching the former edge in the
6220 asm labels, we must be redirecting the fallthrough
6221 edge. */
6222 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6224 break;
6226 case GIMPLE_RETURN:
6227 gsi_remove (&gsi, true);
6228 e->flags |= EDGE_FALLTHRU;
6229 break;
6231 case GIMPLE_OMP_RETURN:
6232 case GIMPLE_OMP_CONTINUE:
6233 case GIMPLE_OMP_SECTIONS_SWITCH:
6234 case GIMPLE_OMP_FOR:
6235 /* The edges from OMP constructs can be simply redirected. */
6236 break;
6238 case GIMPLE_EH_DISPATCH:
6239 if (!(e->flags & EDGE_FALLTHRU))
6240 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6241 break;
6243 case GIMPLE_TRANSACTION:
6244 if (e->flags & EDGE_TM_ABORT)
6245 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6246 gimple_block_label (dest));
6247 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6248 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6249 gimple_block_label (dest));
6250 else
6251 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6252 gimple_block_label (dest));
6253 break;
6255 default:
6256 /* Otherwise it must be a fallthru edge, and we don't need to
6257 do anything besides redirecting it. */
6258 gcc_assert (e->flags & EDGE_FALLTHRU);
6259 break;
6262 /* Update/insert PHI nodes as necessary. */
6264 /* Now update the edges in the CFG. */
6265 e = ssa_redirect_edge (e, dest);
6267 return e;
6270 /* Returns true if it is possible to remove edge E by redirecting
6271 it to the destination of the other edge from E->src. */
6273 static bool
6274 gimple_can_remove_branch_p (const_edge e)
6276 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6277 return false;
6279 return true;
6282 /* Simple wrapper, as we can always redirect fallthru edges. */
6284 static basic_block
6285 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6287 e = gimple_redirect_edge_and_branch (e, dest);
6288 gcc_assert (e);
6290 return NULL;
6294 /* Splits basic block BB after statement STMT (but at least after the
6295 labels). If STMT is NULL, BB is split just after the labels. */
6297 static basic_block
6298 gimple_split_block (basic_block bb, void *stmt)
6300 gimple_stmt_iterator gsi;
6301 gimple_stmt_iterator gsi_tgt;
6302 gimple_seq list;
6303 basic_block new_bb;
6304 edge e;
6305 edge_iterator ei;
6307 new_bb = create_empty_bb (bb);
6309 /* Redirect the outgoing edges. */
6310 new_bb->succs = bb->succs;
6311 bb->succs = NULL;
6312 FOR_EACH_EDGE (e, ei, new_bb->succs)
6313 e->src = new_bb;
6315 /* Get a stmt iterator pointing to the first stmt to move. */
6316 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6317 gsi = gsi_after_labels (bb);
6318 else
6320 gsi = gsi_for_stmt ((gimple *) stmt);
6321 gsi_next (&gsi);
6324 /* Move everything from GSI to the new basic block. */
6325 if (gsi_end_p (gsi))
6326 return new_bb;
6328 /* Split the statement list - avoid re-creating new containers as this
6329 brings ugly quadratic memory consumption in the inliner.
6330 (We are still quadratic since we need to update stmt BB pointers,
6331 sadly.) */
6332 gsi_split_seq_before (&gsi, &list);
6333 set_bb_seq (new_bb, list);
6334 for (gsi_tgt = gsi_start (list);
6335 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6336 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6338 return new_bb;
6342 /* Moves basic block BB after block AFTER. */
6344 static bool
6345 gimple_move_block_after (basic_block bb, basic_block after)
6347 if (bb->prev_bb == after)
6348 return true;
6350 unlink_block (bb);
6351 link_block (bb, after);
6353 return true;
6357 /* Return TRUE if block BB has no executable statements, otherwise return
6358 FALSE. */
6360 static bool
6361 gimple_empty_block_p (basic_block bb)
6363 /* BB must have no executable statements. */
6364 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6365 if (phi_nodes (bb))
6366 return false;
6367 while (!gsi_end_p (gsi))
6369 gimple *stmt = gsi_stmt (gsi);
6370 if (is_gimple_debug (stmt))
6372 else if (gimple_code (stmt) == GIMPLE_NOP
6373 || gimple_code (stmt) == GIMPLE_PREDICT)
6375 else
6376 return false;
6377 gsi_next (&gsi);
6379 return true;
6383 /* Split a basic block if it ends with a conditional branch and if the
6384 other part of the block is not empty. */
6386 static basic_block
6387 gimple_split_block_before_cond_jump (basic_block bb)
6389 gimple *last, *split_point;
6390 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6391 if (gsi_end_p (gsi))
6392 return NULL;
6393 last = gsi_stmt (gsi);
6394 if (gimple_code (last) != GIMPLE_COND
6395 && gimple_code (last) != GIMPLE_SWITCH)
6396 return NULL;
6397 gsi_prev (&gsi);
6398 split_point = gsi_stmt (gsi);
6399 return split_block (bb, split_point)->dest;
6403 /* Return true if basic_block can be duplicated. */
6405 static bool
6406 gimple_can_duplicate_bb_p (const_basic_block bb)
6408 gimple *last = last_stmt (CONST_CAST_BB (bb));
6410 /* Do checks that can only fail for the last stmt, to minimize the work in the
6411 stmt loop. */
6412 if (last) {
6413 /* A transaction is a single entry multiple exit region. It
6414 must be duplicated in its entirety or not at all. */
6415 if (gimple_code (last) == GIMPLE_TRANSACTION)
6416 return false;
6418 /* An IFN_UNIQUE call must be duplicated as part of its group,
6419 or not at all. */
6420 if (is_gimple_call (last)
6421 && gimple_call_internal_p (last)
6422 && gimple_call_internal_unique_p (last))
6423 return false;
6426 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6427 !gsi_end_p (gsi); gsi_next (&gsi))
6429 gimple *g = gsi_stmt (gsi);
6431 /* Prohibit duplication of returns_twice calls, otherwise associated
6432 abnormal edges also need to be duplicated properly.
6433 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6434 duplicated as part of its group, or not at all.
6435 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6436 group, so the same holds there. */
6437 if (is_gimple_call (g)
6438 && (gimple_call_flags (g) & ECF_RETURNS_TWICE
6439 || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6440 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6441 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6442 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6443 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6444 return false;
6447 return true;
6450 /* Create a duplicate of the basic block BB. NOTE: This does not
6451 preserve SSA form. */
6453 static basic_block
6454 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6456 basic_block new_bb;
6457 gimple_stmt_iterator gsi_tgt;
6459 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6461 /* Copy the PHI nodes. We ignore PHI node arguments here because
6462 the incoming edges have not been setup yet. */
6463 for (gphi_iterator gpi = gsi_start_phis (bb);
6464 !gsi_end_p (gpi);
6465 gsi_next (&gpi))
6467 gphi *phi, *copy;
6468 phi = gpi.phi ();
6469 copy = create_phi_node (NULL_TREE, new_bb);
6470 create_new_def_for (gimple_phi_result (phi), copy,
6471 gimple_phi_result_ptr (copy));
6472 gimple_set_uid (copy, gimple_uid (phi));
6475 gsi_tgt = gsi_start_bb (new_bb);
6476 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6477 !gsi_end_p (gsi);
6478 gsi_next (&gsi))
6480 def_operand_p def_p;
6481 ssa_op_iter op_iter;
6482 tree lhs;
6483 gimple *stmt, *copy;
6485 stmt = gsi_stmt (gsi);
6486 if (gimple_code (stmt) == GIMPLE_LABEL)
6487 continue;
6489 /* Don't duplicate label debug stmts. */
6490 if (gimple_debug_bind_p (stmt)
6491 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6492 == LABEL_DECL)
6493 continue;
6495 /* Create a new copy of STMT and duplicate STMT's virtual
6496 operands. */
6497 copy = gimple_copy (stmt);
6498 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6500 maybe_duplicate_eh_stmt (copy, stmt);
6501 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6503 /* When copying around a stmt writing into a local non-user
6504 aggregate, make sure it won't share stack slot with other
6505 vars. */
6506 lhs = gimple_get_lhs (stmt);
6507 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6509 tree base = get_base_address (lhs);
6510 if (base
6511 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6512 && DECL_IGNORED_P (base)
6513 && !TREE_STATIC (base)
6514 && !DECL_EXTERNAL (base)
6515 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6516 DECL_NONSHAREABLE (base) = 1;
6519 /* If requested remap dependence info of cliques brought in
6520 via inlining. */
6521 if (id)
6522 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6524 tree op = gimple_op (copy, i);
6525 if (!op)
6526 continue;
6527 if (TREE_CODE (op) == ADDR_EXPR
6528 || TREE_CODE (op) == WITH_SIZE_EXPR)
6529 op = TREE_OPERAND (op, 0);
6530 while (handled_component_p (op))
6531 op = TREE_OPERAND (op, 0);
6532 if ((TREE_CODE (op) == MEM_REF
6533 || TREE_CODE (op) == TARGET_MEM_REF)
6534 && MR_DEPENDENCE_CLIQUE (op) > 1
6535 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6537 if (!id->dependence_map)
6538 id->dependence_map = new hash_map<dependence_hash,
6539 unsigned short>;
6540 bool existed;
6541 unsigned short &newc = id->dependence_map->get_or_insert
6542 (MR_DEPENDENCE_CLIQUE (op), &existed);
6543 if (!existed)
6545 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6546 newc = ++cfun->last_clique;
6548 MR_DEPENDENCE_CLIQUE (op) = newc;
6552 /* Create new names for all the definitions created by COPY and
6553 add replacement mappings for each new name. */
6554 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6555 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6558 return new_bb;
6561 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6563 static void
6564 add_phi_args_after_copy_edge (edge e_copy)
6566 basic_block bb, bb_copy = e_copy->src, dest;
6567 edge e;
6568 edge_iterator ei;
6569 gphi *phi, *phi_copy;
6570 tree def;
6571 gphi_iterator psi, psi_copy;
6573 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6574 return;
6576 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6578 if (e_copy->dest->flags & BB_DUPLICATED)
6579 dest = get_bb_original (e_copy->dest);
6580 else
6581 dest = e_copy->dest;
6583 e = find_edge (bb, dest);
6584 if (!e)
6586 /* During loop unrolling the target of the latch edge is copied.
6587 In this case we are not looking for edge to dest, but to
6588 duplicated block whose original was dest. */
6589 FOR_EACH_EDGE (e, ei, bb->succs)
6591 if ((e->dest->flags & BB_DUPLICATED)
6592 && get_bb_original (e->dest) == dest)
6593 break;
6596 gcc_assert (e != NULL);
6599 for (psi = gsi_start_phis (e->dest),
6600 psi_copy = gsi_start_phis (e_copy->dest);
6601 !gsi_end_p (psi);
6602 gsi_next (&psi), gsi_next (&psi_copy))
6604 phi = psi.phi ();
6605 phi_copy = psi_copy.phi ();
6606 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6607 add_phi_arg (phi_copy, def, e_copy,
6608 gimple_phi_arg_location_from_edge (phi, e));
6613 /* Basic block BB_COPY was created by code duplication. Add phi node
6614 arguments for edges going out of BB_COPY. The blocks that were
6615 duplicated have BB_DUPLICATED set. */
6617 void
6618 add_phi_args_after_copy_bb (basic_block bb_copy)
6620 edge e_copy;
6621 edge_iterator ei;
6623 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6625 add_phi_args_after_copy_edge (e_copy);
6629 /* Blocks in REGION_COPY array of length N_REGION were created by
6630 duplication of basic blocks. Add phi node arguments for edges
6631 going from these blocks. If E_COPY is not NULL, also add
6632 phi node arguments for its destination.*/
6634 void
6635 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6636 edge e_copy)
6638 unsigned i;
6640 for (i = 0; i < n_region; i++)
6641 region_copy[i]->flags |= BB_DUPLICATED;
6643 for (i = 0; i < n_region; i++)
6644 add_phi_args_after_copy_bb (region_copy[i]);
6645 if (e_copy)
6646 add_phi_args_after_copy_edge (e_copy);
6648 for (i = 0; i < n_region; i++)
6649 region_copy[i]->flags &= ~BB_DUPLICATED;
6652 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6653 important exit edge EXIT. By important we mean that no SSA name defined
6654 inside region is live over the other exit edges of the region. All entry
6655 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6656 to the duplicate of the region. Dominance and loop information is
6657 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6658 UPDATE_DOMINANCE is false then we assume that the caller will update the
6659 dominance information after calling this function. The new basic
6660 blocks are stored to REGION_COPY in the same order as they had in REGION,
6661 provided that REGION_COPY is not NULL.
6662 The function returns false if it is unable to copy the region,
6663 true otherwise. */
6665 bool
6666 gimple_duplicate_sese_region (edge entry, edge exit,
6667 basic_block *region, unsigned n_region,
6668 basic_block *region_copy,
6669 bool update_dominance)
6671 unsigned i;
6672 bool free_region_copy = false, copying_header = false;
6673 class loop *loop = entry->dest->loop_father;
6674 edge exit_copy;
6675 edge redirected;
6676 profile_count total_count = profile_count::uninitialized ();
6677 profile_count entry_count = profile_count::uninitialized ();
6679 if (!can_copy_bbs_p (region, n_region))
6680 return false;
6682 /* Some sanity checking. Note that we do not check for all possible
6683 missuses of the functions. I.e. if you ask to copy something weird,
6684 it will work, but the state of structures probably will not be
6685 correct. */
6686 for (i = 0; i < n_region; i++)
6688 /* We do not handle subloops, i.e. all the blocks must belong to the
6689 same loop. */
6690 if (region[i]->loop_father != loop)
6691 return false;
6693 if (region[i] != entry->dest
6694 && region[i] == loop->header)
6695 return false;
6698 /* In case the function is used for loop header copying (which is the primary
6699 use), ensure that EXIT and its copy will be new latch and entry edges. */
6700 if (loop->header == entry->dest)
6702 copying_header = true;
6704 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6705 return false;
6707 for (i = 0; i < n_region; i++)
6708 if (region[i] != exit->src
6709 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6710 return false;
6713 initialize_original_copy_tables ();
6715 if (copying_header)
6716 set_loop_copy (loop, loop_outer (loop));
6717 else
6718 set_loop_copy (loop, loop);
6720 if (!region_copy)
6722 region_copy = XNEWVEC (basic_block, n_region);
6723 free_region_copy = true;
6726 /* Record blocks outside the region that are dominated by something
6727 inside. */
6728 auto_vec<basic_block> doms;
6729 if (update_dominance)
6731 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6734 if (entry->dest->count.initialized_p ())
6736 total_count = entry->dest->count;
6737 entry_count = entry->count ();
6738 /* Fix up corner cases, to avoid division by zero or creation of negative
6739 frequencies. */
6740 if (entry_count > total_count)
6741 entry_count = total_count;
6744 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6745 split_edge_bb_loc (entry), update_dominance);
6746 if (total_count.initialized_p () && entry_count.initialized_p ())
6748 scale_bbs_frequencies_profile_count (region, n_region,
6749 total_count - entry_count,
6750 total_count);
6751 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6752 total_count);
6755 if (copying_header)
6757 loop->header = exit->dest;
6758 loop->latch = exit->src;
6761 /* Redirect the entry and add the phi node arguments. */
6762 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6763 gcc_assert (redirected != NULL);
6764 flush_pending_stmts (entry);
6766 /* Concerning updating of dominators: We must recount dominators
6767 for entry block and its copy. Anything that is outside of the
6768 region, but was dominated by something inside needs recounting as
6769 well. */
6770 if (update_dominance)
6772 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6773 doms.safe_push (get_bb_original (entry->dest));
6774 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6777 /* Add the other PHI node arguments. */
6778 add_phi_args_after_copy (region_copy, n_region, NULL);
6780 if (free_region_copy)
6781 free (region_copy);
6783 free_original_copy_tables ();
6784 return true;
6787 /* Checks if BB is part of the region defined by N_REGION BBS. */
6788 static bool
6789 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6791 unsigned int n;
6793 for (n = 0; n < n_region; n++)
6795 if (bb == bbs[n])
6796 return true;
6798 return false;
6801 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6802 are stored to REGION_COPY in the same order in that they appear
6803 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6804 the region, EXIT an exit from it. The condition guarding EXIT
6805 is moved to ENTRY. Returns true if duplication succeeds, false
6806 otherwise.
6808 For example,
6810 some_code;
6811 if (cond)
6813 else
6816 is transformed to
6818 if (cond)
6820 some_code;
6823 else
6825 some_code;
6830 bool
6831 gimple_duplicate_sese_tail (edge entry, edge exit,
6832 basic_block *region, unsigned n_region,
6833 basic_block *region_copy)
6835 unsigned i;
6836 bool free_region_copy = false;
6837 class loop *loop = exit->dest->loop_father;
6838 class loop *orig_loop = entry->dest->loop_father;
6839 basic_block switch_bb, entry_bb, nentry_bb;
6840 profile_count total_count = profile_count::uninitialized (),
6841 exit_count = profile_count::uninitialized ();
6842 edge exits[2], nexits[2], e;
6843 gimple_stmt_iterator gsi;
6844 gimple *cond_stmt;
6845 edge sorig, snew;
6846 basic_block exit_bb;
6847 gphi_iterator psi;
6848 gphi *phi;
6849 tree def;
6850 class loop *target, *aloop, *cloop;
6852 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6853 exits[0] = exit;
6854 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6856 if (!can_copy_bbs_p (region, n_region))
6857 return false;
6859 initialize_original_copy_tables ();
6860 set_loop_copy (orig_loop, loop);
6862 target= loop;
6863 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6865 if (bb_part_of_region_p (aloop->header, region, n_region))
6867 cloop = duplicate_loop (aloop, target);
6868 duplicate_subloops (aloop, cloop);
6872 if (!region_copy)
6874 region_copy = XNEWVEC (basic_block, n_region);
6875 free_region_copy = true;
6878 gcc_assert (!need_ssa_update_p (cfun));
6880 /* Record blocks outside the region that are dominated by something
6881 inside. */
6882 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6883 n_region);
6885 total_count = exit->src->count;
6886 exit_count = exit->count ();
6887 /* Fix up corner cases, to avoid division by zero or creation of negative
6888 frequencies. */
6889 if (exit_count > total_count)
6890 exit_count = total_count;
6892 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6893 split_edge_bb_loc (exit), true);
6894 if (total_count.initialized_p () && exit_count.initialized_p ())
6896 scale_bbs_frequencies_profile_count (region, n_region,
6897 total_count - exit_count,
6898 total_count);
6899 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6900 total_count);
6903 /* Create the switch block, and put the exit condition to it. */
6904 entry_bb = entry->dest;
6905 nentry_bb = get_bb_copy (entry_bb);
6906 if (!last_stmt (entry->src)
6907 || !stmt_ends_bb_p (last_stmt (entry->src)))
6908 switch_bb = entry->src;
6909 else
6910 switch_bb = split_edge (entry);
6911 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6913 gsi = gsi_last_bb (switch_bb);
6914 cond_stmt = last_stmt (exit->src);
6915 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6916 cond_stmt = gimple_copy (cond_stmt);
6918 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6920 sorig = single_succ_edge (switch_bb);
6921 sorig->flags = exits[1]->flags;
6922 sorig->probability = exits[1]->probability;
6923 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6924 snew->probability = exits[0]->probability;
6927 /* Register the new edge from SWITCH_BB in loop exit lists. */
6928 rescan_loop_exit (snew, true, false);
6930 /* Add the PHI node arguments. */
6931 add_phi_args_after_copy (region_copy, n_region, snew);
6933 /* Get rid of now superfluous conditions and associated edges (and phi node
6934 arguments). */
6935 exit_bb = exit->dest;
6937 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6938 PENDING_STMT (e) = NULL;
6940 /* The latch of ORIG_LOOP was copied, and so was the backedge
6941 to the original header. We redirect this backedge to EXIT_BB. */
6942 for (i = 0; i < n_region; i++)
6943 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6945 gcc_assert (single_succ_edge (region_copy[i]));
6946 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6947 PENDING_STMT (e) = NULL;
6948 for (psi = gsi_start_phis (exit_bb);
6949 !gsi_end_p (psi);
6950 gsi_next (&psi))
6952 phi = psi.phi ();
6953 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6954 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6957 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6958 PENDING_STMT (e) = NULL;
6960 /* Anything that is outside of the region, but was dominated by something
6961 inside needs to update dominance info. */
6962 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6964 if (free_region_copy)
6965 free (region_copy);
6967 free_original_copy_tables ();
6968 return true;
6971 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6972 adding blocks when the dominator traversal reaches EXIT. This
6973 function silently assumes that ENTRY strictly dominates EXIT. */
6975 void
6976 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6977 vec<basic_block> *bbs_p)
6979 basic_block son;
6981 for (son = first_dom_son (CDI_DOMINATORS, entry);
6982 son;
6983 son = next_dom_son (CDI_DOMINATORS, son))
6985 bbs_p->safe_push (son);
6986 if (son != exit)
6987 gather_blocks_in_sese_region (son, exit, bbs_p);
6991 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6992 The duplicates are recorded in VARS_MAP. */
6994 static void
6995 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6996 tree to_context)
6998 tree t = *tp, new_t;
6999 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7001 if (DECL_CONTEXT (t) == to_context)
7002 return;
7004 bool existed;
7005 tree &loc = vars_map->get_or_insert (t, &existed);
7007 if (!existed)
7009 if (SSA_VAR_P (t))
7011 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7012 add_local_decl (f, new_t);
7014 else
7016 gcc_assert (TREE_CODE (t) == CONST_DECL);
7017 new_t = copy_node (t);
7019 DECL_CONTEXT (new_t) = to_context;
7021 loc = new_t;
7023 else
7024 new_t = loc;
7026 *tp = new_t;
7030 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7031 VARS_MAP maps old ssa names and var_decls to the new ones. */
7033 static tree
7034 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
7035 tree to_context)
7037 tree new_name;
7039 gcc_assert (!virtual_operand_p (name));
7041 tree *loc = vars_map->get (name);
7043 if (!loc)
7045 tree decl = SSA_NAME_VAR (name);
7046 if (decl)
7048 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
7049 replace_by_duplicate_decl (&decl, vars_map, to_context);
7050 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7051 decl, SSA_NAME_DEF_STMT (name));
7053 else
7054 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7055 name, SSA_NAME_DEF_STMT (name));
7057 /* Now that we've used the def stmt to define new_name, make sure it
7058 doesn't define name anymore. */
7059 SSA_NAME_DEF_STMT (name) = NULL;
7061 vars_map->put (name, new_name);
7063 else
7064 new_name = *loc;
7066 return new_name;
7069 struct move_stmt_d
7071 tree orig_block;
7072 tree new_block;
7073 tree from_context;
7074 tree to_context;
7075 hash_map<tree, tree> *vars_map;
7076 htab_t new_label_map;
7077 hash_map<void *, void *> *eh_map;
7078 bool remap_decls_p;
7081 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7082 contained in *TP if it has been ORIG_BLOCK previously and change the
7083 DECL_CONTEXT of every local variable referenced in *TP. */
7085 static tree
7086 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
7088 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7089 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7090 tree t = *tp;
7092 if (EXPR_P (t))
7094 tree block = TREE_BLOCK (t);
7095 if (block == NULL_TREE)
7097 else if (block == p->orig_block
7098 || p->orig_block == NULL_TREE)
7100 /* tree_node_can_be_shared says we can share invariant
7101 addresses but unshare_expr copies them anyways. Make sure
7102 to unshare before adjusting the block in place - we do not
7103 always see a copy here. */
7104 if (TREE_CODE (t) == ADDR_EXPR
7105 && is_gimple_min_invariant (t))
7106 *tp = t = unshare_expr (t);
7107 TREE_SET_BLOCK (t, p->new_block);
7109 else if (flag_checking)
7111 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7112 block = BLOCK_SUPERCONTEXT (block);
7113 gcc_assert (block == p->orig_block);
7116 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7118 if (TREE_CODE (t) == SSA_NAME)
7119 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7120 else if (TREE_CODE (t) == PARM_DECL
7121 && gimple_in_ssa_p (cfun))
7122 *tp = *(p->vars_map->get (t));
7123 else if (TREE_CODE (t) == LABEL_DECL)
7125 if (p->new_label_map)
7127 struct tree_map in, *out;
7128 in.base.from = t;
7129 out = (struct tree_map *)
7130 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7131 if (out)
7132 *tp = t = out->to;
7135 /* For FORCED_LABELs we can end up with references from other
7136 functions if some SESE regions are outlined. It is UB to
7137 jump in between them, but they could be used just for printing
7138 addresses etc. In that case, DECL_CONTEXT on the label should
7139 be the function containing the glabel stmt with that LABEL_DECL,
7140 rather than whatever function a reference to the label was seen
7141 last time. */
7142 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7143 DECL_CONTEXT (t) = p->to_context;
7145 else if (p->remap_decls_p)
7147 /* Replace T with its duplicate. T should no longer appear in the
7148 parent function, so this looks wasteful; however, it may appear
7149 in referenced_vars, and more importantly, as virtual operands of
7150 statements, and in alias lists of other variables. It would be
7151 quite difficult to expunge it from all those places. ??? It might
7152 suffice to do this for addressable variables. */
7153 if ((VAR_P (t) && !is_global_var (t))
7154 || TREE_CODE (t) == CONST_DECL)
7155 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7157 *walk_subtrees = 0;
7159 else if (TYPE_P (t))
7160 *walk_subtrees = 0;
7162 return NULL_TREE;
7165 /* Helper for move_stmt_r. Given an EH region number for the source
7166 function, map that to the duplicate EH regio number in the dest. */
7168 static int
7169 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7171 eh_region old_r, new_r;
7173 old_r = get_eh_region_from_number (old_nr);
7174 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7176 return new_r->index;
7179 /* Similar, but operate on INTEGER_CSTs. */
7181 static tree
7182 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7184 int old_nr, new_nr;
7186 old_nr = tree_to_shwi (old_t_nr);
7187 new_nr = move_stmt_eh_region_nr (old_nr, p);
7189 return build_int_cst (integer_type_node, new_nr);
7192 /* Like move_stmt_op, but for gimple statements.
7194 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7195 contained in the current statement in *GSI_P and change the
7196 DECL_CONTEXT of every local variable referenced in the current
7197 statement. */
7199 static tree
7200 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7201 struct walk_stmt_info *wi)
7203 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7204 gimple *stmt = gsi_stmt (*gsi_p);
7205 tree block = gimple_block (stmt);
7207 if (block == p->orig_block
7208 || (p->orig_block == NULL_TREE
7209 && block != NULL_TREE))
7210 gimple_set_block (stmt, p->new_block);
7212 switch (gimple_code (stmt))
7214 case GIMPLE_CALL:
7215 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7217 tree r, fndecl = gimple_call_fndecl (stmt);
7218 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7219 switch (DECL_FUNCTION_CODE (fndecl))
7221 case BUILT_IN_EH_COPY_VALUES:
7222 r = gimple_call_arg (stmt, 1);
7223 r = move_stmt_eh_region_tree_nr (r, p);
7224 gimple_call_set_arg (stmt, 1, r);
7225 /* FALLTHRU */
7227 case BUILT_IN_EH_POINTER:
7228 case BUILT_IN_EH_FILTER:
7229 r = gimple_call_arg (stmt, 0);
7230 r = move_stmt_eh_region_tree_nr (r, p);
7231 gimple_call_set_arg (stmt, 0, r);
7232 break;
7234 default:
7235 break;
7238 break;
7240 case GIMPLE_RESX:
7242 gresx *resx_stmt = as_a <gresx *> (stmt);
7243 int r = gimple_resx_region (resx_stmt);
7244 r = move_stmt_eh_region_nr (r, p);
7245 gimple_resx_set_region (resx_stmt, r);
7247 break;
7249 case GIMPLE_EH_DISPATCH:
7251 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7252 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7253 r = move_stmt_eh_region_nr (r, p);
7254 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7256 break;
7258 case GIMPLE_OMP_RETURN:
7259 case GIMPLE_OMP_CONTINUE:
7260 break;
7262 case GIMPLE_LABEL:
7264 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7265 so that such labels can be referenced from other regions.
7266 Make sure to update it when seeing a GIMPLE_LABEL though,
7267 that is the owner of the label. */
7268 walk_gimple_op (stmt, move_stmt_op, wi);
7269 *handled_ops_p = true;
7270 tree label = gimple_label_label (as_a <glabel *> (stmt));
7271 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7272 DECL_CONTEXT (label) = p->to_context;
7274 break;
7276 default:
7277 if (is_gimple_omp (stmt))
7279 /* Do not remap variables inside OMP directives. Variables
7280 referenced in clauses and directive header belong to the
7281 parent function and should not be moved into the child
7282 function. */
7283 bool save_remap_decls_p = p->remap_decls_p;
7284 p->remap_decls_p = false;
7285 *handled_ops_p = true;
7287 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7288 move_stmt_op, wi);
7290 p->remap_decls_p = save_remap_decls_p;
7292 break;
7295 return NULL_TREE;
7298 /* Move basic block BB from function CFUN to function DEST_FN. The
7299 block is moved out of the original linked list and placed after
7300 block AFTER in the new list. Also, the block is removed from the
7301 original array of blocks and placed in DEST_FN's array of blocks.
7302 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7303 updated to reflect the moved edges.
7305 The local variables are remapped to new instances, VARS_MAP is used
7306 to record the mapping. */
7308 static void
7309 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7310 basic_block after, bool update_edge_count_p,
7311 struct move_stmt_d *d)
7313 struct control_flow_graph *cfg;
7314 edge_iterator ei;
7315 edge e;
7316 gimple_stmt_iterator si;
7317 unsigned old_len;
7319 /* Remove BB from dominance structures. */
7320 delete_from_dominance_info (CDI_DOMINATORS, bb);
7322 /* Move BB from its current loop to the copy in the new function. */
7323 if (current_loops)
7325 class loop *new_loop = (class loop *)bb->loop_father->aux;
7326 if (new_loop)
7327 bb->loop_father = new_loop;
7330 /* Link BB to the new linked list. */
7331 move_block_after (bb, after);
7333 /* Update the edge count in the corresponding flowgraphs. */
7334 if (update_edge_count_p)
7335 FOR_EACH_EDGE (e, ei, bb->succs)
7337 cfun->cfg->x_n_edges--;
7338 dest_cfun->cfg->x_n_edges++;
7341 /* Remove BB from the original basic block array. */
7342 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7343 cfun->cfg->x_n_basic_blocks--;
7345 /* Grow DEST_CFUN's basic block array if needed. */
7346 cfg = dest_cfun->cfg;
7347 cfg->x_n_basic_blocks++;
7348 if (bb->index >= cfg->x_last_basic_block)
7349 cfg->x_last_basic_block = bb->index + 1;
7351 old_len = vec_safe_length (cfg->x_basic_block_info);
7352 if ((unsigned) cfg->x_last_basic_block >= old_len)
7353 vec_safe_grow_cleared (cfg->x_basic_block_info,
7354 cfg->x_last_basic_block + 1);
7356 (*cfg->x_basic_block_info)[bb->index] = bb;
7358 /* Remap the variables in phi nodes. */
7359 for (gphi_iterator psi = gsi_start_phis (bb);
7360 !gsi_end_p (psi); )
7362 gphi *phi = psi.phi ();
7363 use_operand_p use;
7364 tree op = PHI_RESULT (phi);
7365 ssa_op_iter oi;
7366 unsigned i;
7368 if (virtual_operand_p (op))
7370 /* Remove the phi nodes for virtual operands (alias analysis will be
7371 run for the new function, anyway). But replace all uses that
7372 might be outside of the region we move. */
7373 use_operand_p use_p;
7374 imm_use_iterator iter;
7375 gimple *use_stmt;
7376 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7377 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7378 SET_USE (use_p, SSA_NAME_VAR (op));
7379 remove_phi_node (&psi, true);
7380 continue;
7383 SET_PHI_RESULT (phi,
7384 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7385 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7387 op = USE_FROM_PTR (use);
7388 if (TREE_CODE (op) == SSA_NAME)
7389 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7392 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7394 location_t locus = gimple_phi_arg_location (phi, i);
7395 tree block = LOCATION_BLOCK (locus);
7397 if (locus == UNKNOWN_LOCATION)
7398 continue;
7399 if (d->orig_block == NULL_TREE || block == d->orig_block)
7401 locus = set_block (locus, d->new_block);
7402 gimple_phi_arg_set_location (phi, i, locus);
7406 gsi_next (&psi);
7409 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7411 gimple *stmt = gsi_stmt (si);
7412 struct walk_stmt_info wi;
7414 memset (&wi, 0, sizeof (wi));
7415 wi.info = d;
7416 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7418 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7420 tree label = gimple_label_label (label_stmt);
7421 int uid = LABEL_DECL_UID (label);
7423 gcc_assert (uid > -1);
7425 old_len = vec_safe_length (cfg->x_label_to_block_map);
7426 if (old_len <= (unsigned) uid)
7427 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7429 (*cfg->x_label_to_block_map)[uid] = bb;
7430 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7432 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7434 if (uid >= dest_cfun->cfg->last_label_uid)
7435 dest_cfun->cfg->last_label_uid = uid + 1;
7438 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7439 remove_stmt_from_eh_lp_fn (cfun, stmt);
7441 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7442 gimple_remove_stmt_histograms (cfun, stmt);
7444 /* We cannot leave any operands allocated from the operand caches of
7445 the current function. */
7446 free_stmt_operands (cfun, stmt);
7447 push_cfun (dest_cfun);
7448 update_stmt (stmt);
7449 if (is_gimple_call (stmt))
7450 notice_special_calls (as_a <gcall *> (stmt));
7451 pop_cfun ();
7454 FOR_EACH_EDGE (e, ei, bb->succs)
7455 if (e->goto_locus != UNKNOWN_LOCATION)
7457 tree block = LOCATION_BLOCK (e->goto_locus);
7458 if (d->orig_block == NULL_TREE
7459 || block == d->orig_block)
7460 e->goto_locus = set_block (e->goto_locus, d->new_block);
7464 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7465 the outermost EH region. Use REGION as the incoming base EH region.
7466 If there is no single outermost region, return NULL and set *ALL to
7467 true. */
7469 static eh_region
7470 find_outermost_region_in_block (struct function *src_cfun,
7471 basic_block bb, eh_region region,
7472 bool *all)
7474 gimple_stmt_iterator si;
7476 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7478 gimple *stmt = gsi_stmt (si);
7479 eh_region stmt_region;
7480 int lp_nr;
7482 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7483 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7484 if (stmt_region)
7486 if (region == NULL)
7487 region = stmt_region;
7488 else if (stmt_region != region)
7490 region = eh_region_outermost (src_cfun, stmt_region, region);
7491 if (region == NULL)
7493 *all = true;
7494 return NULL;
7500 return region;
7503 static tree
7504 new_label_mapper (tree decl, void *data)
7506 htab_t hash = (htab_t) data;
7507 struct tree_map *m;
7508 void **slot;
7510 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7512 m = XNEW (struct tree_map);
7513 m->hash = DECL_UID (decl);
7514 m->base.from = decl;
7515 m->to = create_artificial_label (UNKNOWN_LOCATION);
7516 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7517 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7518 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7520 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7521 gcc_assert (*slot == NULL);
7523 *slot = m;
7525 return m->to;
7528 /* Tree walker to replace the decls used inside value expressions by
7529 duplicates. */
7531 static tree
7532 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7534 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7536 switch (TREE_CODE (*tp))
7538 case VAR_DECL:
7539 case PARM_DECL:
7540 case RESULT_DECL:
7541 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7542 break;
7543 default:
7544 break;
7547 if (IS_TYPE_OR_DECL_P (*tp))
7548 *walk_subtrees = false;
7550 return NULL;
7553 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7554 subblocks. */
7556 static void
7557 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7558 tree to_context)
7560 tree *tp, t;
7562 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7564 t = *tp;
7565 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7566 continue;
7567 replace_by_duplicate_decl (&t, vars_map, to_context);
7568 if (t != *tp)
7570 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7572 tree x = DECL_VALUE_EXPR (*tp);
7573 struct replace_decls_d rd = { vars_map, to_context };
7574 unshare_expr (x);
7575 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7576 SET_DECL_VALUE_EXPR (t, x);
7577 DECL_HAS_VALUE_EXPR_P (t) = 1;
7579 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7580 *tp = t;
7584 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7585 replace_block_vars_by_duplicates (block, vars_map, to_context);
7588 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7589 from FN1 to FN2. */
7591 static void
7592 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7593 class loop *loop)
7595 /* Discard it from the old loop array. */
7596 (*get_loops (fn1))[loop->num] = NULL;
7598 /* Place it in the new loop array, assigning it a new number. */
7599 loop->num = number_of_loops (fn2);
7600 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7602 /* Recurse to children. */
7603 for (loop = loop->inner; loop; loop = loop->next)
7604 fixup_loop_arrays_after_move (fn1, fn2, loop);
7607 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7608 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7610 DEBUG_FUNCTION void
7611 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7613 basic_block bb;
7614 edge_iterator ei;
7615 edge e;
7616 bitmap bbs = BITMAP_ALLOC (NULL);
7617 int i;
7619 gcc_assert (entry != NULL);
7620 gcc_assert (entry != exit);
7621 gcc_assert (bbs_p != NULL);
7623 gcc_assert (bbs_p->length () > 0);
7625 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7626 bitmap_set_bit (bbs, bb->index);
7628 gcc_assert (bitmap_bit_p (bbs, entry->index));
7629 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7631 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7633 if (bb == entry)
7635 gcc_assert (single_pred_p (entry));
7636 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7638 else
7639 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7641 e = ei_edge (ei);
7642 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7645 if (bb == exit)
7647 gcc_assert (single_succ_p (exit));
7648 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7650 else
7651 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7653 e = ei_edge (ei);
7654 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7658 BITMAP_FREE (bbs);
7661 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7663 bool
7664 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7666 bitmap release_names = (bitmap)data;
7668 if (TREE_CODE (from) != SSA_NAME)
7669 return true;
7671 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7672 return true;
7675 /* Return LOOP_DIST_ALIAS call if present in BB. */
7677 static gimple *
7678 find_loop_dist_alias (basic_block bb)
7680 gimple *g = last_stmt (bb);
7681 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7682 return NULL;
7684 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7685 gsi_prev (&gsi);
7686 if (gsi_end_p (gsi))
7687 return NULL;
7689 g = gsi_stmt (gsi);
7690 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7691 return g;
7692 return NULL;
7695 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7696 to VALUE and update any immediate uses of it's LHS. */
7698 void
7699 fold_loop_internal_call (gimple *g, tree value)
7701 tree lhs = gimple_call_lhs (g);
7702 use_operand_p use_p;
7703 imm_use_iterator iter;
7704 gimple *use_stmt;
7705 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7707 replace_call_with_value (&gsi, value);
7708 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7710 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7711 SET_USE (use_p, value);
7712 update_stmt (use_stmt);
7716 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7717 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7718 single basic block in the original CFG and the new basic block is
7719 returned. DEST_CFUN must not have a CFG yet.
7721 Note that the region need not be a pure SESE region. Blocks inside
7722 the region may contain calls to abort/exit. The only restriction
7723 is that ENTRY_BB should be the only entry point and it must
7724 dominate EXIT_BB.
7726 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7727 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7728 to the new function.
7730 All local variables referenced in the region are assumed to be in
7731 the corresponding BLOCK_VARS and unexpanded variable lists
7732 associated with DEST_CFUN.
7734 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7735 reimplement move_sese_region_to_fn by duplicating the region rather than
7736 moving it. */
7738 basic_block
7739 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7740 basic_block exit_bb, tree orig_block)
7742 vec<basic_block> bbs;
7743 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7744 basic_block after, bb, *entry_pred, *exit_succ, abb;
7745 struct function *saved_cfun = cfun;
7746 int *entry_flag, *exit_flag;
7747 profile_probability *entry_prob, *exit_prob;
7748 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7749 edge e;
7750 edge_iterator ei;
7751 htab_t new_label_map;
7752 hash_map<void *, void *> *eh_map;
7753 class loop *loop = entry_bb->loop_father;
7754 class loop *loop0 = get_loop (saved_cfun, 0);
7755 struct move_stmt_d d;
7757 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7758 region. */
7759 gcc_assert (entry_bb != exit_bb
7760 && (!exit_bb
7761 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7763 /* Collect all the blocks in the region. Manually add ENTRY_BB
7764 because it won't be added by dfs_enumerate_from. */
7765 bbs.create (0);
7766 bbs.safe_push (entry_bb);
7767 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7769 if (flag_checking)
7770 verify_sese (entry_bb, exit_bb, &bbs);
7772 /* The blocks that used to be dominated by something in BBS will now be
7773 dominated by the new block. */
7774 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7775 bbs.address (),
7776 bbs.length ());
7778 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7779 the predecessor edges to ENTRY_BB and the successor edges to
7780 EXIT_BB so that we can re-attach them to the new basic block that
7781 will replace the region. */
7782 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7783 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7784 entry_flag = XNEWVEC (int, num_entry_edges);
7785 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7786 i = 0;
7787 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7789 entry_prob[i] = e->probability;
7790 entry_flag[i] = e->flags;
7791 entry_pred[i++] = e->src;
7792 remove_edge (e);
7795 if (exit_bb)
7797 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7798 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7799 exit_flag = XNEWVEC (int, num_exit_edges);
7800 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7801 i = 0;
7802 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7804 exit_prob[i] = e->probability;
7805 exit_flag[i] = e->flags;
7806 exit_succ[i++] = e->dest;
7807 remove_edge (e);
7810 else
7812 num_exit_edges = 0;
7813 exit_succ = NULL;
7814 exit_flag = NULL;
7815 exit_prob = NULL;
7818 /* Switch context to the child function to initialize DEST_FN's CFG. */
7819 gcc_assert (dest_cfun->cfg == NULL);
7820 push_cfun (dest_cfun);
7822 init_empty_tree_cfg ();
7824 /* Initialize EH information for the new function. */
7825 eh_map = NULL;
7826 new_label_map = NULL;
7827 if (saved_cfun->eh)
7829 eh_region region = NULL;
7830 bool all = false;
7832 FOR_EACH_VEC_ELT (bbs, i, bb)
7834 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7835 if (all)
7836 break;
7839 init_eh_for_function ();
7840 if (region != NULL || all)
7842 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7843 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7844 new_label_mapper, new_label_map);
7848 /* Initialize an empty loop tree. */
7849 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7850 init_loops_structure (dest_cfun, loops, 1);
7851 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7852 set_loops_for_fn (dest_cfun, loops);
7854 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7856 /* Move the outlined loop tree part. */
7857 num_nodes = bbs.length ();
7858 FOR_EACH_VEC_ELT (bbs, i, bb)
7860 if (bb->loop_father->header == bb)
7862 class loop *this_loop = bb->loop_father;
7863 /* Avoid the need to remap SSA names used in nb_iterations. */
7864 free_numbers_of_iterations_estimates (this_loop);
7865 class loop *outer = loop_outer (this_loop);
7866 if (outer == loop
7867 /* If the SESE region contains some bbs ending with
7868 a noreturn call, those are considered to belong
7869 to the outermost loop in saved_cfun, rather than
7870 the entry_bb's loop_father. */
7871 || outer == loop0)
7873 if (outer != loop)
7874 num_nodes -= this_loop->num_nodes;
7875 flow_loop_tree_node_remove (bb->loop_father);
7876 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7877 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7880 else if (bb->loop_father == loop0 && loop0 != loop)
7881 num_nodes--;
7883 /* Remove loop exits from the outlined region. */
7884 if (loops_for_fn (saved_cfun)->exits)
7885 FOR_EACH_EDGE (e, ei, bb->succs)
7887 struct loops *l = loops_for_fn (saved_cfun);
7888 loop_exit **slot
7889 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7890 NO_INSERT);
7891 if (slot)
7892 l->exits->clear_slot (slot);
7896 /* Adjust the number of blocks in the tree root of the outlined part. */
7897 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7899 /* Setup a mapping to be used by move_block_to_fn. */
7900 loop->aux = current_loops->tree_root;
7901 loop0->aux = current_loops->tree_root;
7903 /* Fix up orig_loop_num. If the block referenced in it has been moved
7904 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7905 signed char *moved_orig_loop_num = NULL;
7906 for (auto dloop : loops_list (dest_cfun, 0))
7907 if (dloop->orig_loop_num)
7909 if (moved_orig_loop_num == NULL)
7910 moved_orig_loop_num
7911 = XCNEWVEC (signed char, vec_safe_length (larray));
7912 if ((*larray)[dloop->orig_loop_num] != NULL
7913 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7915 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7916 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7917 moved_orig_loop_num[dloop->orig_loop_num]++;
7918 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7920 else
7922 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7923 dloop->orig_loop_num = 0;
7926 pop_cfun ();
7928 if (moved_orig_loop_num)
7930 FOR_EACH_VEC_ELT (bbs, i, bb)
7932 gimple *g = find_loop_dist_alias (bb);
7933 if (g == NULL)
7934 continue;
7936 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7937 gcc_assert (orig_loop_num
7938 && (unsigned) orig_loop_num < vec_safe_length (larray));
7939 if (moved_orig_loop_num[orig_loop_num] == 2)
7941 /* If we have moved both loops with this orig_loop_num into
7942 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7943 too, update the first argument. */
7944 gcc_assert ((*larray)[orig_loop_num] != NULL
7945 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7946 tree t = build_int_cst (integer_type_node,
7947 (*larray)[orig_loop_num]->num);
7948 gimple_call_set_arg (g, 0, t);
7949 update_stmt (g);
7950 /* Make sure the following loop will not update it. */
7951 moved_orig_loop_num[orig_loop_num] = 0;
7953 else
7954 /* Otherwise at least one of the loops stayed in saved_cfun.
7955 Remove the LOOP_DIST_ALIAS call. */
7956 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7958 FOR_EACH_BB_FN (bb, saved_cfun)
7960 gimple *g = find_loop_dist_alias (bb);
7961 if (g == NULL)
7962 continue;
7963 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7964 gcc_assert (orig_loop_num
7965 && (unsigned) orig_loop_num < vec_safe_length (larray));
7966 if (moved_orig_loop_num[orig_loop_num])
7967 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7968 of the corresponding loops was moved, remove it. */
7969 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7971 XDELETEVEC (moved_orig_loop_num);
7973 ggc_free (larray);
7975 /* Move blocks from BBS into DEST_CFUN. */
7976 gcc_assert (bbs.length () >= 2);
7977 after = dest_cfun->cfg->x_entry_block_ptr;
7978 hash_map<tree, tree> vars_map;
7980 memset (&d, 0, sizeof (d));
7981 d.orig_block = orig_block;
7982 d.new_block = DECL_INITIAL (dest_cfun->decl);
7983 d.from_context = cfun->decl;
7984 d.to_context = dest_cfun->decl;
7985 d.vars_map = &vars_map;
7986 d.new_label_map = new_label_map;
7987 d.eh_map = eh_map;
7988 d.remap_decls_p = true;
7990 if (gimple_in_ssa_p (cfun))
7991 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7993 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7994 set_ssa_default_def (dest_cfun, arg, narg);
7995 vars_map.put (arg, narg);
7998 FOR_EACH_VEC_ELT (bbs, i, bb)
8000 /* No need to update edge counts on the last block. It has
8001 already been updated earlier when we detached the region from
8002 the original CFG. */
8003 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8004 after = bb;
8007 /* Adjust the maximum clique used. */
8008 dest_cfun->last_clique = saved_cfun->last_clique;
8010 loop->aux = NULL;
8011 loop0->aux = NULL;
8012 /* Loop sizes are no longer correct, fix them up. */
8013 loop->num_nodes -= num_nodes;
8014 for (class loop *outer = loop_outer (loop);
8015 outer; outer = loop_outer (outer))
8016 outer->num_nodes -= num_nodes;
8017 loop0->num_nodes -= bbs.length () - num_nodes;
8019 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8021 class loop *aloop;
8022 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8023 if (aloop != NULL)
8025 if (aloop->simduid)
8027 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8028 d.to_context);
8029 dest_cfun->has_simduid_loops = true;
8031 if (aloop->force_vectorize)
8032 dest_cfun->has_force_vectorize_loops = true;
8036 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8037 if (orig_block)
8039 tree block;
8040 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8041 == NULL_TREE);
8042 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8043 = BLOCK_SUBBLOCKS (orig_block);
8044 for (block = BLOCK_SUBBLOCKS (orig_block);
8045 block; block = BLOCK_CHAIN (block))
8046 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8047 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8050 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8051 &vars_map, dest_cfun->decl);
8053 if (new_label_map)
8054 htab_delete (new_label_map);
8055 if (eh_map)
8056 delete eh_map;
8058 /* We need to release ssa-names in a defined order, so first find them,
8059 and then iterate in ascending version order. */
8060 bitmap release_names = BITMAP_ALLOC (NULL);
8061 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8062 bitmap_iterator bi;
8063 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8064 release_ssa_name (ssa_name (i));
8065 BITMAP_FREE (release_names);
8067 /* Rewire the entry and exit blocks. The successor to the entry
8068 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8069 the child function. Similarly, the predecessor of DEST_FN's
8070 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8071 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8072 various CFG manipulation function get to the right CFG.
8074 FIXME, this is silly. The CFG ought to become a parameter to
8075 these helpers. */
8076 push_cfun (dest_cfun);
8077 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8078 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8079 if (exit_bb)
8081 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8082 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8084 else
8085 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8086 pop_cfun ();
8088 /* Back in the original function, the SESE region has disappeared,
8089 create a new basic block in its place. */
8090 bb = create_empty_bb (entry_pred[0]);
8091 if (current_loops)
8092 add_bb_to_loop (bb, loop);
8093 for (i = 0; i < num_entry_edges; i++)
8095 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8096 e->probability = entry_prob[i];
8099 for (i = 0; i < num_exit_edges; i++)
8101 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8102 e->probability = exit_prob[i];
8105 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8106 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8107 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8109 if (exit_bb)
8111 free (exit_prob);
8112 free (exit_flag);
8113 free (exit_succ);
8115 free (entry_prob);
8116 free (entry_flag);
8117 free (entry_pred);
8118 bbs.release ();
8120 return bb;
8123 /* Dump default def DEF to file FILE using FLAGS and indentation
8124 SPC. */
8126 static void
8127 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8129 for (int i = 0; i < spc; ++i)
8130 fprintf (file, " ");
8131 dump_ssaname_info_to_file (file, def, spc);
8133 print_generic_expr (file, TREE_TYPE (def), flags);
8134 fprintf (file, " ");
8135 print_generic_expr (file, def, flags);
8136 fprintf (file, " = ");
8137 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8138 fprintf (file, ";\n");
8141 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8143 static void
8144 print_no_sanitize_attr_value (FILE *file, tree value)
8146 unsigned int flags = tree_to_uhwi (value);
8147 bool first = true;
8148 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8150 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8152 if (!first)
8153 fprintf (file, " | ");
8154 fprintf (file, "%s", sanitizer_opts[i].name);
8155 first = false;
8160 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8163 void
8164 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8166 tree arg, var, old_current_fndecl = current_function_decl;
8167 struct function *dsf;
8168 bool ignore_topmost_bind = false, any_var = false;
8169 basic_block bb;
8170 tree chain;
8171 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8172 && decl_is_tm_clone (fndecl));
8173 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8175 tree fntype = TREE_TYPE (fndecl);
8176 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8178 for (int i = 0; i != 2; ++i)
8180 if (!attrs[i])
8181 continue;
8183 fprintf (file, "__attribute__((");
8185 bool first = true;
8186 tree chain;
8187 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8189 if (!first)
8190 fprintf (file, ", ");
8192 tree name = get_attribute_name (chain);
8193 print_generic_expr (file, name, dump_flags);
8194 if (TREE_VALUE (chain) != NULL_TREE)
8196 fprintf (file, " (");
8198 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8199 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8200 else
8201 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8202 fprintf (file, ")");
8206 fprintf (file, "))\n");
8209 current_function_decl = fndecl;
8210 if (flags & TDF_GIMPLE)
8212 static bool hotness_bb_param_printed = false;
8213 if (profile_info != NULL
8214 && !hotness_bb_param_printed)
8216 hotness_bb_param_printed = true;
8217 fprintf (file,
8218 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8219 " */\n", get_hot_bb_threshold ());
8222 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8223 dump_flags | TDF_SLIM);
8224 fprintf (file, " __GIMPLE (%s",
8225 (fun->curr_properties & PROP_ssa) ? "ssa"
8226 : (fun->curr_properties & PROP_cfg) ? "cfg"
8227 : "");
8229 if (fun && fun->cfg)
8231 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8232 if (bb->count.initialized_p ())
8233 fprintf (file, ",%s(%" PRIu64 ")",
8234 profile_quality_as_string (bb->count.quality ()),
8235 bb->count.value ());
8236 if (dump_flags & TDF_UID)
8237 fprintf (file, ")\n%sD_%u (", function_name (fun),
8238 DECL_UID (fndecl));
8239 else
8240 fprintf (file, ")\n%s (", function_name (fun));
8243 else
8245 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8246 if (dump_flags & TDF_UID)
8247 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8248 tmclone ? "[tm-clone] " : "");
8249 else
8250 fprintf (file, " %s %s(", function_name (fun),
8251 tmclone ? "[tm-clone] " : "");
8254 arg = DECL_ARGUMENTS (fndecl);
8255 while (arg)
8257 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8258 fprintf (file, " ");
8259 print_generic_expr (file, arg, dump_flags);
8260 if (DECL_CHAIN (arg))
8261 fprintf (file, ", ");
8262 arg = DECL_CHAIN (arg);
8264 fprintf (file, ")\n");
8266 dsf = DECL_STRUCT_FUNCTION (fndecl);
8267 if (dsf && (flags & TDF_EH))
8268 dump_eh_tree (file, dsf);
8270 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8272 dump_node (fndecl, TDF_SLIM | flags, file);
8273 current_function_decl = old_current_fndecl;
8274 return;
8277 /* When GIMPLE is lowered, the variables are no longer available in
8278 BIND_EXPRs, so display them separately. */
8279 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8281 unsigned ix;
8282 ignore_topmost_bind = true;
8284 fprintf (file, "{\n");
8285 if (gimple_in_ssa_p (fun)
8286 && (flags & TDF_ALIAS))
8288 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8289 arg = DECL_CHAIN (arg))
8291 tree def = ssa_default_def (fun, arg);
8292 if (def)
8293 dump_default_def (file, def, 2, flags);
8296 tree res = DECL_RESULT (fun->decl);
8297 if (res != NULL_TREE
8298 && DECL_BY_REFERENCE (res))
8300 tree def = ssa_default_def (fun, res);
8301 if (def)
8302 dump_default_def (file, def, 2, flags);
8305 tree static_chain = fun->static_chain_decl;
8306 if (static_chain != NULL_TREE)
8308 tree def = ssa_default_def (fun, static_chain);
8309 if (def)
8310 dump_default_def (file, def, 2, flags);
8314 if (!vec_safe_is_empty (fun->local_decls))
8315 FOR_EACH_LOCAL_DECL (fun, ix, var)
8317 print_generic_decl (file, var, flags);
8318 fprintf (file, "\n");
8320 any_var = true;
8323 tree name;
8325 if (gimple_in_ssa_p (fun))
8326 FOR_EACH_SSA_NAME (ix, name, fun)
8328 if (!SSA_NAME_VAR (name)
8329 /* SSA name with decls without a name still get
8330 dumped as _N, list those explicitely as well even
8331 though we've dumped the decl declaration as D.xxx
8332 above. */
8333 || !SSA_NAME_IDENTIFIER (name))
8335 fprintf (file, " ");
8336 print_generic_expr (file, TREE_TYPE (name), flags);
8337 fprintf (file, " ");
8338 print_generic_expr (file, name, flags);
8339 fprintf (file, ";\n");
8341 any_var = true;
8346 if (fun && fun->decl == fndecl
8347 && fun->cfg
8348 && basic_block_info_for_fn (fun))
8350 /* If the CFG has been built, emit a CFG-based dump. */
8351 if (!ignore_topmost_bind)
8352 fprintf (file, "{\n");
8354 if (any_var && n_basic_blocks_for_fn (fun))
8355 fprintf (file, "\n");
8357 FOR_EACH_BB_FN (bb, fun)
8358 dump_bb (file, bb, 2, flags);
8360 fprintf (file, "}\n");
8362 else if (fun && (fun->curr_properties & PROP_gimple_any))
8364 /* The function is now in GIMPLE form but the CFG has not been
8365 built yet. Emit the single sequence of GIMPLE statements
8366 that make up its body. */
8367 gimple_seq body = gimple_body (fndecl);
8369 if (gimple_seq_first_stmt (body)
8370 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8371 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8372 print_gimple_seq (file, body, 0, flags);
8373 else
8375 if (!ignore_topmost_bind)
8376 fprintf (file, "{\n");
8378 if (any_var)
8379 fprintf (file, "\n");
8381 print_gimple_seq (file, body, 2, flags);
8382 fprintf (file, "}\n");
8385 else
8387 int indent;
8389 /* Make a tree based dump. */
8390 chain = DECL_SAVED_TREE (fndecl);
8391 if (chain && TREE_CODE (chain) == BIND_EXPR)
8393 if (ignore_topmost_bind)
8395 chain = BIND_EXPR_BODY (chain);
8396 indent = 2;
8398 else
8399 indent = 0;
8401 else
8403 if (!ignore_topmost_bind)
8405 fprintf (file, "{\n");
8406 /* No topmost bind, pretend it's ignored for later. */
8407 ignore_topmost_bind = true;
8409 indent = 2;
8412 if (any_var)
8413 fprintf (file, "\n");
8415 print_generic_stmt_indented (file, chain, flags, indent);
8416 if (ignore_topmost_bind)
8417 fprintf (file, "}\n");
8420 if (flags & TDF_ENUMERATE_LOCALS)
8421 dump_enumerated_decls (file, flags);
8422 fprintf (file, "\n\n");
8424 current_function_decl = old_current_fndecl;
8427 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8429 DEBUG_FUNCTION void
8430 debug_function (tree fn, dump_flags_t flags)
8432 dump_function_to_file (fn, stderr, flags);
8436 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8438 static void
8439 print_pred_bbs (FILE *file, basic_block bb)
8441 edge e;
8442 edge_iterator ei;
8444 FOR_EACH_EDGE (e, ei, bb->preds)
8445 fprintf (file, "bb_%d ", e->src->index);
8449 /* Print on FILE the indexes for the successors of basic_block BB. */
8451 static void
8452 print_succ_bbs (FILE *file, basic_block bb)
8454 edge e;
8455 edge_iterator ei;
8457 FOR_EACH_EDGE (e, ei, bb->succs)
8458 fprintf (file, "bb_%d ", e->dest->index);
8461 /* Print to FILE the basic block BB following the VERBOSITY level. */
8463 void
8464 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8466 char *s_indent = (char *) alloca ((size_t) indent + 1);
8467 memset ((void *) s_indent, ' ', (size_t) indent);
8468 s_indent[indent] = '\0';
8470 /* Print basic_block's header. */
8471 if (verbosity >= 2)
8473 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8474 print_pred_bbs (file, bb);
8475 fprintf (file, "}, succs = {");
8476 print_succ_bbs (file, bb);
8477 fprintf (file, "})\n");
8480 /* Print basic_block's body. */
8481 if (verbosity >= 3)
8483 fprintf (file, "%s {\n", s_indent);
8484 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8485 fprintf (file, "%s }\n", s_indent);
8489 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8491 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8492 VERBOSITY level this outputs the contents of the loop, or just its
8493 structure. */
8495 static void
8496 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8498 char *s_indent;
8499 basic_block bb;
8501 if (loop == NULL)
8502 return;
8504 s_indent = (char *) alloca ((size_t) indent + 1);
8505 memset ((void *) s_indent, ' ', (size_t) indent);
8506 s_indent[indent] = '\0';
8508 /* Print loop's header. */
8509 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8510 if (loop->header)
8511 fprintf (file, "header = %d", loop->header->index);
8512 else
8514 fprintf (file, "deleted)\n");
8515 return;
8517 if (loop->latch)
8518 fprintf (file, ", latch = %d", loop->latch->index);
8519 else
8520 fprintf (file, ", multiple latches");
8521 fprintf (file, ", niter = ");
8522 print_generic_expr (file, loop->nb_iterations);
8524 if (loop->any_upper_bound)
8526 fprintf (file, ", upper_bound = ");
8527 print_decu (loop->nb_iterations_upper_bound, file);
8529 if (loop->any_likely_upper_bound)
8531 fprintf (file, ", likely_upper_bound = ");
8532 print_decu (loop->nb_iterations_likely_upper_bound, file);
8535 if (loop->any_estimate)
8537 fprintf (file, ", estimate = ");
8538 print_decu (loop->nb_iterations_estimate, file);
8540 if (loop->unroll)
8541 fprintf (file, ", unroll = %d", loop->unroll);
8542 fprintf (file, ")\n");
8544 /* Print loop's body. */
8545 if (verbosity >= 1)
8547 fprintf (file, "%s{\n", s_indent);
8548 FOR_EACH_BB_FN (bb, cfun)
8549 if (bb->loop_father == loop)
8550 print_loops_bb (file, bb, indent, verbosity);
8552 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8553 fprintf (file, "%s}\n", s_indent);
8557 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8558 spaces. Following VERBOSITY level this outputs the contents of the
8559 loop, or just its structure. */
8561 static void
8562 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8563 int verbosity)
8565 if (loop == NULL)
8566 return;
8568 print_loop (file, loop, indent, verbosity);
8569 print_loop_and_siblings (file, loop->next, indent, verbosity);
8572 /* Follow a CFG edge from the entry point of the program, and on entry
8573 of a loop, pretty print the loop structure on FILE. */
8575 void
8576 print_loops (FILE *file, int verbosity)
8578 basic_block bb;
8580 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8581 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8582 if (bb && bb->loop_father)
8583 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8586 /* Dump a loop. */
8588 DEBUG_FUNCTION void
8589 debug (class loop &ref)
8591 print_loop (stderr, &ref, 0, /*verbosity*/0);
8594 DEBUG_FUNCTION void
8595 debug (class loop *ptr)
8597 if (ptr)
8598 debug (*ptr);
8599 else
8600 fprintf (stderr, "<nil>\n");
8603 /* Dump a loop verbosely. */
8605 DEBUG_FUNCTION void
8606 debug_verbose (class loop &ref)
8608 print_loop (stderr, &ref, 0, /*verbosity*/3);
8611 DEBUG_FUNCTION void
8612 debug_verbose (class loop *ptr)
8614 if (ptr)
8615 debug (*ptr);
8616 else
8617 fprintf (stderr, "<nil>\n");
8621 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8623 DEBUG_FUNCTION void
8624 debug_loops (int verbosity)
8626 print_loops (stderr, verbosity);
8629 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8631 DEBUG_FUNCTION void
8632 debug_loop (class loop *loop, int verbosity)
8634 print_loop (stderr, loop, 0, verbosity);
8637 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8638 level. */
8640 DEBUG_FUNCTION void
8641 debug_loop_num (unsigned num, int verbosity)
8643 debug_loop (get_loop (cfun, num), verbosity);
8646 /* Return true if BB ends with a call, possibly followed by some
8647 instructions that must stay with the call. Return false,
8648 otherwise. */
8650 static bool
8651 gimple_block_ends_with_call_p (basic_block bb)
8653 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8654 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8658 /* Return true if BB ends with a conditional branch. Return false,
8659 otherwise. */
8661 static bool
8662 gimple_block_ends_with_condjump_p (const_basic_block bb)
8664 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8665 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8669 /* Return true if statement T may terminate execution of BB in ways not
8670 explicitly represtented in the CFG. */
8672 bool
8673 stmt_can_terminate_bb_p (gimple *t)
8675 tree fndecl = NULL_TREE;
8676 int call_flags = 0;
8678 /* Eh exception not handled internally terminates execution of the whole
8679 function. */
8680 if (stmt_can_throw_external (cfun, t))
8681 return true;
8683 /* NORETURN and LONGJMP calls already have an edge to exit.
8684 CONST and PURE calls do not need one.
8685 We don't currently check for CONST and PURE here, although
8686 it would be a good idea, because those attributes are
8687 figured out from the RTL in mark_constant_function, and
8688 the counter incrementation code from -fprofile-arcs
8689 leads to different results from -fbranch-probabilities. */
8690 if (is_gimple_call (t))
8692 fndecl = gimple_call_fndecl (t);
8693 call_flags = gimple_call_flags (t);
8696 if (is_gimple_call (t)
8697 && fndecl
8698 && fndecl_built_in_p (fndecl)
8699 && (call_flags & ECF_NOTHROW)
8700 && !(call_flags & ECF_RETURNS_TWICE)
8701 /* fork() doesn't really return twice, but the effect of
8702 wrapping it in __gcov_fork() which calls __gcov_dump() and
8703 __gcov_reset() and clears the counters before forking has the same
8704 effect as returning twice. Force a fake edge. */
8705 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8706 return false;
8708 if (is_gimple_call (t))
8710 edge_iterator ei;
8711 edge e;
8712 basic_block bb;
8714 if (call_flags & (ECF_PURE | ECF_CONST)
8715 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8716 return false;
8718 /* Function call may do longjmp, terminate program or do other things.
8719 Special case noreturn that have non-abnormal edges out as in this case
8720 the fact is sufficiently represented by lack of edges out of T. */
8721 if (!(call_flags & ECF_NORETURN))
8722 return true;
8724 bb = gimple_bb (t);
8725 FOR_EACH_EDGE (e, ei, bb->succs)
8726 if ((e->flags & EDGE_FAKE) == 0)
8727 return true;
8730 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8731 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8732 return true;
8734 return false;
8738 /* Add fake edges to the function exit for any non constant and non
8739 noreturn calls (or noreturn calls with EH/abnormal edges),
8740 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8741 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8742 that were split.
8744 The goal is to expose cases in which entering a basic block does
8745 not imply that all subsequent instructions must be executed. */
8747 static int
8748 gimple_flow_call_edges_add (sbitmap blocks)
8750 int i;
8751 int blocks_split = 0;
8752 int last_bb = last_basic_block_for_fn (cfun);
8753 bool check_last_block = false;
8755 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8756 return 0;
8758 if (! blocks)
8759 check_last_block = true;
8760 else
8761 check_last_block = bitmap_bit_p (blocks,
8762 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8764 /* In the last basic block, before epilogue generation, there will be
8765 a fallthru edge to EXIT. Special care is required if the last insn
8766 of the last basic block is a call because make_edge folds duplicate
8767 edges, which would result in the fallthru edge also being marked
8768 fake, which would result in the fallthru edge being removed by
8769 remove_fake_edges, which would result in an invalid CFG.
8771 Moreover, we can't elide the outgoing fake edge, since the block
8772 profiler needs to take this into account in order to solve the minimal
8773 spanning tree in the case that the call doesn't return.
8775 Handle this by adding a dummy instruction in a new last basic block. */
8776 if (check_last_block)
8778 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8779 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8780 gimple *t = NULL;
8782 if (!gsi_end_p (gsi))
8783 t = gsi_stmt (gsi);
8785 if (t && stmt_can_terminate_bb_p (t))
8787 edge e;
8789 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8790 if (e)
8792 gsi_insert_on_edge (e, gimple_build_nop ());
8793 gsi_commit_edge_inserts ();
8798 /* Now add fake edges to the function exit for any non constant
8799 calls since there is no way that we can determine if they will
8800 return or not... */
8801 for (i = 0; i < last_bb; i++)
8803 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8804 gimple_stmt_iterator gsi;
8805 gimple *stmt, *last_stmt;
8807 if (!bb)
8808 continue;
8810 if (blocks && !bitmap_bit_p (blocks, i))
8811 continue;
8813 gsi = gsi_last_nondebug_bb (bb);
8814 if (!gsi_end_p (gsi))
8816 last_stmt = gsi_stmt (gsi);
8819 stmt = gsi_stmt (gsi);
8820 if (stmt_can_terminate_bb_p (stmt))
8822 edge e;
8824 /* The handling above of the final block before the
8825 epilogue should be enough to verify that there is
8826 no edge to the exit block in CFG already.
8827 Calling make_edge in such case would cause us to
8828 mark that edge as fake and remove it later. */
8829 if (flag_checking && stmt == last_stmt)
8831 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8832 gcc_assert (e == NULL);
8835 /* Note that the following may create a new basic block
8836 and renumber the existing basic blocks. */
8837 if (stmt != last_stmt)
8839 e = split_block (bb, stmt);
8840 if (e)
8841 blocks_split++;
8843 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8844 e->probability = profile_probability::guessed_never ();
8846 gsi_prev (&gsi);
8848 while (!gsi_end_p (gsi));
8852 if (blocks_split)
8853 checking_verify_flow_info ();
8855 return blocks_split;
8858 /* Removes edge E and all the blocks dominated by it, and updates dominance
8859 information. The IL in E->src needs to be updated separately.
8860 If dominance info is not available, only the edge E is removed.*/
8862 void
8863 remove_edge_and_dominated_blocks (edge e)
8865 vec<basic_block> bbs_to_fix_dom = vNULL;
8866 edge f;
8867 edge_iterator ei;
8868 bool none_removed = false;
8869 unsigned i;
8870 basic_block bb, dbb;
8871 bitmap_iterator bi;
8873 /* If we are removing a path inside a non-root loop that may change
8874 loop ownership of blocks or remove loops. Mark loops for fixup. */
8875 if (current_loops
8876 && loop_outer (e->src->loop_father) != NULL
8877 && e->src->loop_father == e->dest->loop_father)
8878 loops_state_set (LOOPS_NEED_FIXUP);
8880 if (!dom_info_available_p (CDI_DOMINATORS))
8882 remove_edge (e);
8883 return;
8886 /* No updating is needed for edges to exit. */
8887 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8889 if (cfgcleanup_altered_bbs)
8890 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8891 remove_edge (e);
8892 return;
8895 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8896 that is not dominated by E->dest, then this set is empty. Otherwise,
8897 all the basic blocks dominated by E->dest are removed.
8899 Also, to DF_IDOM we store the immediate dominators of the blocks in
8900 the dominance frontier of E (i.e., of the successors of the
8901 removed blocks, if there are any, and of E->dest otherwise). */
8902 FOR_EACH_EDGE (f, ei, e->dest->preds)
8904 if (f == e)
8905 continue;
8907 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8909 none_removed = true;
8910 break;
8914 auto_bitmap df, df_idom;
8915 auto_vec<basic_block> bbs_to_remove;
8916 if (none_removed)
8917 bitmap_set_bit (df_idom,
8918 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8919 else
8921 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8922 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8924 FOR_EACH_EDGE (f, ei, bb->succs)
8926 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8927 bitmap_set_bit (df, f->dest->index);
8930 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8931 bitmap_clear_bit (df, bb->index);
8933 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8935 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8936 bitmap_set_bit (df_idom,
8937 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8941 if (cfgcleanup_altered_bbs)
8943 /* Record the set of the altered basic blocks. */
8944 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8945 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8948 /* Remove E and the cancelled blocks. */
8949 if (none_removed)
8950 remove_edge (e);
8951 else
8953 /* Walk backwards so as to get a chance to substitute all
8954 released DEFs into debug stmts. See
8955 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
8956 details. */
8957 for (i = bbs_to_remove.length (); i-- > 0; )
8958 delete_basic_block (bbs_to_remove[i]);
8961 /* Update the dominance information. The immediate dominator may change only
8962 for blocks whose immediate dominator belongs to DF_IDOM:
8964 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8965 removal. Let Z the arbitrary block such that idom(Z) = Y and
8966 Z dominates X after the removal. Before removal, there exists a path P
8967 from Y to X that avoids Z. Let F be the last edge on P that is
8968 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8969 dominates W, and because of P, Z does not dominate W), and W belongs to
8970 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8971 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8973 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8974 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8975 dbb;
8976 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8977 bbs_to_fix_dom.safe_push (dbb);
8980 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8982 bbs_to_fix_dom.release ();
8985 /* Purge dead EH edges from basic block BB. */
8987 bool
8988 gimple_purge_dead_eh_edges (basic_block bb)
8990 bool changed = false;
8991 edge e;
8992 edge_iterator ei;
8993 gimple *stmt = last_stmt (bb);
8995 if (stmt && stmt_can_throw_internal (cfun, stmt))
8996 return false;
8998 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9000 if (e->flags & EDGE_EH)
9002 remove_edge_and_dominated_blocks (e);
9003 changed = true;
9005 else
9006 ei_next (&ei);
9009 return changed;
9012 /* Purge dead EH edges from basic block listed in BLOCKS. */
9014 bool
9015 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9017 bool changed = false;
9018 unsigned i;
9019 bitmap_iterator bi;
9021 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9023 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9025 /* Earlier gimple_purge_dead_eh_edges could have removed
9026 this basic block already. */
9027 gcc_assert (bb || changed);
9028 if (bb != NULL)
9029 changed |= gimple_purge_dead_eh_edges (bb);
9032 return changed;
9035 /* Purge dead abnormal call edges from basic block BB. */
9037 bool
9038 gimple_purge_dead_abnormal_call_edges (basic_block bb)
9040 bool changed = false;
9041 edge e;
9042 edge_iterator ei;
9043 gimple *stmt = last_stmt (bb);
9045 if (stmt && stmt_can_make_abnormal_goto (stmt))
9046 return false;
9048 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9050 if (e->flags & EDGE_ABNORMAL)
9052 if (e->flags & EDGE_FALLTHRU)
9053 e->flags &= ~EDGE_ABNORMAL;
9054 else
9055 remove_edge_and_dominated_blocks (e);
9056 changed = true;
9058 else
9059 ei_next (&ei);
9062 return changed;
9065 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
9067 bool
9068 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
9070 bool changed = false;
9071 unsigned i;
9072 bitmap_iterator bi;
9074 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9076 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9078 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9079 this basic block already. */
9080 gcc_assert (bb || changed);
9081 if (bb != NULL)
9082 changed |= gimple_purge_dead_abnormal_call_edges (bb);
9085 return changed;
9088 /* This function is called whenever a new edge is created or
9089 redirected. */
9091 static void
9092 gimple_execute_on_growing_pred (edge e)
9094 basic_block bb = e->dest;
9096 if (!gimple_seq_empty_p (phi_nodes (bb)))
9097 reserve_phi_args_for_new_edge (bb);
9100 /* This function is called immediately before edge E is removed from
9101 the edge vector E->dest->preds. */
9103 static void
9104 gimple_execute_on_shrinking_pred (edge e)
9106 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9107 remove_phi_args (e);
9110 /*---------------------------------------------------------------------------
9111 Helper functions for Loop versioning
9112 ---------------------------------------------------------------------------*/
9114 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9115 of 'first'. Both of them are dominated by 'new_head' basic block. When
9116 'new_head' was created by 'second's incoming edge it received phi arguments
9117 on the edge by split_edge(). Later, additional edge 'e' was created to
9118 connect 'new_head' and 'first'. Now this routine adds phi args on this
9119 additional edge 'e' that new_head to second edge received as part of edge
9120 splitting. */
9122 static void
9123 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9124 basic_block new_head, edge e)
9126 gphi *phi1, *phi2;
9127 gphi_iterator psi1, psi2;
9128 tree def;
9129 edge e2 = find_edge (new_head, second);
9131 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9132 edge, we should always have an edge from NEW_HEAD to SECOND. */
9133 gcc_assert (e2 != NULL);
9135 /* Browse all 'second' basic block phi nodes and add phi args to
9136 edge 'e' for 'first' head. PHI args are always in correct order. */
9138 for (psi2 = gsi_start_phis (second),
9139 psi1 = gsi_start_phis (first);
9140 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9141 gsi_next (&psi2), gsi_next (&psi1))
9143 phi1 = psi1.phi ();
9144 phi2 = psi2.phi ();
9145 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9146 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9151 /* Adds a if else statement to COND_BB with condition COND_EXPR.
9152 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9153 the destination of the ELSE part. */
9155 static void
9156 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9157 basic_block second_head ATTRIBUTE_UNUSED,
9158 basic_block cond_bb, void *cond_e)
9160 gimple_stmt_iterator gsi;
9161 gimple *new_cond_expr;
9162 tree cond_expr = (tree) cond_e;
9163 edge e0;
9165 /* Build new conditional expr */
9166 gsi = gsi_last_bb (cond_bb);
9168 cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
9169 is_gimple_condexpr_for_cond,
9170 NULL_TREE, false,
9171 GSI_CONTINUE_LINKING);
9172 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9173 NULL_TREE, NULL_TREE);
9175 /* Add new cond in cond_bb. */
9176 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9178 /* Adjust edges appropriately to connect new head with first head
9179 as well as second head. */
9180 e0 = single_succ_edge (cond_bb);
9181 e0->flags &= ~EDGE_FALLTHRU;
9182 e0->flags |= EDGE_FALSE_VALUE;
9186 /* Do book-keeping of basic block BB for the profile consistency checker.
9187 Store the counting in RECORD. */
9188 static void
9189 gimple_account_profile_record (basic_block bb,
9190 struct profile_record *record)
9192 gimple_stmt_iterator i;
9193 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9194 gsi_next_nondebug (&i))
9196 record->size
9197 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9198 if (profile_info)
9200 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9201 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9202 && bb->count.ipa ().initialized_p ())
9203 record->time
9204 += estimate_num_insns (gsi_stmt (i),
9205 &eni_time_weights)
9206 * bb->count.ipa ().to_gcov_type ();
9208 else if (bb->count.initialized_p ()
9209 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9210 record->time
9211 += estimate_num_insns
9212 (gsi_stmt (i),
9213 &eni_time_weights)
9214 * bb->count.to_sreal_scale
9215 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9216 else
9217 record->time
9218 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9222 struct cfg_hooks gimple_cfg_hooks = {
9223 "gimple",
9224 gimple_verify_flow_info,
9225 gimple_dump_bb, /* dump_bb */
9226 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9227 create_bb, /* create_basic_block */
9228 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9229 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9230 gimple_can_remove_branch_p, /* can_remove_branch_p */
9231 remove_bb, /* delete_basic_block */
9232 gimple_split_block, /* split_block */
9233 gimple_move_block_after, /* move_block_after */
9234 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9235 gimple_merge_blocks, /* merge_blocks */
9236 gimple_predict_edge, /* predict_edge */
9237 gimple_predicted_by_p, /* predicted_by_p */
9238 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9239 gimple_duplicate_bb, /* duplicate_block */
9240 gimple_split_edge, /* split_edge */
9241 gimple_make_forwarder_block, /* make_forward_block */
9242 NULL, /* tidy_fallthru_edge */
9243 NULL, /* force_nonfallthru */
9244 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9245 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9246 gimple_flow_call_edges_add, /* flow_call_edges_add */
9247 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9248 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9249 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9250 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9251 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9252 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9253 flush_pending_stmts, /* flush_pending_stmts */
9254 gimple_empty_block_p, /* block_empty_p */
9255 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9256 gimple_account_profile_record,
9260 /* Split all critical edges. Split some extra (not necessarily critical) edges
9261 if FOR_EDGE_INSERTION_P is true. */
9263 unsigned int
9264 split_critical_edges (bool for_edge_insertion_p /* = false */)
9266 basic_block bb;
9267 edge e;
9268 edge_iterator ei;
9270 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9271 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9272 mappings around the calls to split_edge. */
9273 start_recording_case_labels ();
9274 FOR_ALL_BB_FN (bb, cfun)
9276 FOR_EACH_EDGE (e, ei, bb->succs)
9278 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9279 split_edge (e);
9280 /* PRE inserts statements to edges and expects that
9281 since split_critical_edges was done beforehand, committing edge
9282 insertions will not split more edges. In addition to critical
9283 edges we must split edges that have multiple successors and
9284 end by control flow statements, such as RESX.
9285 Go ahead and split them too. This matches the logic in
9286 gimple_find_edge_insert_loc. */
9287 else if (for_edge_insertion_p
9288 && (!single_pred_p (e->dest)
9289 || !gimple_seq_empty_p (phi_nodes (e->dest))
9290 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9291 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9292 && !(e->flags & EDGE_ABNORMAL))
9294 gimple_stmt_iterator gsi;
9296 gsi = gsi_last_bb (e->src);
9297 if (!gsi_end_p (gsi)
9298 && stmt_ends_bb_p (gsi_stmt (gsi))
9299 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9300 && !gimple_call_builtin_p (gsi_stmt (gsi),
9301 BUILT_IN_RETURN)))
9302 split_edge (e);
9306 end_recording_case_labels ();
9307 return 0;
9310 namespace {
9312 const pass_data pass_data_split_crit_edges =
9314 GIMPLE_PASS, /* type */
9315 "crited", /* name */
9316 OPTGROUP_NONE, /* optinfo_flags */
9317 TV_TREE_SPLIT_EDGES, /* tv_id */
9318 PROP_cfg, /* properties_required */
9319 PROP_no_crit_edges, /* properties_provided */
9320 0, /* properties_destroyed */
9321 0, /* todo_flags_start */
9322 0, /* todo_flags_finish */
9325 class pass_split_crit_edges : public gimple_opt_pass
9327 public:
9328 pass_split_crit_edges (gcc::context *ctxt)
9329 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9332 /* opt_pass methods: */
9333 unsigned int execute (function *) final override
9335 return split_critical_edges ();
9338 opt_pass * clone () final override
9340 return new pass_split_crit_edges (m_ctxt);
9342 }; // class pass_split_crit_edges
9344 } // anon namespace
9346 gimple_opt_pass *
9347 make_pass_split_crit_edges (gcc::context *ctxt)
9349 return new pass_split_crit_edges (ctxt);
9353 /* Insert COND expression which is GIMPLE_COND after STMT
9354 in basic block BB with appropriate basic block split
9355 and creation of a new conditionally executed basic block.
9356 Update profile so the new bb is visited with probability PROB.
9357 Return created basic block. */
9358 basic_block
9359 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9360 profile_probability prob)
9362 edge fall = split_block (bb, stmt);
9363 gimple_stmt_iterator iter = gsi_last_bb (bb);
9364 basic_block new_bb;
9366 /* Insert cond statement. */
9367 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9368 if (gsi_end_p (iter))
9369 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9370 else
9371 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9373 /* Create conditionally executed block. */
9374 new_bb = create_empty_bb (bb);
9375 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9376 e->probability = prob;
9377 new_bb->count = e->count ();
9378 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9380 /* Fix edge for split bb. */
9381 fall->flags = EDGE_FALSE_VALUE;
9382 fall->probability -= e->probability;
9384 /* Update dominance info. */
9385 if (dom_info_available_p (CDI_DOMINATORS))
9387 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9388 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9391 /* Update loop info. */
9392 if (current_loops)
9393 add_bb_to_loop (new_bb, bb->loop_father);
9395 return new_bb;
9400 /* Given a basic block B which ends with a conditional and has
9401 precisely two successors, determine which of the edges is taken if
9402 the conditional is true and which is taken if the conditional is
9403 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9405 void
9406 extract_true_false_edges_from_block (basic_block b,
9407 edge *true_edge,
9408 edge *false_edge)
9410 edge e = EDGE_SUCC (b, 0);
9412 if (e->flags & EDGE_TRUE_VALUE)
9414 *true_edge = e;
9415 *false_edge = EDGE_SUCC (b, 1);
9417 else
9419 *false_edge = e;
9420 *true_edge = EDGE_SUCC (b, 1);
9425 /* From a controlling predicate in the immediate dominator DOM of
9426 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9427 predicate evaluates to true and false and store them to
9428 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9429 they are non-NULL. Returns true if the edges can be determined,
9430 else return false. */
9432 bool
9433 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9434 edge *true_controlled_edge,
9435 edge *false_controlled_edge)
9437 basic_block bb = phiblock;
9438 edge true_edge, false_edge, tem;
9439 edge e0 = NULL, e1 = NULL;
9441 /* We have to verify that one edge into the PHI node is dominated
9442 by the true edge of the predicate block and the other edge
9443 dominated by the false edge. This ensures that the PHI argument
9444 we are going to take is completely determined by the path we
9445 take from the predicate block.
9446 We can only use BB dominance checks below if the destination of
9447 the true/false edges are dominated by their edge, thus only
9448 have a single predecessor. */
9449 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9450 tem = EDGE_PRED (bb, 0);
9451 if (tem == true_edge
9452 || (single_pred_p (true_edge->dest)
9453 && (tem->src == true_edge->dest
9454 || dominated_by_p (CDI_DOMINATORS,
9455 tem->src, true_edge->dest))))
9456 e0 = tem;
9457 else if (tem == false_edge
9458 || (single_pred_p (false_edge->dest)
9459 && (tem->src == false_edge->dest
9460 || dominated_by_p (CDI_DOMINATORS,
9461 tem->src, false_edge->dest))))
9462 e1 = tem;
9463 else
9464 return false;
9465 tem = EDGE_PRED (bb, 1);
9466 if (tem == true_edge
9467 || (single_pred_p (true_edge->dest)
9468 && (tem->src == true_edge->dest
9469 || dominated_by_p (CDI_DOMINATORS,
9470 tem->src, true_edge->dest))))
9471 e0 = tem;
9472 else if (tem == false_edge
9473 || (single_pred_p (false_edge->dest)
9474 && (tem->src == false_edge->dest
9475 || dominated_by_p (CDI_DOMINATORS,
9476 tem->src, false_edge->dest))))
9477 e1 = tem;
9478 else
9479 return false;
9480 if (!e0 || !e1)
9481 return false;
9483 if (true_controlled_edge)
9484 *true_controlled_edge = e0;
9485 if (false_controlled_edge)
9486 *false_controlled_edge = e1;
9488 return true;
9491 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9492 range [low, high]. Place associated stmts before *GSI. */
9494 void
9495 generate_range_test (basic_block bb, tree index, tree low, tree high,
9496 tree *lhs, tree *rhs)
9498 tree type = TREE_TYPE (index);
9499 tree utype = range_check_type (type);
9501 low = fold_convert (utype, low);
9502 high = fold_convert (utype, high);
9504 gimple_seq seq = NULL;
9505 index = gimple_convert (&seq, utype, index);
9506 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9507 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9509 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9510 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9513 /* Return the basic block that belongs to label numbered INDEX
9514 of a switch statement. */
9516 basic_block
9517 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9519 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9522 /* Return the default basic block of a switch statement. */
9524 basic_block
9525 gimple_switch_default_bb (function *ifun, gswitch *gs)
9527 return gimple_switch_label_bb (ifun, gs, 0);
9530 /* Return the edge that belongs to label numbered INDEX
9531 of a switch statement. */
9533 edge
9534 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9536 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9539 /* Return the default edge of a switch statement. */
9541 edge
9542 gimple_switch_default_edge (function *ifun, gswitch *gs)
9544 return gimple_switch_edge (ifun, gs, 0);
9547 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9549 bool
9550 cond_only_block_p (basic_block bb)
9552 /* BB must have no executable statements. */
9553 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9554 if (phi_nodes (bb))
9555 return false;
9556 while (!gsi_end_p (gsi))
9558 gimple *stmt = gsi_stmt (gsi);
9559 if (is_gimple_debug (stmt))
9561 else if (gimple_code (stmt) == GIMPLE_NOP
9562 || gimple_code (stmt) == GIMPLE_PREDICT
9563 || gimple_code (stmt) == GIMPLE_COND)
9565 else
9566 return false;
9567 gsi_next (&gsi);
9569 return true;
9573 /* Emit return warnings. */
9575 namespace {
9577 const pass_data pass_data_warn_function_return =
9579 GIMPLE_PASS, /* type */
9580 "*warn_function_return", /* name */
9581 OPTGROUP_NONE, /* optinfo_flags */
9582 TV_NONE, /* tv_id */
9583 PROP_cfg, /* properties_required */
9584 0, /* properties_provided */
9585 0, /* properties_destroyed */
9586 0, /* todo_flags_start */
9587 0, /* todo_flags_finish */
9590 class pass_warn_function_return : public gimple_opt_pass
9592 public:
9593 pass_warn_function_return (gcc::context *ctxt)
9594 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9597 /* opt_pass methods: */
9598 unsigned int execute (function *) final override;
9600 }; // class pass_warn_function_return
9602 unsigned int
9603 pass_warn_function_return::execute (function *fun)
9605 location_t location;
9606 gimple *last;
9607 edge e;
9608 edge_iterator ei;
9610 if (!targetm.warn_func_return (fun->decl))
9611 return 0;
9613 /* If we have a path to EXIT, then we do return. */
9614 if (TREE_THIS_VOLATILE (fun->decl)
9615 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9617 location = UNKNOWN_LOCATION;
9618 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9619 (e = ei_safe_edge (ei)); )
9621 last = last_stmt (e->src);
9622 if ((gimple_code (last) == GIMPLE_RETURN
9623 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9624 && location == UNKNOWN_LOCATION
9625 && ((location = LOCATION_LOCUS (gimple_location (last)))
9626 != UNKNOWN_LOCATION)
9627 && !optimize)
9628 break;
9629 /* When optimizing, replace return stmts in noreturn functions
9630 with __builtin_unreachable () call. */
9631 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9633 location_t loc = gimple_location (last);
9634 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
9635 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9636 gsi_replace (&gsi, new_stmt, true);
9637 remove_edge (e);
9639 else
9640 ei_next (&ei);
9642 if (location == UNKNOWN_LOCATION)
9643 location = cfun->function_end_locus;
9644 warning_at (location, 0, "%<noreturn%> function does return");
9647 /* If we see "return;" in some basic block, then we do reach the end
9648 without returning a value. */
9649 else if (warn_return_type > 0
9650 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9651 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9653 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9655 gimple *last = last_stmt (e->src);
9656 greturn *return_stmt = dyn_cast <greturn *> (last);
9657 if (return_stmt
9658 && gimple_return_retval (return_stmt) == NULL
9659 && !warning_suppressed_p (last, OPT_Wreturn_type))
9661 location = gimple_location (last);
9662 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9663 location = fun->function_end_locus;
9664 if (warning_at (location, OPT_Wreturn_type,
9665 "control reaches end of non-void function"))
9666 suppress_warning (fun->decl, OPT_Wreturn_type);
9667 break;
9670 /* The C++ FE turns fallthrough from the end of non-void function
9671 into __builtin_unreachable () call with BUILTINS_LOCATION.
9672 Recognize those as well as calls from ubsan_instrument_return. */
9673 basic_block bb;
9674 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9675 FOR_EACH_BB_FN (bb, fun)
9676 if (EDGE_COUNT (bb->succs) == 0)
9678 gimple *last = last_stmt (bb);
9679 const enum built_in_function ubsan_missing_ret
9680 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9681 if (last
9682 && ((LOCATION_LOCUS (gimple_location (last))
9683 == BUILTINS_LOCATION
9684 && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
9685 || gimple_call_builtin_p (last,
9686 BUILT_IN_UNREACHABLE_TRAP)
9687 || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
9688 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9690 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9691 gsi_prev_nondebug (&gsi);
9692 gimple *prev = gsi_stmt (gsi);
9693 if (prev == NULL)
9694 location = UNKNOWN_LOCATION;
9695 else
9696 location = gimple_location (prev);
9697 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9698 location = fun->function_end_locus;
9699 if (warning_at (location, OPT_Wreturn_type,
9700 "control reaches end of non-void function"))
9701 suppress_warning (fun->decl, OPT_Wreturn_type);
9702 break;
9706 return 0;
9709 } // anon namespace
9711 gimple_opt_pass *
9712 make_pass_warn_function_return (gcc::context *ctxt)
9714 return new pass_warn_function_return (ctxt);
9717 /* Walk a gimplified function and warn for functions whose return value is
9718 ignored and attribute((warn_unused_result)) is set. This is done before
9719 inlining, so we don't have to worry about that. */
9721 static void
9722 do_warn_unused_result (gimple_seq seq)
9724 tree fdecl, ftype;
9725 gimple_stmt_iterator i;
9727 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9729 gimple *g = gsi_stmt (i);
9731 switch (gimple_code (g))
9733 case GIMPLE_BIND:
9734 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9735 break;
9736 case GIMPLE_TRY:
9737 do_warn_unused_result (gimple_try_eval (g));
9738 do_warn_unused_result (gimple_try_cleanup (g));
9739 break;
9740 case GIMPLE_CATCH:
9741 do_warn_unused_result (gimple_catch_handler (
9742 as_a <gcatch *> (g)));
9743 break;
9744 case GIMPLE_EH_FILTER:
9745 do_warn_unused_result (gimple_eh_filter_failure (g));
9746 break;
9748 case GIMPLE_CALL:
9749 if (gimple_call_lhs (g))
9750 break;
9751 if (gimple_call_internal_p (g))
9752 break;
9754 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9755 LHS. All calls whose value is ignored should be
9756 represented like this. Look for the attribute. */
9757 fdecl = gimple_call_fndecl (g);
9758 ftype = gimple_call_fntype (g);
9760 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9762 location_t loc = gimple_location (g);
9764 if (fdecl)
9765 warning_at (loc, OPT_Wunused_result,
9766 "ignoring return value of %qD "
9767 "declared with attribute %<warn_unused_result%>",
9768 fdecl);
9769 else
9770 warning_at (loc, OPT_Wunused_result,
9771 "ignoring return value of function "
9772 "declared with attribute %<warn_unused_result%>");
9774 break;
9776 default:
9777 /* Not a container, not a call, or a call whose value is used. */
9778 break;
9783 namespace {
9785 const pass_data pass_data_warn_unused_result =
9787 GIMPLE_PASS, /* type */
9788 "*warn_unused_result", /* name */
9789 OPTGROUP_NONE, /* optinfo_flags */
9790 TV_NONE, /* tv_id */
9791 PROP_gimple_any, /* properties_required */
9792 0, /* properties_provided */
9793 0, /* properties_destroyed */
9794 0, /* todo_flags_start */
9795 0, /* todo_flags_finish */
9798 class pass_warn_unused_result : public gimple_opt_pass
9800 public:
9801 pass_warn_unused_result (gcc::context *ctxt)
9802 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9805 /* opt_pass methods: */
9806 bool gate (function *) final override { return flag_warn_unused_result; }
9807 unsigned int execute (function *) final override
9809 do_warn_unused_result (gimple_body (current_function_decl));
9810 return 0;
9813 }; // class pass_warn_unused_result
9815 } // anon namespace
9817 gimple_opt_pass *
9818 make_pass_warn_unused_result (gcc::context *ctxt)
9820 return new pass_warn_unused_result (ctxt);
9823 /* Maybe Remove stores to variables we marked write-only.
9824 Return true if a store was removed. */
9825 static bool
9826 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9827 bitmap dce_ssa_names)
9829 /* Keep access when store has side effect, i.e. in case when source
9830 is volatile. */
9831 if (!gimple_store_p (stmt)
9832 || gimple_has_side_effects (stmt)
9833 || optimize_debug)
9834 return false;
9836 tree lhs = get_base_address (gimple_get_lhs (stmt));
9838 if (!VAR_P (lhs)
9839 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9840 || !varpool_node::get (lhs)->writeonly)
9841 return false;
9843 if (dump_file && (dump_flags & TDF_DETAILS))
9845 fprintf (dump_file, "Removing statement, writes"
9846 " to write only var:\n");
9847 print_gimple_stmt (dump_file, stmt, 0,
9848 TDF_VOPS|TDF_MEMSYMS);
9851 /* Mark ssa name defining to be checked for simple dce. */
9852 if (gimple_assign_single_p (stmt))
9854 tree rhs = gimple_assign_rhs1 (stmt);
9855 if (TREE_CODE (rhs) == SSA_NAME
9856 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9857 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9859 unlink_stmt_vdef (stmt);
9860 gsi_remove (&gsi, true);
9861 release_defs (stmt);
9862 return true;
9865 /* IPA passes, compilation of earlier functions or inlining
9866 might have changed some properties, such as marked functions nothrow,
9867 pure, const or noreturn.
9868 Remove redundant edges and basic blocks, and create new ones if necessary. */
9870 unsigned int
9871 execute_fixup_cfg (void)
9873 basic_block bb;
9874 gimple_stmt_iterator gsi;
9875 int todo = 0;
9876 cgraph_node *node = cgraph_node::get (current_function_decl);
9877 /* Same scaling is also done by ipa_merge_profiles. */
9878 profile_count num = node->count;
9879 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9880 bool scale = num.initialized_p () && !(num == den);
9881 auto_bitmap dce_ssa_names;
9883 if (scale)
9885 profile_count::adjust_for_ipa_scaling (&num, &den);
9886 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9887 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9888 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9891 FOR_EACH_BB_FN (bb, cfun)
9893 if (scale)
9894 bb->count = bb->count.apply_scale (num, den);
9895 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9897 gimple *stmt = gsi_stmt (gsi);
9898 tree decl = is_gimple_call (stmt)
9899 ? gimple_call_fndecl (stmt)
9900 : NULL;
9901 if (decl)
9903 int flags = gimple_call_flags (stmt);
9904 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9906 if (gimple_in_ssa_p (cfun))
9908 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9909 update_stmt (stmt);
9912 if (flags & ECF_NORETURN
9913 && fixup_noreturn_call (stmt))
9914 todo |= TODO_cleanup_cfg;
9917 /* Remove stores to variables we marked write-only. */
9918 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9920 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9921 continue;
9924 /* For calls we can simply remove LHS when it is known
9925 to be write-only. */
9926 if (is_gimple_call (stmt)
9927 && gimple_get_lhs (stmt))
9929 tree lhs = get_base_address (gimple_get_lhs (stmt));
9931 if (VAR_P (lhs)
9932 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9933 && varpool_node::get (lhs)->writeonly)
9935 gimple_call_set_lhs (stmt, NULL);
9936 update_stmt (stmt);
9937 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9941 gsi_next (&gsi);
9943 if (gimple *last = last_stmt (bb))
9945 if (maybe_clean_eh_stmt (last)
9946 && gimple_purge_dead_eh_edges (bb))
9947 todo |= TODO_cleanup_cfg;
9948 if (gimple_purge_dead_abnormal_call_edges (bb))
9949 todo |= TODO_cleanup_cfg;
9952 /* If we have a basic block with no successors that does not
9953 end with a control statement or a noreturn call end it with
9954 a call to __builtin_unreachable. This situation can occur
9955 when inlining a noreturn call that does in fact return. */
9956 if (EDGE_COUNT (bb->succs) == 0)
9958 gimple *stmt = last_stmt (bb);
9959 if (!stmt
9960 || (!is_ctrl_stmt (stmt)
9961 && (!is_gimple_call (stmt)
9962 || !gimple_call_noreturn_p (stmt))))
9964 if (stmt && is_gimple_call (stmt))
9965 gimple_call_set_ctrl_altering (stmt, false);
9966 stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
9967 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9968 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9969 if (!cfun->after_inlining)
9970 if (tree fndecl = gimple_call_fndecl (stmt))
9972 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9973 node->create_edge (cgraph_node::get_create (fndecl),
9974 call_stmt, bb->count);
9979 if (scale)
9981 update_max_bb_count ();
9982 compute_function_frequency ();
9985 if (current_loops
9986 && (todo & TODO_cleanup_cfg))
9987 loops_state_set (LOOPS_NEED_FIXUP);
9989 simple_dce_from_worklist (dce_ssa_names);
9991 return todo;
9994 namespace {
9996 const pass_data pass_data_fixup_cfg =
9998 GIMPLE_PASS, /* type */
9999 "fixup_cfg", /* name */
10000 OPTGROUP_NONE, /* optinfo_flags */
10001 TV_NONE, /* tv_id */
10002 PROP_cfg, /* properties_required */
10003 0, /* properties_provided */
10004 0, /* properties_destroyed */
10005 0, /* todo_flags_start */
10006 0, /* todo_flags_finish */
10009 class pass_fixup_cfg : public gimple_opt_pass
10011 public:
10012 pass_fixup_cfg (gcc::context *ctxt)
10013 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
10016 /* opt_pass methods: */
10017 opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
10018 unsigned int execute (function *) final override
10020 return execute_fixup_cfg ();
10023 }; // class pass_fixup_cfg
10025 } // anon namespace
10027 gimple_opt_pass *
10028 make_pass_fixup_cfg (gcc::context *ctxt)
10030 return new pass_fixup_cfg (ctxt);
10033 /* Garbage collection support for edge_def. */
10035 extern void gt_ggc_mx (tree&);
10036 extern void gt_ggc_mx (gimple *&);
10037 extern void gt_ggc_mx (rtx&);
10038 extern void gt_ggc_mx (basic_block&);
10040 static void
10041 gt_ggc_mx (rtx_insn *& x)
10043 if (x)
10044 gt_ggc_mx_rtx_def ((void *) x);
10047 void
10048 gt_ggc_mx (edge_def *e)
10050 tree block = LOCATION_BLOCK (e->goto_locus);
10051 gt_ggc_mx (e->src);
10052 gt_ggc_mx (e->dest);
10053 if (current_ir_type () == IR_GIMPLE)
10054 gt_ggc_mx (e->insns.g);
10055 else
10056 gt_ggc_mx (e->insns.r);
10057 gt_ggc_mx (block);
10060 /* PCH support for edge_def. */
10062 extern void gt_pch_nx (tree&);
10063 extern void gt_pch_nx (gimple *&);
10064 extern void gt_pch_nx (rtx&);
10065 extern void gt_pch_nx (basic_block&);
10067 static void
10068 gt_pch_nx (rtx_insn *& x)
10070 if (x)
10071 gt_pch_nx_rtx_def ((void *) x);
10074 void
10075 gt_pch_nx (edge_def *e)
10077 tree block = LOCATION_BLOCK (e->goto_locus);
10078 gt_pch_nx (e->src);
10079 gt_pch_nx (e->dest);
10080 if (current_ir_type () == IR_GIMPLE)
10081 gt_pch_nx (e->insns.g);
10082 else
10083 gt_pch_nx (e->insns.r);
10084 gt_pch_nx (block);
10087 void
10088 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
10090 tree block = LOCATION_BLOCK (e->goto_locus);
10091 op (&(e->src), NULL, cookie);
10092 op (&(e->dest), NULL, cookie);
10093 if (current_ir_type () == IR_GIMPLE)
10094 op (&(e->insns.g), NULL, cookie);
10095 else
10096 op (&(e->insns.r), NULL, cookie);
10097 op (&(block), &(block), cookie);
10100 #if CHECKING_P
10102 namespace selftest {
10104 /* Helper function for CFG selftests: create a dummy function decl
10105 and push it as cfun. */
10107 static tree
10108 push_fndecl (const char *name)
10110 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
10111 /* FIXME: this uses input_location: */
10112 tree fndecl = build_fn_decl (name, fn_type);
10113 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
10114 NULL_TREE, integer_type_node);
10115 DECL_RESULT (fndecl) = retval;
10116 push_struct_function (fndecl);
10117 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10118 ASSERT_TRUE (fun != NULL);
10119 init_empty_tree_cfg_for_function (fun);
10120 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
10121 ASSERT_EQ (0, n_edges_for_fn (fun));
10122 return fndecl;
10125 /* These tests directly create CFGs.
10126 Compare with the static fns within tree-cfg.cc:
10127 - build_gimple_cfg
10128 - make_blocks: calls create_basic_block (seq, bb);
10129 - make_edges. */
10131 /* Verify a simple cfg of the form:
10132 ENTRY -> A -> B -> C -> EXIT. */
10134 static void
10135 test_linear_chain ()
10137 gimple_register_cfg_hooks ();
10139 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10140 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10142 /* Create some empty blocks. */
10143 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10144 basic_block bb_b = create_empty_bb (bb_a);
10145 basic_block bb_c = create_empty_bb (bb_b);
10147 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10148 ASSERT_EQ (0, n_edges_for_fn (fun));
10150 /* Create some edges: a simple linear chain of BBs. */
10151 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10152 make_edge (bb_a, bb_b, 0);
10153 make_edge (bb_b, bb_c, 0);
10154 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10156 /* Verify the edges. */
10157 ASSERT_EQ (4, n_edges_for_fn (fun));
10158 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10159 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10160 ASSERT_EQ (1, bb_a->preds->length ());
10161 ASSERT_EQ (1, bb_a->succs->length ());
10162 ASSERT_EQ (1, bb_b->preds->length ());
10163 ASSERT_EQ (1, bb_b->succs->length ());
10164 ASSERT_EQ (1, bb_c->preds->length ());
10165 ASSERT_EQ (1, bb_c->succs->length ());
10166 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10167 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10169 /* Verify the dominance information
10170 Each BB in our simple chain should be dominated by the one before
10171 it. */
10172 calculate_dominance_info (CDI_DOMINATORS);
10173 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10174 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10175 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10176 ASSERT_EQ (1, dom_by_b.length ());
10177 ASSERT_EQ (bb_c, dom_by_b[0]);
10178 free_dominance_info (CDI_DOMINATORS);
10180 /* Similarly for post-dominance: each BB in our chain is post-dominated
10181 by the one after it. */
10182 calculate_dominance_info (CDI_POST_DOMINATORS);
10183 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10184 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10185 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10186 ASSERT_EQ (1, postdom_by_b.length ());
10187 ASSERT_EQ (bb_a, postdom_by_b[0]);
10188 free_dominance_info (CDI_POST_DOMINATORS);
10190 pop_cfun ();
10193 /* Verify a simple CFG of the form:
10194 ENTRY
10198 /t \f
10204 EXIT. */
10206 static void
10207 test_diamond ()
10209 gimple_register_cfg_hooks ();
10211 tree fndecl = push_fndecl ("cfg_test_diamond");
10212 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10214 /* Create some empty blocks. */
10215 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10216 basic_block bb_b = create_empty_bb (bb_a);
10217 basic_block bb_c = create_empty_bb (bb_a);
10218 basic_block bb_d = create_empty_bb (bb_b);
10220 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10221 ASSERT_EQ (0, n_edges_for_fn (fun));
10223 /* Create the edges. */
10224 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10225 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10226 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10227 make_edge (bb_b, bb_d, 0);
10228 make_edge (bb_c, bb_d, 0);
10229 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10231 /* Verify the edges. */
10232 ASSERT_EQ (6, n_edges_for_fn (fun));
10233 ASSERT_EQ (1, bb_a->preds->length ());
10234 ASSERT_EQ (2, bb_a->succs->length ());
10235 ASSERT_EQ (1, bb_b->preds->length ());
10236 ASSERT_EQ (1, bb_b->succs->length ());
10237 ASSERT_EQ (1, bb_c->preds->length ());
10238 ASSERT_EQ (1, bb_c->succs->length ());
10239 ASSERT_EQ (2, bb_d->preds->length ());
10240 ASSERT_EQ (1, bb_d->succs->length ());
10242 /* Verify the dominance information. */
10243 calculate_dominance_info (CDI_DOMINATORS);
10244 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10245 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10246 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10247 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10248 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10249 dom_by_a.release ();
10250 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10251 ASSERT_EQ (0, dom_by_b.length ());
10252 dom_by_b.release ();
10253 free_dominance_info (CDI_DOMINATORS);
10255 /* Similarly for post-dominance. */
10256 calculate_dominance_info (CDI_POST_DOMINATORS);
10257 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10258 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10259 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10260 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10261 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10262 postdom_by_d.release ();
10263 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10264 ASSERT_EQ (0, postdom_by_b.length ());
10265 postdom_by_b.release ();
10266 free_dominance_info (CDI_POST_DOMINATORS);
10268 pop_cfun ();
10271 /* Verify that we can handle a CFG containing a "complete" aka
10272 fully-connected subgraph (where A B C D below all have edges
10273 pointing to each other node, also to themselves).
10274 e.g.:
10275 ENTRY EXIT
10281 A<--->B
10282 ^^ ^^
10283 | \ / |
10284 | X |
10285 | / \ |
10286 VV VV
10287 C<--->D
10290 static void
10291 test_fully_connected ()
10293 gimple_register_cfg_hooks ();
10295 tree fndecl = push_fndecl ("cfg_fully_connected");
10296 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10298 const int n = 4;
10300 /* Create some empty blocks. */
10301 auto_vec <basic_block> subgraph_nodes;
10302 for (int i = 0; i < n; i++)
10303 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10305 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10306 ASSERT_EQ (0, n_edges_for_fn (fun));
10308 /* Create the edges. */
10309 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10310 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10311 for (int i = 0; i < n; i++)
10312 for (int j = 0; j < n; j++)
10313 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10315 /* Verify the edges. */
10316 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10317 /* The first one is linked to ENTRY/EXIT as well as itself and
10318 everything else. */
10319 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10320 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10321 /* The other ones in the subgraph are linked to everything in
10322 the subgraph (including themselves). */
10323 for (int i = 1; i < n; i++)
10325 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10326 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10329 /* Verify the dominance information. */
10330 calculate_dominance_info (CDI_DOMINATORS);
10331 /* The initial block in the subgraph should be dominated by ENTRY. */
10332 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10333 get_immediate_dominator (CDI_DOMINATORS,
10334 subgraph_nodes[0]));
10335 /* Every other block in the subgraph should be dominated by the
10336 initial block. */
10337 for (int i = 1; i < n; i++)
10338 ASSERT_EQ (subgraph_nodes[0],
10339 get_immediate_dominator (CDI_DOMINATORS,
10340 subgraph_nodes[i]));
10341 free_dominance_info (CDI_DOMINATORS);
10343 /* Similarly for post-dominance. */
10344 calculate_dominance_info (CDI_POST_DOMINATORS);
10345 /* The initial block in the subgraph should be postdominated by EXIT. */
10346 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10347 get_immediate_dominator (CDI_POST_DOMINATORS,
10348 subgraph_nodes[0]));
10349 /* Every other block in the subgraph should be postdominated by the
10350 initial block, since that leads to EXIT. */
10351 for (int i = 1; i < n; i++)
10352 ASSERT_EQ (subgraph_nodes[0],
10353 get_immediate_dominator (CDI_POST_DOMINATORS,
10354 subgraph_nodes[i]));
10355 free_dominance_info (CDI_POST_DOMINATORS);
10357 pop_cfun ();
10360 /* Run all of the selftests within this file. */
10362 void
10363 tree_cfg_cc_tests ()
10365 test_linear_chain ();
10366 test_diamond ();
10367 test_fully_connected ();
10370 } // namespace selftest
10372 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10373 - loop
10374 - nested loops
10375 - switch statement (a block with many out-edges)
10376 - something that jumps to itself
10377 - etc */
10379 #endif /* CHECKING_P */