/* Source: official-gcc.git, gcc/tree-cfg.cc
   (blob cdd439fe7506e7bc33654ffa027b493f23d278ac, from commit
   "tree-ssa-strlen: Fix up handle_store [PR113603]").  */
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2024 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
61 #include "gimplify.h"
62 #include "attribs.h"
63 #include "selftest.h"
64 #include "opts.h"
65 #include "asan.h"
66 #include "profile.h"
67 #include "sreal.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
/* CFG statistics.  Currently only counts labels merged away while
   building/cleaning the flowgraph.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};
/* Hashtable helpers.  Hasher for locus_discrim_map entries, keyed on
   the source line number.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.
   The line number itself serves as the hash value.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Entries are equal
   iff they refer to the same source line.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static bool gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
/* Set up an empty flowgraph for function FN: just the fixed ENTRY and
   EXIT blocks, with ENTRY linked directly to EXIT, plus cleared
   basic-block and label-to-block maps sized to the initial capacity.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no real blocks yet, ENTRY and EXIT are adjacent in the
     block chain.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Convenience wrapper: set up an empty tree CFG for the current
   function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge creation.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  /* Only blocks ending in a condition can carry loop annotations.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards over the run of IFN_ANNOTATE calls just before the
     condition; stop at the first statement that is not one.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* Second argument of the annotation encodes its kind.  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* The annotation's value flows through: replace the internal
	 call with a plain copy of its first argument to its LHS.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  for (auto loop : loops_list (cfun, 0))
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation still present at this point was not attached to a
     recognized loop, so warn and drop it.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Main worker for the CFG construction pass: build the flowgraph for
   the current function's gimple body, clean it up, initialize the loop
   structures and fold loop annotations into them.  Always returns 0
   (no additional TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the flat body.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();

  bb_to_omp_idx.release ();

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass descriptor for the GIMPLE CFG construction pass ("cfg").  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The pass object itself; all the work is done in execute_build_cfg.  */
class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_build_cfg ();
  }

}; // class pass_build_cfg

} // anon namespace
/* Factory used by the pass manager to instantiate the CFG pass.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
426 /* Return true if T is a computed goto. */
428 bool
429 computed_goto_p (gimple *t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  /* Anything before the trap other than labels, debug statements and
     clobbers means the sequence does real work.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  if (safe_is_a <gcond *> (*gsi_last_bb (pred_bb)))
    {
      /* Pick whichever successor of the condition is not E's target.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  Returns the last basic
   block created.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  H is the
   statement sequence to seed the block with (may be NULL); E is unused
   by the tree CFG hook and must be NULL.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
704 /*---------------------------------------------------------------------------
705 Edge creation
706 ---------------------------------------------------------------------------*/
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* Look only at abnormal, non-EH successor edges.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  /* DISPATCHER_BBS holds pairs: slot 0 for calls, slot 1 for
     computed gotos.  */
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  /* With OpenMP regions, each region gets its own dispatcher pair, so
     abnormal control flow never crosses an SESE region boundary.  */
  if (!bb_to_omp_idx.is_empty ())
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx.is_empty ())
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_call_set_ctrl_altering (g, true);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with an
   computed goto, returns 2 when it ends with a statement that
   might return to this function via an nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = *gsi_last_bb (bb);
  bool fallthru = false;
  int ret = 0;

  /* An empty block falls through implicitly; nothing to do.  */
  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
      }
      fallthru = false;
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edge (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edge (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edge (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (!bb_to_omp_idx.is_empty ())
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      /* Lazily allocate the OMP index map the first time we see a
	 region.  */
      if (cur_region && bb_to_omp_idx.is_empty ())
	bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (!bb_to_omp_idx.is_empty ())
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
				       true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
					 false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
				       false);
	    }
	}

      if (!bb_to_omp_idx.is_empty ())
	XDELETE (dispatcher_bbs);
    }

  omp_free_regions ();
}
1067 /* Add SEQ after GSI. Start new bb after GSI, and create further bbs as
1068 needed. Returns true if new bbs were created.
1069 Note: This is transitional code, and should not be used for new code. We
1070 should be able to get rid of this by rewriting all target va-arg
1071 gimplification hooks to use an interface gimple_build_cond_value as described
1072 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1074 bool
1075 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1077 gimple *stmt = gsi_stmt (*gsi);
1078 basic_block bb = gimple_bb (stmt);
1079 basic_block lastbb, afterbb;
1080 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1081 edge e;
1082 lastbb = make_blocks_1 (seq, bb);
/* If SEQ introduced no new basic blocks, there is nothing to fix up.  */
1083 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1084 return false;
/* Split BB after STMT; the blocks created from SEQ must end up between
   BB and the split-off remainder AFTERBB.  */
1085 e = split_block (bb, stmt);
1086 /* Move e->dest to come after the new basic blocks. */
1087 afterbb = e->dest;
1088 unlink_block (afterbb);
1089 link_block (afterbb, lastbb);
1090 redirect_edge_succ (e, bb->next_bb);
1091 bb = bb->next_bb;
/* For every freshly created block: build its edges, place it into
   AFTERBB's loop, and derive a profile count from its incoming edges.  */
1092 while (bb != afterbb)
1094 struct omp_region *cur_region = NULL;
1095 profile_count cnt = profile_count::zero ();
1096 bool all = true;
1098 int cur_omp_region_idx = 0;
1099 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
/* The new blocks are expected to contain neither control statements
   requiring special edges nor OpenMP regions.  */
1100 gcc_assert (!mer && !cur_region);
1101 add_bb_to_loop (bb, afterbb->loop_father);
1103 edge e;
1104 edge_iterator ei;
1105 FOR_EACH_EDGE (e, ei, bb->preds)
1107 if (e->count ().initialized_p ())
1108 cnt += e->count ();
1109 else
1110 all = false;
1112 tree_guess_outgoing_edge_probabilities (bb);
/* Only trust the summed count when every predecessor edge supplied one,
   or when the profile was read from a file.  */
1113 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1114 bb->count = cnt;
1116 bb = bb->next_bb;
1118 return true;
1121 /* Find the next available discriminator value for LOCUS. The
1122 discriminator distinguishes among several basic blocks that
1123 share a common locus, allowing for more accurate sample-based
1124 profiling. */
1126 static int
1127 next_discriminator_for_locus (int line)
1129 struct locus_discrim_map item;
1130 struct locus_discrim_map **slot;
1132 item.location_line = line;
1133 item.discriminator = 0;
1134 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1135 gcc_assert (slot);
1136 if (*slot == HTAB_EMPTY_ENTRY)
1138 *slot = XNEW (struct locus_discrim_map);
1139 gcc_assert (*slot);
1140 (*slot)->location_line = line;
1141 (*slot)->discriminator = 0;
1143 (*slot)->discriminator++;
1144 return (*slot)->discriminator;
1147 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1149 static bool
1150 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1152 expanded_location to;
1154 if (locus1 == locus2)
1155 return true;
1157 to = expand_location (locus2);
1159 if (from->line != to.line)
1160 return false;
1161 if (from->file == to.file)
1162 return true;
1163 return (from->file != NULL
1164 && to.file != NULL
1165 && filename_cmp (from->file, to.file) == 0);
1168 /* Assign a unique discriminator value to all statements in block bb that
1169 have the same line number as locus. */
1171 static void
1172 assign_discriminator (location_t locus, basic_block bb)
1174 gimple_stmt_iterator gsi;
1175 int discriminator;
1177 if (locus == UNKNOWN_LOCATION)
1178 return;
1180 expanded_location locus_e = expand_location (locus);
1182 discriminator = next_discriminator_for_locus (locus_e.line);
1184 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1186 gimple *stmt = gsi_stmt (gsi);
1187 location_t stmt_locus = gimple_location (stmt);
1188 if (same_line_p (locus, &locus_e, stmt_locus))
1189 gimple_set_location (stmt,
1190 location_with_discriminator (stmt_locus, discriminator));
1194 /* Assign discriminators to statement locations. */
1196 static void
1197 assign_discriminators (void)
1199 basic_block bb;
1201 FOR_EACH_BB_FN (bb, cfun)
1203 edge e;
1204 edge_iterator ei;
1205 gimple_stmt_iterator gsi;
1206 location_t curr_locus = UNKNOWN_LOCATION;
1207 expanded_location curr_locus_e = {};
1208 int curr_discr = 0;
1210 /* Traverse the basic block, if two function calls within a basic block
1211 are mapped to the same line, assign a new discriminator because a call
1212 stmt could be a split point of a basic block. */
1213 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1215 gimple *stmt = gsi_stmt (gsi);
1217 /* Don't allow debug stmts to affect discriminators, but
1218 allow them to take discriminators when they're on the
1219 same line as the preceding nondebug stmt. */
1220 if (is_gimple_debug (stmt))
1222 if (curr_locus != UNKNOWN_LOCATION
1223 && same_line_p (curr_locus, &curr_locus_e,
1224 gimple_location (stmt)))
1226 location_t loc = gimple_location (stmt);
1227 location_t dloc = location_with_discriminator (loc,
1228 curr_discr);
1229 gimple_set_location (stmt, dloc);
1231 continue;
/* First nondebug statement with a location in this block: just remember
   the line, keeping discriminator 0.  */
1233 if (curr_locus == UNKNOWN_LOCATION)
1235 curr_locus = gimple_location (stmt);
1236 curr_locus_e = expand_location (curr_locus);
/* A new source line resets the running discriminator.  */
1238 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1240 curr_locus = gimple_location (stmt);
1241 curr_locus_e = expand_location (curr_locus);
1242 curr_discr = 0;
/* Same line as the previous call: tag this statement with the
   discriminator allocated after that call.  */
1244 else if (curr_discr != 0)
1246 location_t loc = gimple_location (stmt);
1247 location_t dloc = location_with_discriminator (loc, curr_discr);
1248 gimple_set_location (stmt, dloc);
1250 /* Allocate a new discriminator for CALL stmt. */
1251 if (gimple_code (stmt) == GIMPLE_CALL)
1252 curr_discr = next_discriminator_for_locus (curr_locus);
/* Second pass per block: if a successor starts or ends on the same line
   as this block's last statement, disambiguate the two with
   discriminators.  */
1255 gimple *last = last_nondebug_stmt (bb);
1256 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1257 if (locus == UNKNOWN_LOCATION)
1258 continue;
1260 expanded_location locus_e = expand_location (locus);
1262 FOR_EACH_EDGE (e, ei, bb->succs)
1264 gimple *first = first_non_label_stmt (e->dest);
1265 gimple *last = last_nondebug_stmt (e->dest);
1267 gimple *stmt_on_same_line = NULL;
1268 if (first && same_line_p (locus, &locus_e,
1269 gimple_location (first)))
1270 stmt_on_same_line = first;
1271 else if (last && same_line_p (locus, &locus_e,
1272 gimple_location (last)))
1273 stmt_on_same_line = last;
1275 if (stmt_on_same_line)
/* If the successor already carries a discriminator and we don't,
   tag our own block instead of re-tagging the successor.  */
1277 if (has_discriminator (gimple_location (stmt_on_same_line))
1278 && !has_discriminator (locus))
1279 assign_discriminator (locus, bb);
1280 else
1281 assign_discriminator (locus, e->dest);
1287 /* Create the edges for a GIMPLE_COND starting at block BB. */
1289 static void
1290 make_cond_expr_edges (basic_block bb)
1292 gcond *entry = as_a <gcond *> (*gsi_last_bb (bb));
1293 gimple *then_stmt, *else_stmt;
1294 basic_block then_bb, else_bb;
1295 tree then_label, else_label;
1296 edge e;
1298 gcc_assert (entry);
1300 /* Entry basic blocks for each component. */
1301 then_label = gimple_cond_true_label (entry);
1302 else_label = gimple_cond_false_label (entry);
1303 then_bb = label_to_block (cfun, then_label);
1304 else_bb = label_to_block (cfun, else_label);
1305 then_stmt = first_stmt (then_bb);
1306 else_stmt = first_stmt (else_bb);
1308 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1309 e->goto_locus = gimple_location (then_stmt);
1310 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
/* NOTE(review): only the false edge's result is NULL-checked here;
   presumably make_edge can return NULL when an equivalent edge already
   exists (e.g. both arms target the same block) -- confirm against
   make_edge's contract.  */
1311 if (e)
1312 e->goto_locus = gimple_location (else_stmt);
1314 /* We do not need the labels anymore. */
1315 gimple_cond_set_true_label (entry, NULL_TREE);
1316 gimple_cond_set_false_label (entry, NULL_TREE);
1320 /* Called for each element in the hash table (P) as we delete the
1321 edge to cases hash table.
1323 Clear all the CASE_CHAINs to prevent problems with copying of
1324 SWITCH_EXPRs and structure sharing rules, then free the hash table
1325 element. */
1327 bool
1328 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1330 tree t, next;
1332 for (t = value; t; t = next)
1334 next = CASE_CHAIN (t);
1335 CASE_CHAIN (t) = NULL;
1338 return true;
1341 /* Start recording information mapping edges to case labels. */
1343 void
1344 start_recording_case_labels (void)
1346 gcc_assert (edge_to_cases == NULL);
1347 edge_to_cases = new hash_map<edge, tree>;
1348 touched_switch_bbs = BITMAP_ALLOC (NULL);
1351 /* Return nonzero if we are recording information for case labels. */
1353 static bool
1354 recording_case_labels_p (void)
1356 return (edge_to_cases != NULL);
1359 /* Stop recording information mapping edges to case labels and
1360 remove any information we have recorded. */
1361 void
1362 end_recording_case_labels (void)
1364 bitmap_iterator bi;
1365 unsigned i;
/* Clear all CASE_CHAINs before destroying the map (see
   edge_to_cases_cleanup).  */
1366 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1367 delete edge_to_cases;
1368 edge_to_cases = NULL;
/* Re-group the case labels of every switch whose block was touched
   while recording was active.  */
1369 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1371 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1372 if (bb)
1374 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1375 group_case_labels_stmt (stmt);
1378 BITMAP_FREE (touched_switch_bbs);
1381 /* If we are inside a {start,end}_recording_cases block, then return
1382 a chain of CASE_LABEL_EXPRs from T which reference E.
1384 Otherwise return NULL. */
1386 tree
1387 get_cases_for_edge (edge e, gswitch *t)
1389 tree *slot;
1390 size_t i, n;
1392 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1393 chains available. Return NULL so the caller can detect this case. */
1394 if (!recording_case_labels_p ())
1395 return NULL;
1397 slot = edge_to_cases->get (e);
1398 if (slot)
1399 return *slot;
1401 /* If we did not find E in the hash table, then this must be the first
1402 time we have been queried for information about E & T. Add all the
1403 elements from T to the hash table then perform the query again. */
1405 n = gimple_switch_num_labels (t);
1406 for (i = 0; i < n; i++)
1408 tree elt = gimple_switch_label (t, i);
1409 tree lab = CASE_LABEL (elt);
1410 basic_block label_bb = label_to_block (cfun, lab);
1411 edge this_edge = find_edge (e->src, label_bb);
1413 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1414 a new chain. */
/* Each map entry heads a CASE_CHAIN-linked list; prepend ELT.  */
1415 tree &s = edge_to_cases->get_or_insert (this_edge);
1416 CASE_CHAIN (elt) = s;
1417 s = elt;
/* Every edge of T, including E, now has an entry in the map.  */
1420 return *edge_to_cases->get (e);
1423 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1425 static void
1426 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1428 size_t i, n;
1430 n = gimple_switch_num_labels (entry);
1432 for (i = 0; i < n; ++i)
1434 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1435 make_edge (bb, label_bb, 0);
1440 /* Return the basic block holding label DEST. */
1442 basic_block
1443 label_to_block (struct function *ifun, tree dest)
1445 int uid = LABEL_DECL_UID (dest);
1447 /* We would die hard when faced by an undefined label. Emit a label to
1448 the very first basic block. This will hopefully make even the dataflow
1449 and undefined variable warnings quite right. */
1450 if (seen_error () && uid < 0)
1452 gimple_stmt_iterator gsi =
1453 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS))
1454 gimple *stmt;
1456 stmt = gimple_build_label (dest);
1457 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Inserting the label statement assigned DEST a UID; re-read it.  */
1458 uid = LABEL_DECL_UID (dest);
/* A label that never made it into the map yields NULL rather than an
   out-of-bounds access.  */
1460 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1461 return NULL;
1462 return (*ifun->cfg->x_label_to_block_map)[uid];
1465 /* Create edges for a goto statement at block BB. Returns true
1466 if abnormal edges should be created. */
1468 static bool
1469 make_goto_expr_edges (basic_block bb)
1471 gimple_stmt_iterator last = gsi_last_bb (bb);
1472 gimple *goto_t = gsi_stmt (last);
1474 /* A simple GOTO creates normal edges. */
1475 if (simple_goto_p (goto_t))
1477 tree dest = gimple_goto_dest (goto_t);
1478 basic_block label_bb = label_to_block (cfun, dest);
1479 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1480 e->goto_locus = gimple_location (goto_t);
1481 gsi_remove (&last, true);
1482 return false;
1485 /* A computed GOTO creates abnormal edges. */
1486 return true;
1489 /* Create edges for an asm statement with labels at block BB. */
1491 static void
1492 make_gimple_asm_edges (basic_block bb)
1494 gasm *stmt = as_a <gasm *> (*gsi_last_bb (bb));
1495 int i, n = gimple_asm_nlabels (stmt);
1497 for (i = 0; i < n; ++i)
1499 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1500 basic_block label_bb = label_to_block (cfun, label);
1501 make_edge (bb, label_bb, 0);
1505 /*---------------------------------------------------------------------------
1506 Flowgraph analysis
1507 ---------------------------------------------------------------------------*/
1509 /* Cleanup useless labels in basic blocks. This is something we wish
1510 to do early because it allows us to group case labels before creating
1511 the edges for the CFG, and it speeds up block statement iterators in
1512 all passes later on.
1513 We rerun this pass after CFG is created, to get rid of the labels that
1514 are no longer referenced. After then we do not run it any more, since
1515 (almost) no new labels should be created. */
1517 /* A map from basic block index to the leading label of that block. */
/* One label_record is allocated per basic block (see cleanup_dead_labels,
   which XCNEWVECs an array indexed by bb->index).  */
1518 struct label_record
1520 /* The label. */
1521 tree label;
1523 /* True if the label is referenced from somewhere. */
1524 bool used;
1527 /* Given LABEL return the first label in the same basic block. */
1529 static tree
1530 main_block_label (tree label, label_record *label_for_bb)
1532 basic_block bb = label_to_block (cfun, label);
1533 tree main_label = label_for_bb[bb->index].label;
1535 /* label_to_block possibly inserted undefined label into the chain. */
1536 if (!main_label)
1538 label_for_bb[bb->index].label = label;
1539 main_label = label;
1542 label_for_bb[bb->index].used = true;
1543 return main_label;
1546 /* Clean up redundant labels within the exception tree. */
1548 static void
1549 cleanup_dead_labels_eh (label_record *label_for_bb)
1551 eh_landing_pad lp;
1552 eh_region r;
1553 tree lab;
1554 int i;
1556 if (cfun->eh == NULL)
1557 return;
/* Redirect each landing pad's label to the leading label of its block,
   keeping the EH_LANDING_PAD_NR bookkeeping consistent.  */
1559 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1560 if (lp && lp->post_landing_pad)
1562 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1563 if (lab != lp->post_landing_pad)
1565 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1566 lp->post_landing_pad = lab;
1567 EH_LANDING_PAD_NR (lab) = lp->index;
/* Likewise redirect the labels recorded in each EH region.  */
1571 FOR_ALL_EH_REGION (r)
1572 switch (r->type)
1574 case ERT_CLEANUP:
1575 case ERT_MUST_NOT_THROW:
1576 break;
1578 case ERT_TRY:
1580 eh_catch c;
1581 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1583 lab = c->label;
1584 if (lab)
1585 c->label = main_block_label (lab, label_for_bb);
1588 break;
1590 case ERT_ALLOWED_EXCEPTIONS:
1591 lab = r->u.allowed.label;
1592 if (lab)
1593 r->u.allowed.label = main_block_label (lab, label_for_bb);
1594 break;
1599 /* Cleanup redundant labels. This is a three-step process:
1600 1) Find the leading label for each block.
1601 2) Redirect all references to labels to the leading labels.
1602 3) Cleanup all useless labels. */
1604 void
1605 cleanup_dead_labels (void)
1607 basic_block bb;
1608 label_record *label_for_bb = XCNEWVEC (struct label_record,
1609 last_basic_block_for_fn (cfun));
1611 /* Find a suitable label for each block. We use the first user-defined
1612 label if there is one, or otherwise just the first label we see. */
1613 FOR_EACH_BB_FN (bb, cfun)
1615 gimple_stmt_iterator i;
1617 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1619 tree label;
1620 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels only appear at the start of a block; the first non-label
   statement ends the scan.  */
1622 if (!label_stmt)
1623 break;
1625 label = gimple_label_label (label_stmt);
1627 /* If we have not yet seen a label for the current block,
1628 remember this one and see if there are more labels. */
1629 if (!label_for_bb[bb->index].label)
1631 label_for_bb[bb->index].label = label;
1632 continue;
1635 /* If we did see a label for the current block already, but it
1636 is an artificially created label, replace it if the current
1637 label is a user defined label. */
1638 if (!DECL_ARTIFICIAL (label)
1639 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1641 label_for_bb[bb->index].label = label;
1642 break;
1647 /* Now redirect all jumps/branches to the selected label.
1648 First do so for each block ending in a control statement. */
1649 FOR_EACH_BB_FN (bb, cfun)
1651 gimple *stmt = *gsi_last_bb (bb);
1652 tree label, new_label;
1654 if (!stmt)
1655 continue;
1657 switch (gimple_code (stmt))
1659 case GIMPLE_COND:
1661 gcond *cond_stmt = as_a <gcond *> (stmt);
1662 label = gimple_cond_true_label (cond_stmt);
1663 if (label)
1665 new_label = main_block_label (label, label_for_bb);
1666 if (new_label != label)
1667 gimple_cond_set_true_label (cond_stmt, new_label);
1670 label = gimple_cond_false_label (cond_stmt);
1671 if (label)
1673 new_label = main_block_label (label, label_for_bb);
1674 if (new_label != label)
1675 gimple_cond_set_false_label (cond_stmt, new_label);
1678 break;
1680 case GIMPLE_SWITCH:
1682 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1683 size_t i, n = gimple_switch_num_labels (switch_stmt);
1685 /* Replace all destination labels. */
1686 for (i = 0; i < n; ++i)
1688 tree case_label = gimple_switch_label (switch_stmt, i);
1689 label = CASE_LABEL (case_label);
1690 new_label = main_block_label (label, label_for_bb);
1691 if (new_label != label)
1692 CASE_LABEL (case_label) = new_label;
1694 break;
1697 case GIMPLE_ASM:
1699 gasm *asm_stmt = as_a <gasm *> (stmt);
1700 int i, n = gimple_asm_nlabels (asm_stmt);
1702 for (i = 0; i < n; ++i)
1704 tree cons = gimple_asm_label_op (asm_stmt, i);
1705 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1706 TREE_VALUE (cons) = label;
1708 break;
1711 /* We have to handle gotos until they're removed, and we don't
1712 remove them until after we've created the CFG edges. */
1713 case GIMPLE_GOTO:
1714 if (!computed_goto_p (stmt))
1716 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1717 label = gimple_goto_dest (goto_stmt);
1718 new_label = main_block_label (label, label_for_bb);
1719 if (new_label != label)
1720 gimple_goto_set_dest (goto_stmt, new_label);
1722 break;
1724 case GIMPLE_TRANSACTION:
1726 gtransaction *txn = as_a <gtransaction *> (stmt);
/* A transaction carries up to three labels: normal, uninstrumented
   and over; redirect each one independently.  */
1728 label = gimple_transaction_label_norm (txn);
1729 if (label)
1731 new_label = main_block_label (label, label_for_bb);
1732 if (new_label != label)
1733 gimple_transaction_set_label_norm (txn, new_label);
1736 label = gimple_transaction_label_uninst (txn);
1737 if (label)
1739 new_label = main_block_label (label, label_for_bb);
1740 if (new_label != label)
1741 gimple_transaction_set_label_uninst (txn, new_label);
1744 label = gimple_transaction_label_over (txn);
1745 if (label)
1747 new_label = main_block_label (label, label_for_bb);
1748 if (new_label != label)
1749 gimple_transaction_set_label_over (txn, new_label);
1752 break;
1754 default:
1755 break;
1759 /* Do the same for the exception region tree labels. */
1760 cleanup_dead_labels_eh (label_for_bb);
1762 /* Finally, purge dead labels. All user-defined labels and labels that
1763 can be the target of non-local gotos and labels which have their
1764 address taken are preserved. */
1765 FOR_EACH_BB_FN (bb, cfun)
1767 gimple_stmt_iterator i;
1768 tree label_for_this_bb = label_for_bb[bb->index].label;
1770 if (!label_for_this_bb)
1771 continue;
1773 /* If the main label of the block is unused, we may still remove it. */
1774 if (!label_for_bb[bb->index].used)
1775 label_for_this_bb = NULL;
1777 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1779 tree label;
1780 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1782 if (!label_stmt)
1783 break;
1785 label = gimple_label_label (label_stmt);
1787 if (label == label_for_this_bb
1788 || !DECL_ARTIFICIAL (label)
1789 || DECL_NONLOCAL (label)
1790 || FORCED_LABEL (label))
1791 gsi_next (&i);
1792 else
/* Landing-pad labels were already redirected by
   cleanup_dead_labels_eh, so none may remain here.  */
1794 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1795 gsi_remove (&i, true);
1800 free (label_for_bb);
1803 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1804 the ones jumping to the same label.
1805 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1807 bool
1808 group_case_labels_stmt (gswitch *stmt)
1810 int old_size = gimple_switch_num_labels (stmt);
1811 int i, next_index, new_size;
1812 basic_block default_bb = NULL;
1813 hash_set<tree> *removed_labels = NULL;
1815 default_bb = gimple_switch_default_bb (cfun, stmt);
1817 /* Look for possible opportunities to merge cases. */
/* Index 0 is the default label, so scanning starts at 1.  */
1818 new_size = i = 1;
1819 while (i < old_size)
1821 tree base_case, base_high;
1822 basic_block base_bb;
1824 base_case = gimple_switch_label (stmt, i);
1826 gcc_assert (base_case);
1827 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1829 /* Discard cases that have the same destination as the default case or
1830 whose destination blocks have already been removed as unreachable. */
1831 if (base_bb == NULL
1832 || base_bb == default_bb
1833 || (removed_labels
1834 && removed_labels->contains (CASE_LABEL (base_case))))
1836 i++;
1837 continue;
1840 base_high = CASE_HIGH (base_case)
1841 ? CASE_HIGH (base_case)
1842 : CASE_LOW (base_case);
1843 next_index = i + 1;
1845 /* Try to merge case labels. Break out when we reach the end
1846 of the label vector or when we cannot merge the next case
1847 label with the current one. */
1848 while (next_index < old_size)
1850 tree merge_case = gimple_switch_label (stmt, next_index);
1851 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1852 wide_int bhp1 = wi::to_wide (base_high) + 1;
1854 /* Merge the cases if they jump to the same place,
1855 and their ranges are consecutive. */
1856 if (merge_bb == base_bb
1857 && (removed_labels == NULL
1858 || !removed_labels->contains (CASE_LABEL (merge_case)))
1859 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1861 base_high
1862 = (CASE_HIGH (merge_case)
1863 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1864 CASE_HIGH (base_case) = base_high;
1865 next_index++;
1867 else
1868 break;
1871 /* Discard cases that have an unreachable destination block. */
1872 if (EDGE_COUNT (base_bb->succs) == 0
1873 && gimple_seq_unreachable_p (bb_seq (base_bb))
1874 /* Don't optimize this if __builtin_unreachable () is the
1875 implicitly added one by the C++ FE too early, before
1876 -Wreturn-type can be diagnosed. We'll optimize it later
1877 during switchconv pass or any other cfg cleanup. */
1878 && (gimple_in_ssa_p (cfun)
1879 || (LOCATION_LOCUS (gimple_location (last_nondebug_stmt (base_bb)))
1880 != BUILTINS_LOCATION)))
1882 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1883 if (base_edge != NULL)
1885 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1886 !gsi_end_p (gsi); gsi_next (&gsi))
1887 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1889 if (FORCED_LABEL (gimple_label_label (stmt))
1890 || DECL_NONLOCAL (gimple_label_label (stmt)))
1892 /* Forced/non-local labels aren't going to be removed,
1893 but they will be moved to some neighbouring basic
1894 block. If some later case label refers to one of
1895 those labels, we should throw that case away rather
1896 than keeping it around and referring to some random
1897 other basic block without an edge to it. */
1898 if (removed_labels == NULL)
1899 removed_labels = new hash_set<tree>;
1900 removed_labels->add (gimple_label_label (stmt));
1903 else
1904 break;
1905 remove_edge_and_dominated_blocks (base_edge);
/* The whole merged run was discarded; nothing was kept.  */
1907 i = next_index;
1908 continue;
/* Compact the surviving case into the next free slot.  */
1911 if (new_size < i)
1912 gimple_switch_set_label (stmt, new_size,
1913 gimple_switch_label (stmt, i));
1914 i = next_index;
1915 new_size++;
1918 gcc_assert (new_size <= old_size);
1920 if (new_size < old_size)
1921 gimple_switch_set_num_labels (stmt, new_size);
/* delete of a NULL hash_set is a no-op.  */
1923 delete removed_labels;
1924 return new_size < old_size;
1927 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1928 and scan the sorted vector of cases. Combine the ones jumping to the
1929 same label. */
1931 bool
1932 group_case_labels (void)
1934 basic_block bb;
1935 bool changed = false;
1937 FOR_EACH_BB_FN (bb, cfun)
1939 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1940 changed |= group_case_labels_stmt (stmt);
1943 return changed;
1946 /* Checks whether we can merge block B into block A. */
1948 static bool
1949 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1951 gimple *stmt;
/* A must have exactly one, non-complex successor, and it must be B;
   B must have A as its only predecessor.  */
1953 if (!single_succ_p (a))
1954 return false;
1956 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1957 return false;
1959 if (single_succ (a) != b)
1960 return false;
1962 if (!single_pred_p (b))
1963 return false;
1965 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1966 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1967 return false;
1969 /* If A ends by a statement causing exceptions or something similar, we
1970 cannot merge the blocks. */
1971 stmt = *gsi_last_bb (a);
1972 if (stmt && stmt_ends_bb_p (stmt))
1973 return false;
1975 /* Examine the labels at the beginning of B. */
1976 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1977 gsi_next (&gsi))
1979 tree lab;
1980 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1981 if (!label_stmt)
1982 break;
1983 lab = gimple_label_label (label_stmt);
1985 /* Do not remove user forced labels or for -O0 any user labels. */
1986 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1987 return false;
1990 /* Protect simple loop latches. We only want to avoid merging
1991 the latch with the loop header or with a block in another
1992 loop in this case. */
1993 if (current_loops
1994 && b->loop_father->latch == b
1995 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1996 && (b->loop_father->header == a
1997 || b->loop_father != a->loop_father))
1998 return false;
2000 /* It must be possible to eliminate all phi nodes in B. If ssa form
2001 is not up-to-date and a name-mapping is registered, we cannot eliminate
2002 any phis. Symbols marked for renaming are never a problem though. */
2003 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
2004 gsi_next (&gsi))
2006 gphi *phi = gsi.phi ();
2007 /* Technically only new names matter. */
2008 if (name_registered_for_update_p (PHI_RESULT (phi)))
2009 return false;
2012 /* When not optimizing, don't merge if we'd lose goto_locus. */
2013 if (!optimize
2014 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2016 location_t goto_locus = single_succ_edge (a)->goto_locus;
2017 gimple_stmt_iterator prev, next;
2018 prev = gsi_last_nondebug_bb (a);
2019 next = gsi_after_labels (b);
2020 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2021 gsi_next_nondebug (&next);
/* The goto_locus is preserved by some neighbouring statement only if
   the last stmt of A or the first stmt of B carries the same location.  */
2022 if ((gsi_end_p (prev)
2023 || gimple_location (gsi_stmt (prev)) != goto_locus)
2024 && (gsi_end_p (next)
2025 || gimple_location (gsi_stmt (next)) != goto_locus))
2026 return false;
2029 return true;
2032 /* Replaces all uses of NAME by VAL. */
2034 void
2035 replace_uses_by (tree name, tree val)
2037 imm_use_iterator imm_iter;
2038 use_operand_p use;
2039 gimple *stmt;
2040 edge e;
2042 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2044 /* Mark the block if we change the last stmt in it. */
2045 if (cfgcleanup_altered_bbs
2046 && stmt_ends_bb_p (stmt))
2047 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index)
2049 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2051 replace_exp (use, val);
2053 if (gimple_code (stmt) == GIMPLE_PHI)
/* If VAL flows into a PHI over an abnormal edge, it must be marked
   as occurring in an abnormal PHI.  */
2055 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2056 PHI_ARG_INDEX_FROM_USE (use));
2057 if (e->flags & EDGE_ABNORMAL
2058 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2060 /* This can only occur for virtual operands, since
2061 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2062 would prevent replacement. */
2063 gcc_checking_assert (virtual_operand_p (name));
2064 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* Non-PHI statements may fold after the replacement; keep them and
   any EH information up to date.  */
2069 if (gimple_code (stmt) != GIMPLE_PHI)
2071 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2072 gimple *orig_stmt = stmt;
2073 size_t i;
2075 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2076 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2077 only change sth from non-invariant to invariant, and only
2078 when propagating constants. */
2079 if (is_gimple_min_invariant (val))
2080 for (i = 0; i < gimple_num_ops (stmt); i++)
2082 tree op = gimple_op (stmt, i);
2083 /* Operands may be empty here. For example, the labels
2084 of a GIMPLE_COND are nulled out following the creation
2085 of the corresponding CFG edges. */
2086 if (op && TREE_CODE (op) == ADDR_EXPR)
2087 recompute_tree_invariant_for_addr_expr (op);
2090 if (fold_stmt (&gsi))
2091 stmt = gsi_stmt (gsi);
2093 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2094 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2096 update_stmt (stmt);
/* Every use was rewritten, so NAME must now be dead.  */
2100 gcc_checking_assert (has_zero_uses (name));
2102 /* Also update the trees stored in loop structures. */
2103 if (current_loops)
2105 for (auto loop : loops_list (cfun, 0))
2106 substitute_in_loop_info (loop, name, val);
2110 /* Merge block B into block A. */
2112 static void
2113 gimple_merge_blocks (basic_block a, basic_block b)
2115 gimple_stmt_iterator last, gsi;
2116 gphi_iterator psi;
2118 if (dump_file)
2119 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2121 /* Remove all single-valued PHI nodes from block B of the form
2122 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2123 gsi = gsi_last_bb (a);
2124 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2126 gimple *phi = gsi_stmt (psi);
2127 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2128 gimple *copy;
2129 bool may_replace_uses = (virtual_operand_p (def)
2130 || may_propagate_copy (def, use));
2132 /* In case we maintain loop closed ssa form, do not propagate arguments
2133 of loop exit phi nodes. */
2134 if (current_loops
2135 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2136 && !virtual_operand_p (def)
2137 && TREE_CODE (use) == SSA_NAME
2138 && a->loop_father != b->loop_father)
2139 may_replace_uses = false;
2141 if (!may_replace_uses)
2143 gcc_assert (!virtual_operand_p (def));
2145 /* Note that just emitting the copies is fine -- there is no problem
2146 with ordering of phi nodes. This is because A is the single
2147 predecessor of B, therefore results of the phi nodes cannot
2148 appear as arguments of the phi nodes. */
2149 copy = gimple_build_assign (def, use);
2150 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2151 remove_phi_node (&psi, false);
2153 else
2155 /* If we deal with a PHI for virtual operands, we can simply
2156 propagate these without fussing with folding or updating
2157 the stmt. */
2158 if (virtual_operand_p (def))
2160 imm_use_iterator iter;
2161 use_operand_p use_p;
2162 gimple *stmt;
2164 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2165 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2166 SET_USE (use_p, use);
2168 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2169 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2171 else
2172 replace_uses_by (def, use);
2174 remove_phi_node (&psi, true);
2178 /* Ensure that B follows A. */
2179 move_block_after (b, a);
2181 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2182 gcc_assert (!*gsi_last_bb (a)
2183 || !stmt_ends_bb_p (*gsi_last_bb (a)));
2185 /* Remove labels from B and set gimple_bb to A for other statements. */
2186 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2188 gimple *stmt = gsi_stmt (gsi);
2189 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2191 tree label = gimple_label_label (label_stmt);
2192 int lp_nr;
2194 gsi_remove (&gsi, false);
2196 /* Now that we can thread computed gotos, we might have
2197 a situation where we have a forced label in block B
2198 However, the label at the start of block B might still be
2199 used in other ways (think about the runtime checking for
2200 Fortran assigned gotos). So we cannot just delete the
2201 label. Instead we move the label to the start of block A. */
2202 if (FORCED_LABEL (label))
2204 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2205 tree first_label = NULL_TREE;
2206 if (!gsi_end_p (dest_gsi))
2207 if (glabel *first_label_stmt
2208 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2209 first_label = gimple_label_label (first_label_stmt);
/* Keep non-local and landing-pad labels first in A.  */
2210 if (first_label
2211 && (DECL_NONLOCAL (first_label)
2212 || EH_LANDING_PAD_NR (first_label) != 0))
2213 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2214 else
2215 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2217 /* Other user labels keep around in a form of a debug stmt. */
2218 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2220 gimple *dbg = gimple_build_debug_bind (label,
2221 integer_zero_node,
2222 stmt);
2223 gimple_debug_bind_reset_value (dbg);
2224 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* A removed landing-pad label leaves its pad without a label.  */
2227 lp_nr = EH_LANDING_PAD_NR (label);
2228 if (lp_nr)
2230 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2231 lp->post_landing_pad = NULL;
2234 else
2236 gimple_set_bb (stmt, a);
2237 gsi_next (&gsi);
2241 /* When merging two BBs, if their counts are different, the larger count
2242 is selected as the new bb count. This is to handle inconsistent
2243 profiles. */
2244 if (a->loop_father == b->loop_father)
2246 a->count = a->count.merge (b->count);
2249 /* Merge the sequences. */
2250 last = gsi_last_bb (a);
2251 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2252 set_bb_seq (b, NULL);
2254 if (cfgcleanup_altered_bbs)
2255 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2259 /* Return the one of two successors of BB that is not reachable by a
2260 complex edge, if there is one. Else, return BB. We use
2261 this in optimizations that use post-dominators for their heuristics,
2262 to catch the cases in C++ where function calls are involved. */
2264 basic_block
2265 single_noncomplex_succ (basic_block bb)
2267 edge e0, e1;
2268 if (EDGE_COUNT (bb->succs) != 2)
2269 return bb;
2271 e0 = EDGE_SUCC (bb, 0);
2272 e1 = EDGE_SUCC (bb, 1);
2273 if (e0->flags & EDGE_COMPLEX)
2274 return e1->dest;
2275 if (e1->flags & EDGE_COMPLEX)
2276 return e0->dest;
2278 return bb;
2281 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2283 void
2284 notice_special_calls (gcall *call)
2286 int flags = gimple_call_flags (call);
2288 if (flags & ECF_MAY_BE_ALLOCA)
2289 cfun->calls_alloca = true;
2290 if (flags & ECF_RETURNS_TWICE)
2291 cfun->calls_setjmp = true;
2295 /* Clear flags set by notice_special_calls. Used by dead code removal
2296 to update the flags. */
2298 void
2299 clear_special_calls (void)
2301 cfun->calls_alloca = false;
2302 cfun->calls_setjmp = false;
2305 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2307 static void
2308 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2310 /* Since this block is no longer reachable, we can just delete all
2311 of its PHI nodes. */
2312 remove_phi_nodes (bb);
2314 /* Remove edges to BB's successors. */
2315 while (EDGE_COUNT (bb->succs) > 0)
2316 remove_edge (EDGE_SUCC (bb, 0));
2320 /* Remove statements of basic block BB. */
2322 static void
2323 remove_bb (basic_block bb)
2325 gimple_stmt_iterator i;
2327 if (dump_file)
2329 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2330 if (dump_flags & TDF_DETAILS)
2332 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2333 fprintf (dump_file, "\n");
2337 if (current_loops)
2339 class loop *loop = bb->loop_father;
2341 /* If a loop gets removed, clean up the information associated
2342 with it. */
2343 if (loop->latch == bb
2344 || loop->header == bb)
2345 free_numbers_of_iterations_estimates (loop);
2348 /* Remove all the instructions in the block. */
2349 if (bb_seq (bb) != NULL)
2351 /* Walk backwards so as to get a chance to substitute all
2352 released DEFs into debug stmts. See
2353 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2354 details. */
2355 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2357 gimple *stmt = gsi_stmt (i);
2358 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2359 if (label_stmt
2360 && (FORCED_LABEL (gimple_label_label (label_stmt))
2361 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2363 basic_block new_bb;
2364 gimple_stmt_iterator new_gsi;
2366 /* A non-reachable non-local label may still be referenced.
2367 But it no longer needs to carry the extra semantics of
2368 non-locality. */
2369 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2371 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2372 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2375 new_bb = bb->prev_bb;
2376 /* Don't move any labels into ENTRY block. */
2377 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2379 new_bb = single_succ (new_bb);
2380 gcc_assert (new_bb != bb);
2382 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2383 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2384 || (bb_to_omp_idx[bb->index]
2385 != bb_to_omp_idx[new_bb->index])))
2387 /* During cfg pass make sure to put orphaned labels
2388 into the right OMP region. */
2389 unsigned int i;
2390 int idx;
2391 new_bb = NULL;
2392 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2393 if (i >= NUM_FIXED_BLOCKS
2394 && idx == bb_to_omp_idx[bb->index]
2395 && i != (unsigned) bb->index)
2397 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2398 break;
2400 if (new_bb == NULL)
2402 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2403 gcc_assert (new_bb != bb);
2406 new_gsi = gsi_after_labels (new_bb);
2407 gsi_remove (&i, false);
2408 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2410 else
2412 /* Release SSA definitions. */
2413 release_defs (stmt);
2414 gsi_remove (&i, true);
2417 if (gsi_end_p (i))
2418 i = gsi_last_bb (bb);
2419 else
2420 gsi_prev (&i);
2424 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2425 bb_to_omp_idx[bb->index] = -1;
2426 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2427 bb->il.gimple.seq = NULL;
2428 bb->il.gimple.phi_nodes = NULL;
2432 /* Given a basic block BB and a value VAL for use in the final statement
2433 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2434 the edge that will be taken out of the block.
2435 If VAL is NULL_TREE, then the current value of the final statement's
2436 predicate or index is used.
2437 If the value does not match a unique edge, NULL is returned. */
2439 edge
2440 find_taken_edge (basic_block bb, tree val)
2442 gimple *stmt;
2444 stmt = *gsi_last_bb (bb);
2446 /* Handle ENTRY and EXIT. */
2447 if (!stmt)
2450 else if (gimple_code (stmt) == GIMPLE_COND)
2451 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2453 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2454 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2456 else if (computed_goto_p (stmt))
2458 /* Only optimize if the argument is a label, if the argument is
2459 not a label then we cannot construct a proper CFG.
2461 It may be the case that we only need to allow the LABEL_REF to
2462 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2463 appear inside a LABEL_EXPR just to be safe. */
2464 if (val
2465 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2466 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2467 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2470 /* Otherwise we only know the taken successor edge if it's unique. */
2471 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2474 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2475 statement, determine which of the outgoing edges will be taken out of the
2476 block. Return NULL if either edge may be taken. */
2478 static edge
2479 find_taken_edge_computed_goto (basic_block bb, tree val)
2481 basic_block dest;
2482 edge e = NULL;
2484 dest = label_to_block (cfun, val);
2485 if (dest)
2486 e = find_edge (bb, dest);
2488 /* It's possible for find_edge to return NULL here on invalid code
2489 that abuses the labels-as-values extension (e.g. code that attempts to
2490 jump *between* functions via stored labels-as-values; PR 84136).
2491 If so, then we simply return that NULL for the edge.
2492 We don't currently have a way of detecting such invalid code, so we
2493 can't assert that it was the case when a NULL edge occurs here. */
2495 return e;
2498 /* Given COND_STMT and a constant value VAL for use as the predicate,
2499 determine which of the two edges will be taken out of
2500 the statement's block. Return NULL if either edge may be taken.
2501 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2502 is used. */
2504 static edge
2505 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2507 edge true_edge, false_edge;
2509 if (val == NULL_TREE)
2511 /* Use the current value of the predicate. */
2512 if (gimple_cond_true_p (cond_stmt))
2513 val = integer_one_node;
2514 else if (gimple_cond_false_p (cond_stmt))
2515 val = integer_zero_node;
2516 else
2517 return NULL;
2519 else if (TREE_CODE (val) != INTEGER_CST)
2520 return NULL;
2522 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2523 &true_edge, &false_edge);
2525 return (integer_zerop (val) ? false_edge : true_edge);
2528 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2529 which edge will be taken out of the statement's block. Return NULL if any
2530 edge may be taken.
2531 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2532 is used. */
2534 edge
2535 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2537 basic_block dest_bb;
2538 edge e;
2539 tree taken_case;
2541 if (gimple_switch_num_labels (switch_stmt) == 1)
2542 taken_case = gimple_switch_default_label (switch_stmt);
2543 else
2545 if (val == NULL_TREE)
2546 val = gimple_switch_index (switch_stmt);
2547 if (TREE_CODE (val) != INTEGER_CST)
2548 return NULL;
2549 else
2550 taken_case = find_case_label_for_value (switch_stmt, val);
2552 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2554 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2555 gcc_assert (e);
2556 return e;
2560 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2561 We can make optimal use here of the fact that the case labels are
2562 sorted: We can do a binary search for a case matching VAL. */
2564 tree
2565 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2567 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2568 tree default_case = gimple_switch_default_label (switch_stmt);
2570 for (low = 0, high = n; high - low > 1; )
2572 size_t i = (high + low) / 2;
2573 tree t = gimple_switch_label (switch_stmt, i);
2574 int cmp;
2576 /* Cache the result of comparing CASE_LOW and val. */
2577 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2579 if (cmp > 0)
2580 high = i;
2581 else
2582 low = i;
2584 if (CASE_HIGH (t) == NULL)
2586 /* A singe-valued case label. */
2587 if (cmp == 0)
2588 return t;
2590 else
2592 /* A case range. We can only handle integer ranges. */
2593 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2594 return t;
2598 return default_case;
2602 /* Dump a basic block on stderr. */
2604 void
2605 gimple_debug_bb (basic_block bb)
2607 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2611 /* Dump basic block with index N on stderr. */
2613 basic_block
2614 gimple_debug_bb_n (int n)
2616 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2617 return BASIC_BLOCK_FOR_FN (cfun, n);
2621 /* Dump the CFG on stderr.
2623 FLAGS are the same used by the tree dumping functions
2624 (see TDF_* in dumpfile.h). */
2626 void
2627 gimple_debug_cfg (dump_flags_t flags)
2629 gimple_dump_cfg (stderr, flags);
2633 /* Dump the program showing basic block boundaries on the given FILE.
2635 FLAGS are the same used by the tree dumping functions (see TDF_* in
2636 tree.h). */
2638 void
2639 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2641 if (flags & TDF_DETAILS)
2643 dump_function_header (file, current_function_decl, flags);
2644 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2645 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2646 last_basic_block_for_fn (cfun));
2648 brief_dump_cfg (file, flags);
2649 fprintf (file, "\n");
2652 if (flags & TDF_STATS)
2653 dump_cfg_stats (file);
2655 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2659 /* Dump CFG statistics on FILE. */
2661 void
2662 dump_cfg_stats (FILE *file)
2664 static long max_num_merged_labels = 0;
2665 unsigned long size, total = 0;
2666 long num_edges;
2667 basic_block bb;
2668 const char * const fmt_str = "%-30s%-13s%12s\n";
2669 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2670 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2671 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2672 const char *funcname = current_function_name ();
2674 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2676 fprintf (file, "---------------------------------------------------------\n");
2677 fprintf (file, fmt_str, "", " Number of ", "Memory");
2678 fprintf (file, fmt_str, "", " instances ", "used ");
2679 fprintf (file, "---------------------------------------------------------\n");
2681 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2682 total += size;
2683 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2684 SIZE_AMOUNT (size));
2686 num_edges = 0;
2687 FOR_EACH_BB_FN (bb, cfun)
2688 num_edges += EDGE_COUNT (bb->succs);
2689 size = num_edges * sizeof (class edge_def);
2690 total += size;
2691 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2693 fprintf (file, "---------------------------------------------------------\n");
2694 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2695 SIZE_AMOUNT (total));
2696 fprintf (file, "---------------------------------------------------------\n");
2697 fprintf (file, "\n");
2699 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2700 max_num_merged_labels = cfg_stats.num_merged_labels;
2702 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2703 cfg_stats.num_merged_labels, max_num_merged_labels);
2705 fprintf (file, "\n");
2709 /* Dump CFG statistics on stderr. Keep extern so that it's always
2710 linked in the final executable. */
2712 DEBUG_FUNCTION void
2713 debug_cfg_stats (void)
2715 dump_cfg_stats (stderr);
2718 /*---------------------------------------------------------------------------
2719 Miscellaneous helpers
2720 ---------------------------------------------------------------------------*/
2722 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2723 flow. Transfers of control flow associated with EH are excluded. */
2725 static bool
2726 call_can_make_abnormal_goto (gimple *t)
2728 /* If the function has no non-local labels, then a call cannot make an
2729 abnormal transfer of control. */
2730 if (!cfun->has_nonlocal_label
2731 && !cfun->calls_setjmp)
2732 return false;
2734 /* Likewise if the call has no side effects. */
2735 if (!gimple_has_side_effects (t))
2736 return false;
2738 /* Likewise if the called function is leaf. */
2739 if (gimple_call_flags (t) & ECF_LEAF)
2740 return false;
2742 return true;
2746 /* Return true if T can make an abnormal transfer of control flow.
2747 Transfers of control flow associated with EH are excluded. */
2749 bool
2750 stmt_can_make_abnormal_goto (gimple *t)
2752 if (computed_goto_p (t))
2753 return true;
2754 if (is_gimple_call (t))
2755 return call_can_make_abnormal_goto (t);
2756 return false;
2760 /* Return true if T represents a stmt that always transfers control. */
2762 bool
2763 is_ctrl_stmt (gimple *t)
2765 switch (gimple_code (t))
2767 case GIMPLE_COND:
2768 case GIMPLE_SWITCH:
2769 case GIMPLE_GOTO:
2770 case GIMPLE_RETURN:
2771 case GIMPLE_RESX:
2772 return true;
2773 default:
2774 return false;
2779 /* Return true if T is a statement that may alter the flow of control
2780 (e.g., a call to a non-returning function). */
2782 bool
2783 is_ctrl_altering_stmt (gimple *t)
2785 gcc_assert (t);
2787 switch (gimple_code (t))
2789 case GIMPLE_CALL:
2790 /* Per stmt call flag indicates whether the call could alter
2791 controlflow. */
2792 if (gimple_call_ctrl_altering_p (t))
2793 return true;
2794 break;
2796 case GIMPLE_EH_DISPATCH:
2797 /* EH_DISPATCH branches to the individual catch handlers at
2798 this level of a try or allowed-exceptions region. It can
2799 fallthru to the next statement as well. */
2800 return true;
2802 case GIMPLE_ASM:
2803 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2804 return true;
2805 break;
2807 CASE_GIMPLE_OMP:
2808 /* OpenMP directives alter control flow. */
2809 return true;
2811 case GIMPLE_TRANSACTION:
2812 /* A transaction start alters control flow. */
2813 return true;
2815 default:
2816 break;
2819 /* If a statement can throw, it alters control flow. */
2820 return stmt_can_throw_internal (cfun, t);
2824 /* Return true if T is a simple local goto. */
2826 bool
2827 simple_goto_p (gimple *t)
2829 return (gimple_code (t) == GIMPLE_GOTO
2830 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2834 /* Return true if STMT should start a new basic block. PREV_STMT is
2835 the statement preceding STMT. It is used when STMT is a label or a
2836 case label. Labels should only start a new basic block if their
2837 previous statement wasn't a label. Otherwise, sequence of labels
2838 would generate unnecessary basic blocks that only contain a single
2839 label. */
2841 static inline bool
2842 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2844 if (stmt == NULL)
2845 return false;
2847 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2848 any nondebug stmts in the block. We don't want to start another
2849 block in this case: the debug stmt will already have started the
2850 one STMT would start if we weren't outputting debug stmts. */
2851 if (prev_stmt && is_gimple_debug (prev_stmt))
2852 return false;
2854 /* Labels start a new basic block only if the preceding statement
2855 wasn't a label of the same type. This prevents the creation of
2856 consecutive blocks that have nothing but a single label. */
2857 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2859 /* Nonlocal and computed GOTO targets always start a new block. */
2860 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2861 || FORCED_LABEL (gimple_label_label (label_stmt)))
2862 return true;
2864 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2866 if (DECL_NONLOCAL (gimple_label_label (plabel))
2867 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2868 return true;
2870 cfg_stats.num_merged_labels++;
2871 return false;
2873 else
2874 return true;
2876 else if (gimple_code (stmt) == GIMPLE_CALL)
2878 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2879 /* setjmp acts similar to a nonlocal GOTO target and thus should
2880 start a new block. */
2881 return true;
2882 if (gimple_call_internal_p (stmt, IFN_PHI)
2883 && prev_stmt
2884 && gimple_code (prev_stmt) != GIMPLE_LABEL
2885 && (gimple_code (prev_stmt) != GIMPLE_CALL
2886 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2887 /* PHI nodes start a new block unless preceeded by a label
2888 or another PHI. */
2889 return true;
2892 return false;
2896 /* Return true if T should end a basic block. */
2898 bool
2899 stmt_ends_bb_p (gimple *t)
2901 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2904 /* Remove block annotations and other data structures. */
2906 void
2907 delete_tree_cfg_annotations (struct function *fn)
2909 vec_free (label_to_block_map_for_fn (fn));
2912 /* Return the virtual phi in BB. */
2914 gphi *
2915 get_virtual_phi (basic_block bb)
2917 for (gphi_iterator gsi = gsi_start_phis (bb);
2918 !gsi_end_p (gsi);
2919 gsi_next (&gsi))
2921 gphi *phi = gsi.phi ();
2923 if (virtual_operand_p (PHI_RESULT (phi)))
2924 return phi;
2927 return NULL;
2930 /* Return the first statement in basic block BB. */
2932 gimple *
2933 first_stmt (basic_block bb)
2935 gimple_stmt_iterator i = gsi_start_bb (bb);
2936 gimple *stmt = NULL;
2938 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2940 gsi_next (&i);
2941 stmt = NULL;
2943 return stmt;
2946 /* Return the first non-label statement in basic block BB. */
2948 static gimple *
2949 first_non_label_stmt (basic_block bb)
2951 gimple_stmt_iterator i = gsi_start_bb (bb);
2952 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2953 gsi_next (&i);
2954 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2957 /* Return the last statement in basic block BB. */
2959 gimple *
2960 last_nondebug_stmt (basic_block bb)
2962 gimple_stmt_iterator i = gsi_last_bb (bb);
2963 gimple *stmt = NULL;
2965 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2967 gsi_prev (&i);
2968 stmt = NULL;
2970 return stmt;
2973 /* Return the last statement of an otherwise empty block. Return NULL
2974 if the block is totally empty, or if it contains more than one
2975 statement. */
2977 gimple *
2978 last_and_only_stmt (basic_block bb)
2980 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2981 gimple *last, *prev;
2983 if (gsi_end_p (i))
2984 return NULL;
2986 last = gsi_stmt (i);
2987 gsi_prev_nondebug (&i);
2988 if (gsi_end_p (i))
2989 return last;
2991 /* Empty statements should no longer appear in the instruction stream.
2992 Everything that might have appeared before should be deleted by
2993 remove_useless_stmts, and the optimizers should just gsi_remove
2994 instead of smashing with build_empty_stmt.
2996 Thus the only thing that should appear here in a block containing
2997 one executable statement is a label. */
2998 prev = gsi_stmt (i);
2999 if (gimple_code (prev) == GIMPLE_LABEL)
3000 return last;
3001 else
3002 return NULL;
3005 /* Returns the basic block after which the new basic block created
3006 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3007 near its "logical" location. This is of most help to humans looking
3008 at debugging dumps. */
3010 basic_block
3011 split_edge_bb_loc (edge edge_in)
3013 basic_block dest = edge_in->dest;
3014 basic_block dest_prev = dest->prev_bb;
3016 if (dest_prev)
3018 edge e = find_edge (dest_prev, dest);
3019 if (e && !(e->flags & EDGE_COMPLEX))
3020 return edge_in->src;
3022 return dest_prev;
3025 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3026 Abort on abnormal edges. */
3028 static basic_block
3029 gimple_split_edge (edge edge_in)
3031 basic_block new_bb, after_bb, dest;
3032 edge new_edge, e;
3034 /* Abnormal edges cannot be split. */
3035 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3037 dest = edge_in->dest;
3039 after_bb = split_edge_bb_loc (edge_in);
3041 new_bb = create_empty_bb (after_bb);
3042 new_bb->count = edge_in->count ();
3044 /* We want to avoid re-allocating PHIs when we first
3045 add the fallthru edge from new_bb to dest but we also
3046 want to avoid changing PHI argument order when
3047 first redirecting edge_in away from dest. The former
3048 avoids changing PHI argument order by adding them
3049 last and then the redirection swapping it back into
3050 place by means of unordered remove.
3051 So hack around things by temporarily removing all PHIs
3052 from the destination during the edge redirection and then
3053 making sure the edges stay in order. */
3054 gimple_seq saved_phis = phi_nodes (dest);
3055 unsigned old_dest_idx = edge_in->dest_idx;
3056 set_phi_nodes (dest, NULL);
3057 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3058 e = redirect_edge_and_branch (edge_in, new_bb);
3059 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3060 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3061 dest->il.gimple.phi_nodes = saved_phis;
3063 return new_bb;
3067 /* Verify properties of the address expression T whose base should be
3068 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3070 static bool
3071 verify_address (tree t, bool verify_addressable)
3073 bool old_constant;
3074 bool old_side_effects;
3075 bool new_constant;
3076 bool new_side_effects;
3078 old_constant = TREE_CONSTANT (t);
3079 old_side_effects = TREE_SIDE_EFFECTS (t);
3081 recompute_tree_invariant_for_addr_expr (t);
3082 new_side_effects = TREE_SIDE_EFFECTS (t);
3083 new_constant = TREE_CONSTANT (t);
3085 if (old_constant != new_constant)
3087 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3088 return true;
3090 if (old_side_effects != new_side_effects)
3092 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3093 return true;
3096 tree base = TREE_OPERAND (t, 0);
3097 while (handled_component_p (base))
3098 base = TREE_OPERAND (base, 0);
3100 if (!(VAR_P (base)
3101 || TREE_CODE (base) == PARM_DECL
3102 || TREE_CODE (base) == RESULT_DECL))
3103 return false;
3105 if (verify_addressable && !TREE_ADDRESSABLE (base))
3107 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3108 return true;
3111 return false;
3115 /* Verify if EXPR is a valid GIMPLE reference expression. If
3116 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3117 if there is an error, otherwise false. */
3119 static bool
3120 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3122 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3124 if (TREE_CODE (expr) == REALPART_EXPR
3125 || TREE_CODE (expr) == IMAGPART_EXPR
3126 || TREE_CODE (expr) == BIT_FIELD_REF
3127 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3129 tree op = TREE_OPERAND (expr, 0);
3130 if (TREE_CODE (expr) != VIEW_CONVERT_EXPR
3131 && !is_gimple_reg_type (TREE_TYPE (expr)))
3133 error ("non-scalar %qs", code_name);
3134 return true;
3137 if (TREE_CODE (expr) == BIT_FIELD_REF)
3139 tree t1 = TREE_OPERAND (expr, 1);
3140 tree t2 = TREE_OPERAND (expr, 2);
3141 poly_uint64 size, bitpos;
3142 if (!poly_int_tree_p (t1, &size)
3143 || !poly_int_tree_p (t2, &bitpos)
3144 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3145 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3147 error ("invalid position or size operand to %qs", code_name);
3148 return true;
3150 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3151 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3153 error ("integral result type precision does not match "
3154 "field size of %qs", code_name);
3155 return true;
3157 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3158 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3159 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3160 size))
3162 error ("mode size of non-integral result does not "
3163 "match field size of %qs",
3164 code_name);
3165 return true;
3167 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3168 && !type_has_mode_precision_p (TREE_TYPE (op)))
3170 error ("%qs of non-mode-precision operand", code_name);
3171 return true;
3173 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3174 && maybe_gt (size + bitpos,
3175 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3177 error ("position plus size exceeds size of referenced object in "
3178 "%qs", code_name);
3179 return true;
3183 if ((TREE_CODE (expr) == REALPART_EXPR
3184 || TREE_CODE (expr) == IMAGPART_EXPR)
3185 && !useless_type_conversion_p (TREE_TYPE (expr),
3186 TREE_TYPE (TREE_TYPE (op))))
3188 error ("type mismatch in %qs reference", code_name);
3189 debug_generic_stmt (TREE_TYPE (expr));
3190 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3191 return true;
3194 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3196 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3197 that their operand is not a register an invariant when
3198 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3199 bug). Otherwise there is nothing to verify, gross mismatches at
3200 most invoke undefined behavior. */
3201 if (require_lvalue
3202 && (is_gimple_reg (op) || is_gimple_min_invariant (op)))
3204 error ("conversion of %qs on the left hand side of %qs",
3205 get_tree_code_name (TREE_CODE (op)), code_name);
3206 debug_generic_stmt (expr);
3207 return true;
3209 else if (is_gimple_reg (op)
3210 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3212 error ("conversion of register to a different size in %qs",
3213 code_name);
3214 debug_generic_stmt (expr);
3215 return true;
3219 expr = op;
3222 bool require_non_reg = false;
3223 while (handled_component_p (expr))
3225 require_non_reg = true;
3226 code_name = get_tree_code_name (TREE_CODE (expr));
3228 if (TREE_CODE (expr) == REALPART_EXPR
3229 || TREE_CODE (expr) == IMAGPART_EXPR
3230 || TREE_CODE (expr) == BIT_FIELD_REF)
3232 error ("non-top-level %qs", code_name);
3233 return true;
3236 tree op = TREE_OPERAND (expr, 0);
3238 if (TREE_CODE (expr) == ARRAY_REF
3239 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3241 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3242 || (TREE_OPERAND (expr, 2)
3243 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3244 || (TREE_OPERAND (expr, 3)
3245 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3247 error ("invalid operands to %qs", code_name);
3248 debug_generic_stmt (expr);
3249 return true;
3253 /* Verify if the reference array element types are compatible. */
3254 if (TREE_CODE (expr) == ARRAY_REF
3255 && !useless_type_conversion_p (TREE_TYPE (expr),
3256 TREE_TYPE (TREE_TYPE (op))))
3258 error ("type mismatch in %qs", code_name);
3259 debug_generic_stmt (TREE_TYPE (expr));
3260 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3261 return true;
3263 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3264 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3265 TREE_TYPE (TREE_TYPE (op))))
3267 error ("type mismatch in %qs", code_name);
3268 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3269 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3270 return true;
3273 if (TREE_CODE (expr) == COMPONENT_REF)
3275 if (TREE_OPERAND (expr, 2)
3276 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3278 error ("invalid %qs offset operator", code_name);
3279 return true;
3281 if (!useless_type_conversion_p (TREE_TYPE (expr),
3282 TREE_TYPE (TREE_OPERAND (expr, 1))))
3284 error ("type mismatch in %qs", code_name);
3285 debug_generic_stmt (TREE_TYPE (expr));
3286 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3287 return true;
3291 expr = op;
3294 code_name = get_tree_code_name (TREE_CODE (expr));
3296 if (TREE_CODE (expr) == MEM_REF)
3298 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3299 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3300 && verify_address (TREE_OPERAND (expr, 0), false)))
3302 error ("invalid address operand in %qs", code_name);
3303 debug_generic_stmt (expr);
3304 return true;
3306 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3307 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3309 error ("invalid offset operand in %qs", code_name);
3310 debug_generic_stmt (expr);
3311 return true;
3313 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3314 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3316 error ("invalid clique in %qs", code_name);
3317 debug_generic_stmt (expr);
3318 return true;
3321 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3323 if (!TMR_BASE (expr)
3324 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3325 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3326 && verify_address (TMR_BASE (expr), false)))
3328 error ("invalid address operand in %qs", code_name);
3329 return true;
3331 if (!TMR_OFFSET (expr)
3332 || !poly_int_tree_p (TMR_OFFSET (expr))
3333 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3335 error ("invalid offset operand in %qs", code_name);
3336 debug_generic_stmt (expr);
3337 return true;
3339 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3340 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3342 error ("invalid clique in %qs", code_name);
3343 debug_generic_stmt (expr);
3344 return true;
3347 else if (INDIRECT_REF_P (expr))
3349 error ("%qs in gimple IL", code_name);
3350 debug_generic_stmt (expr);
3351 return true;
3353 else if (require_non_reg
3354 && (is_gimple_reg (expr)
3355 || (is_gimple_min_invariant (expr)
3356 /* STRING_CSTs are representatives of the string table
3357 entry which lives in memory. */
3358 && TREE_CODE (expr) != STRING_CST)))
3360 error ("%qs as base where non-register is required", code_name);
3361 debug_generic_stmt (expr);
3362 return true;
3365 if (!require_lvalue
3366 && (is_gimple_reg (expr) || is_gimple_min_invariant (expr)))
3367 return false;
3369 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3370 return false;
3372 if (TREE_CODE (expr) != TARGET_MEM_REF
3373 && TREE_CODE (expr) != MEM_REF)
3375 error ("invalid expression for min lvalue");
3376 return true;
3379 return false;
3382 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3383 list of pointer-to types that is trivially convertible to DEST. */
3385 static bool
3386 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3388 tree src;
3390 if (!TYPE_POINTER_TO (src_obj))
3391 return true;
3393 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3394 if (useless_type_conversion_p (dest, src))
3395 return true;
3397 return false;
3400 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3401 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3403 static bool
3404 valid_fixed_convert_types_p (tree type1, tree type2)
3406 return (FIXED_POINT_TYPE_P (type1)
3407 && (INTEGRAL_TYPE_P (type2)
3408 || SCALAR_FLOAT_TYPE_P (type2)
3409 || FIXED_POINT_TYPE_P (type2)));
3412 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3413 is a problem, otherwise false. */
3415 static bool
3416 verify_gimple_call (gcall *stmt)
3418 tree fn = gimple_call_fn (stmt);
3419 tree fntype, fndecl;
3420 unsigned i;
3422 if (gimple_call_internal_p (stmt))
3424 if (fn)
3426 error ("gimple call has two targets");
3427 debug_generic_stmt (fn);
3428 return true;
3431 else
3433 if (!fn)
3435 error ("gimple call has no target");
3436 return true;
3440 if (fn && !is_gimple_call_addr (fn))
3442 error ("invalid function in gimple call");
3443 debug_generic_stmt (fn);
3444 return true;
3447 if (fn
3448 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3449 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3450 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3452 error ("non-function in gimple call");
3453 return true;
3456 fndecl = gimple_call_fndecl (stmt);
3457 if (fndecl
3458 && TREE_CODE (fndecl) == FUNCTION_DECL
3459 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3460 && !DECL_PURE_P (fndecl)
3461 && !TREE_READONLY (fndecl))
3463 error ("invalid pure const state for function");
3464 return true;
3467 tree lhs = gimple_call_lhs (stmt);
3468 if (lhs
3469 && (!is_gimple_reg (lhs)
3470 && (!is_gimple_lvalue (lhs)
3471 || verify_types_in_gimple_reference
3472 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3473 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3475 error ("invalid LHS in gimple call");
3476 return true;
3479 if (gimple_call_ctrl_altering_p (stmt)
3480 && gimple_call_noreturn_p (stmt)
3481 && should_remove_lhs_p (lhs))
3483 error ("LHS in %<noreturn%> call");
3484 return true;
3487 fntype = gimple_call_fntype (stmt);
3488 if (fntype
3489 && lhs
3490 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3491 /* ??? At least C++ misses conversions at assignments from
3492 void * call results.
3493 For now simply allow arbitrary pointer type conversions. */
3494 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3495 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3497 error ("invalid conversion in gimple call");
3498 debug_generic_stmt (TREE_TYPE (lhs));
3499 debug_generic_stmt (TREE_TYPE (fntype));
3500 return true;
3503 if (gimple_call_chain (stmt)
3504 && !is_gimple_val (gimple_call_chain (stmt)))
3506 error ("invalid static chain in gimple call");
3507 debug_generic_stmt (gimple_call_chain (stmt));
3508 return true;
3511 /* If there is a static chain argument, the call should either be
3512 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3513 if (gimple_call_chain (stmt)
3514 && fndecl
3515 && !DECL_STATIC_CHAIN (fndecl))
3517 error ("static chain with function that doesn%'t use one");
3518 return true;
3521 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3523 switch (DECL_FUNCTION_CODE (fndecl))
3525 case BUILT_IN_UNREACHABLE:
3526 case BUILT_IN_UNREACHABLE_TRAP:
3527 case BUILT_IN_TRAP:
3528 if (gimple_call_num_args (stmt) > 0)
3530 /* Built-in unreachable with parameters might not be caught by
3531 undefined behavior sanitizer. Front-ends do check users do not
3532 call them that way but we also produce calls to
3533 __builtin_unreachable internally, for example when IPA figures
3534 out a call cannot happen in a legal program. In such cases,
3535 we must make sure arguments are stripped off. */
3536 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3537 "with arguments");
3538 return true;
3540 break;
3541 default:
3542 break;
3546 /* For a call to .DEFERRED_INIT,
3547 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3548 we should guarantee that when the 1st argument is a constant, it should
3549 be the same as the size of the LHS. */
3551 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3553 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3554 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3556 if (TREE_CODE (lhs) == SSA_NAME)
3557 lhs = SSA_NAME_VAR (lhs);
3559 poly_uint64 size_from_arg0, size_from_lhs;
3560 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3561 &size_from_arg0);
3562 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3563 &size_from_lhs);
3564 if (is_constant_size_arg0 && is_constant_size_lhs)
3565 if (maybe_ne (size_from_arg0, size_from_lhs))
3567 error ("%<DEFERRED_INIT%> calls should have same "
3568 "constant size for the first argument and LHS");
3569 return true;
3573 /* ??? The C frontend passes unpromoted arguments in case it
3574 didn't see a function declaration before the call. So for now
3575 leave the call arguments mostly unverified. Once we gimplify
3576 unit-at-a-time we have a chance to fix this. */
3577 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3579 tree arg = gimple_call_arg (stmt, i);
3580 if ((is_gimple_reg_type (TREE_TYPE (arg))
3581 && !is_gimple_val (arg))
3582 || (!is_gimple_reg_type (TREE_TYPE (arg))
3583 && !is_gimple_lvalue (arg)))
3585 error ("invalid argument to gimple call");
3586 debug_generic_expr (arg);
3587 return true;
3589 if (!is_gimple_reg (arg))
3591 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3592 arg = TREE_OPERAND (arg, 0);
3593 if (verify_types_in_gimple_reference (arg, false))
3594 return true;
3598 return false;
3601 /* Verifies the gimple comparison with the result type TYPE and
3602 the operands OP0 and OP1, comparison code is CODE. */
3604 static bool
3605 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3607 tree op0_type = TREE_TYPE (op0);
3608 tree op1_type = TREE_TYPE (op1);
3610 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3612 error ("invalid operands in gimple comparison");
3613 return true;
3616 /* For comparisons we do not have the operations type as the
3617 effective type the comparison is carried out in. Instead
3618 we require that either the first operand is trivially
3619 convertible into the second, or the other way around. */
3620 if (!useless_type_conversion_p (op0_type, op1_type)
3621 && !useless_type_conversion_p (op1_type, op0_type))
3623 error ("mismatching comparison operand types");
3624 debug_generic_expr (op0_type);
3625 debug_generic_expr (op1_type);
3626 return true;
3629 /* The resulting type of a comparison may be an effective boolean type. */
3630 if (INTEGRAL_TYPE_P (type)
3631 && (TREE_CODE (type) == BOOLEAN_TYPE
3632 || TYPE_PRECISION (type) == 1))
3634 if ((VECTOR_TYPE_P (op0_type)
3635 || VECTOR_TYPE_P (op1_type))
3636 && code != EQ_EXPR && code != NE_EXPR
3637 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3638 && !VECTOR_INTEGER_TYPE_P (op0_type))
3640 error ("unsupported operation or type for vector comparison"
3641 " returning a boolean");
3642 debug_generic_expr (op0_type);
3643 debug_generic_expr (op1_type);
3644 return true;
3647 /* Or a boolean vector type with the same element count
3648 as the comparison operand types. */
3649 else if (VECTOR_TYPE_P (type)
3650 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3652 if (TREE_CODE (op0_type) != VECTOR_TYPE
3653 || TREE_CODE (op1_type) != VECTOR_TYPE)
3655 error ("non-vector operands in vector comparison");
3656 debug_generic_expr (op0_type);
3657 debug_generic_expr (op1_type);
3658 return true;
3661 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3662 TYPE_VECTOR_SUBPARTS (op0_type)))
3664 error ("invalid vector comparison resulting type");
3665 debug_generic_expr (type);
3666 return true;
3669 else
3671 error ("bogus comparison result type");
3672 debug_generic_expr (type);
3673 return true;
3676 return false;
3679 /* Verify a gimple assignment statement STMT with an unary rhs.
3680 Returns true if anything is wrong. */
3682 static bool
3683 verify_gimple_assign_unary (gassign *stmt)
3685 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3686 tree lhs = gimple_assign_lhs (stmt);
3687 tree lhs_type = TREE_TYPE (lhs);
3688 tree rhs1 = gimple_assign_rhs1 (stmt);
3689 tree rhs1_type = TREE_TYPE (rhs1);
3691 if (!is_gimple_reg (lhs))
3693 error ("non-register as LHS of unary operation");
3694 return true;
3697 if (!is_gimple_val (rhs1))
3699 error ("invalid operand in unary operation");
3700 return true;
3703 const char* const code_name = get_tree_code_name (rhs_code);
3705 /* First handle conversions. */
3706 switch (rhs_code)
3708 CASE_CONVERT:
3710 /* Allow conversions between vectors with the same number of elements,
3711 provided that the conversion is OK for the element types too. */
3712 if (VECTOR_TYPE_P (lhs_type)
3713 && VECTOR_TYPE_P (rhs1_type)
3714 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3715 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3717 lhs_type = TREE_TYPE (lhs_type);
3718 rhs1_type = TREE_TYPE (rhs1_type);
3720 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3722 error ("invalid vector types in nop conversion");
3723 debug_generic_expr (lhs_type);
3724 debug_generic_expr (rhs1_type);
3725 return true;
3728 /* Allow conversions from pointer type to integral type only if
3729 there is no sign or zero extension involved.
3730 For targets were the precision of ptrofftype doesn't match that
3731 of pointers we allow conversions to types where
3732 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3733 if ((POINTER_TYPE_P (lhs_type)
3734 && INTEGRAL_TYPE_P (rhs1_type))
3735 || (POINTER_TYPE_P (rhs1_type)
3736 && INTEGRAL_TYPE_P (lhs_type)
3737 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3738 #if defined(POINTERS_EXTEND_UNSIGNED)
3739 || (TYPE_MODE (rhs1_type) == ptr_mode
3740 && (TYPE_PRECISION (lhs_type)
3741 == BITS_PER_WORD /* word_mode */
3742 || (TYPE_PRECISION (lhs_type)
3743 == GET_MODE_PRECISION (Pmode))))
3744 #endif
3746 return false;
3748 /* Allow conversion from integral to offset type and vice versa. */
3749 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3750 && INTEGRAL_TYPE_P (rhs1_type))
3751 || (INTEGRAL_TYPE_P (lhs_type)
3752 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3753 return false;
3755 /* Otherwise assert we are converting between types of the
3756 same kind. */
3757 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3759 error ("invalid types in nop conversion");
3760 debug_generic_expr (lhs_type);
3761 debug_generic_expr (rhs1_type);
3762 return true;
3765 return false;
3768 case ADDR_SPACE_CONVERT_EXPR:
3770 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3771 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3772 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3774 error ("invalid types in address space conversion");
3775 debug_generic_expr (lhs_type);
3776 debug_generic_expr (rhs1_type);
3777 return true;
3780 return false;
3783 case FIXED_CONVERT_EXPR:
3785 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3786 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3788 error ("invalid types in fixed-point conversion");
3789 debug_generic_expr (lhs_type);
3790 debug_generic_expr (rhs1_type);
3791 return true;
3794 return false;
3797 case FLOAT_EXPR:
3799 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3800 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3801 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3803 error ("invalid types in conversion to floating-point");
3804 debug_generic_expr (lhs_type);
3805 debug_generic_expr (rhs1_type);
3806 return true;
3809 return false;
3812 case FIX_TRUNC_EXPR:
3814 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3815 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3816 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3818 error ("invalid types in conversion to integer");
3819 debug_generic_expr (lhs_type);
3820 debug_generic_expr (rhs1_type);
3821 return true;
3824 return false;
3827 case VEC_UNPACK_HI_EXPR:
3828 case VEC_UNPACK_LO_EXPR:
3829 case VEC_UNPACK_FLOAT_HI_EXPR:
3830 case VEC_UNPACK_FLOAT_LO_EXPR:
3831 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3832 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3833 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3834 || TREE_CODE (lhs_type) != VECTOR_TYPE
3835 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3836 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3837 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3838 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3839 || ((rhs_code == VEC_UNPACK_HI_EXPR
3840 || rhs_code == VEC_UNPACK_LO_EXPR)
3841 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3842 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3843 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3844 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3845 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3846 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3847 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3848 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3849 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3850 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3851 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3852 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3853 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3854 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3855 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3856 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3858 error ("type mismatch in %qs expression", code_name);
3859 debug_generic_expr (lhs_type);
3860 debug_generic_expr (rhs1_type);
3861 return true;
3864 return false;
3866 case NEGATE_EXPR:
3867 case ABS_EXPR:
3868 case BIT_NOT_EXPR:
3869 case PAREN_EXPR:
3870 case CONJ_EXPR:
3871 /* Disallow pointer and offset types for many of the unary gimple. */
3872 if (POINTER_TYPE_P (lhs_type)
3873 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3875 error ("invalid types for %qs", code_name);
3876 debug_generic_expr (lhs_type);
3877 debug_generic_expr (rhs1_type);
3878 return true;
3880 break;
3882 case ABSU_EXPR:
3883 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3884 || !TYPE_UNSIGNED (lhs_type)
3885 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3886 || TYPE_UNSIGNED (rhs1_type)
3887 || element_precision (lhs_type) != element_precision (rhs1_type))
3889 error ("invalid types for %qs", code_name);
3890 debug_generic_expr (lhs_type);
3891 debug_generic_expr (rhs1_type);
3892 return true;
3894 return false;
3896 case VEC_DUPLICATE_EXPR:
3897 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3898 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3900 error ("%qs should be from a scalar to a like vector", code_name);
3901 debug_generic_expr (lhs_type);
3902 debug_generic_expr (rhs1_type);
3903 return true;
3905 return false;
3907 default:
3908 gcc_unreachable ();
3911 /* For the remaining codes assert there is no conversion involved. */
3912 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3914 error ("non-trivial conversion in unary operation");
3915 debug_generic_expr (lhs_type);
3916 debug_generic_expr (rhs1_type);
3917 return true;
3920 return false;
3923 /* Verify a gimple assignment statement STMT with a binary rhs.
3924 Returns true if anything is wrong. */
3926 static bool
3927 verify_gimple_assign_binary (gassign *stmt)
3929 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3930 tree lhs = gimple_assign_lhs (stmt);
3931 tree lhs_type = TREE_TYPE (lhs);
3932 tree rhs1 = gimple_assign_rhs1 (stmt);
3933 tree rhs1_type = TREE_TYPE (rhs1);
3934 tree rhs2 = gimple_assign_rhs2 (stmt);
3935 tree rhs2_type = TREE_TYPE (rhs2);
3937 if (!is_gimple_reg (lhs))
3939 error ("non-register as LHS of binary operation");
3940 return true;
3943 if (!is_gimple_val (rhs1)
3944 || !is_gimple_val (rhs2))
3946 error ("invalid operands in binary operation");
3947 return true;
3950 const char* const code_name = get_tree_code_name (rhs_code);
3952 /* First handle operations that involve different types. */
3953 switch (rhs_code)
3955 case COMPLEX_EXPR:
3957 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3958 || !(INTEGRAL_TYPE_P (rhs1_type)
3959 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3960 || !(INTEGRAL_TYPE_P (rhs2_type)
3961 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3963 error ("type mismatch in %qs", code_name);
3964 debug_generic_expr (lhs_type);
3965 debug_generic_expr (rhs1_type);
3966 debug_generic_expr (rhs2_type);
3967 return true;
3970 return false;
3973 case LSHIFT_EXPR:
3974 case RSHIFT_EXPR:
3975 case LROTATE_EXPR:
3976 case RROTATE_EXPR:
3978 /* Shifts and rotates are ok on integral types, fixed point
3979 types and integer vector types. */
3980 if ((!INTEGRAL_TYPE_P (rhs1_type)
3981 && !FIXED_POINT_TYPE_P (rhs1_type)
3982 && ! (VECTOR_TYPE_P (rhs1_type)
3983 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3984 || (!INTEGRAL_TYPE_P (rhs2_type)
3985 /* Vector shifts of vectors are also ok. */
3986 && ! (VECTOR_TYPE_P (rhs1_type)
3987 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3988 && VECTOR_TYPE_P (rhs2_type)
3989 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3990 || !useless_type_conversion_p (lhs_type, rhs1_type))
3992 error ("type mismatch in %qs", code_name);
3993 debug_generic_expr (lhs_type);
3994 debug_generic_expr (rhs1_type);
3995 debug_generic_expr (rhs2_type);
3996 return true;
3999 return false;
4002 case WIDEN_LSHIFT_EXPR:
4004 if (!INTEGRAL_TYPE_P (lhs_type)
4005 || !INTEGRAL_TYPE_P (rhs1_type)
4006 || TREE_CODE (rhs2) != INTEGER_CST
4007 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
4009 error ("type mismatch in %qs", code_name);
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs1_type);
4012 debug_generic_expr (rhs2_type);
4013 return true;
4016 return false;
4019 case VEC_WIDEN_LSHIFT_HI_EXPR:
4020 case VEC_WIDEN_LSHIFT_LO_EXPR:
4022 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4023 || TREE_CODE (lhs_type) != VECTOR_TYPE
4024 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4025 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4026 || TREE_CODE (rhs2) != INTEGER_CST
4027 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4028 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4030 error ("type mismatch in %qs", code_name);
4031 debug_generic_expr (lhs_type);
4032 debug_generic_expr (rhs1_type);
4033 debug_generic_expr (rhs2_type);
4034 return true;
4037 return false;
4040 case PLUS_EXPR:
4041 case MINUS_EXPR:
4043 tree lhs_etype = lhs_type;
4044 tree rhs1_etype = rhs1_type;
4045 tree rhs2_etype = rhs2_type;
4046 if (VECTOR_TYPE_P (lhs_type))
4048 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4049 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4051 error ("invalid non-vector operands to %qs", code_name);
4052 return true;
4054 lhs_etype = TREE_TYPE (lhs_type);
4055 rhs1_etype = TREE_TYPE (rhs1_type);
4056 rhs2_etype = TREE_TYPE (rhs2_type);
4058 if (POINTER_TYPE_P (lhs_etype)
4059 || POINTER_TYPE_P (rhs1_etype)
4060 || POINTER_TYPE_P (rhs2_etype))
4062 error ("invalid (pointer) operands %qs", code_name);
4063 return true;
4066 /* Continue with generic binary expression handling. */
4067 break;
4070 case POINTER_PLUS_EXPR:
4072 if (!POINTER_TYPE_P (rhs1_type)
4073 || !useless_type_conversion_p (lhs_type, rhs1_type)
4074 || !ptrofftype_p (rhs2_type))
4076 error ("type mismatch in %qs", code_name);
4077 debug_generic_stmt (lhs_type);
4078 debug_generic_stmt (rhs1_type);
4079 debug_generic_stmt (rhs2_type);
4080 return true;
4083 return false;
4086 case POINTER_DIFF_EXPR:
4088 if (!POINTER_TYPE_P (rhs1_type)
4089 || !POINTER_TYPE_P (rhs2_type)
4090 /* Because we special-case pointers to void we allow difference
4091 of arbitrary pointers with the same mode. */
4092 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4093 || !INTEGRAL_TYPE_P (lhs_type)
4094 || TYPE_UNSIGNED (lhs_type)
4095 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4097 error ("type mismatch in %qs", code_name);
4098 debug_generic_stmt (lhs_type);
4099 debug_generic_stmt (rhs1_type);
4100 debug_generic_stmt (rhs2_type);
4101 return true;
4104 return false;
4107 case TRUTH_ANDIF_EXPR:
4108 case TRUTH_ORIF_EXPR:
4109 case TRUTH_AND_EXPR:
4110 case TRUTH_OR_EXPR:
4111 case TRUTH_XOR_EXPR:
4113 gcc_unreachable ();
4115 case LT_EXPR:
4116 case LE_EXPR:
4117 case GT_EXPR:
4118 case GE_EXPR:
4119 case EQ_EXPR:
4120 case NE_EXPR:
4121 case UNORDERED_EXPR:
4122 case ORDERED_EXPR:
4123 case UNLT_EXPR:
4124 case UNLE_EXPR:
4125 case UNGT_EXPR:
4126 case UNGE_EXPR:
4127 case UNEQ_EXPR:
4128 case LTGT_EXPR:
4129 /* Comparisons are also binary, but the result type is not
4130 connected to the operand types. */
4131 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4133 case WIDEN_MULT_EXPR:
4134 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4135 return true;
4136 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4137 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4139 case WIDEN_SUM_EXPR:
4141 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4142 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4143 && ((!INTEGRAL_TYPE_P (rhs1_type)
4144 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4145 || (!INTEGRAL_TYPE_P (lhs_type)
4146 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4147 || !useless_type_conversion_p (lhs_type, rhs2_type)
4148 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4149 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4151 error ("type mismatch in %qs", code_name);
4152 debug_generic_expr (lhs_type);
4153 debug_generic_expr (rhs1_type);
4154 debug_generic_expr (rhs2_type);
4155 return true;
4157 return false;
4160 case VEC_WIDEN_MULT_HI_EXPR:
4161 case VEC_WIDEN_MULT_LO_EXPR:
4162 case VEC_WIDEN_MULT_EVEN_EXPR:
4163 case VEC_WIDEN_MULT_ODD_EXPR:
4165 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4166 || TREE_CODE (lhs_type) != VECTOR_TYPE
4167 || !types_compatible_p (rhs1_type, rhs2_type)
4168 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4169 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4171 error ("type mismatch in %qs", code_name);
4172 debug_generic_expr (lhs_type);
4173 debug_generic_expr (rhs1_type);
4174 debug_generic_expr (rhs2_type);
4175 return true;
4177 return false;
4180 case VEC_PACK_TRUNC_EXPR:
4181 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4182 vector boolean types. */
4183 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4184 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4185 && types_compatible_p (rhs1_type, rhs2_type)
4186 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4187 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4188 return false;
4190 /* Fallthru. */
4191 case VEC_PACK_SAT_EXPR:
4192 case VEC_PACK_FIX_TRUNC_EXPR:
4194 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4195 || TREE_CODE (lhs_type) != VECTOR_TYPE
4196 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4197 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4198 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4199 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4200 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4201 || !types_compatible_p (rhs1_type, rhs2_type)
4202 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4203 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4204 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4205 TYPE_VECTOR_SUBPARTS (lhs_type)))
4207 error ("type mismatch in %qs", code_name);
4208 debug_generic_expr (lhs_type);
4209 debug_generic_expr (rhs1_type);
4210 debug_generic_expr (rhs2_type);
4211 return true;
4214 return false;
4217 case VEC_PACK_FLOAT_EXPR:
4218 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4219 || TREE_CODE (lhs_type) != VECTOR_TYPE
4220 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4221 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4222 || !types_compatible_p (rhs1_type, rhs2_type)
4223 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4224 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4225 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4226 TYPE_VECTOR_SUBPARTS (lhs_type)))
4228 error ("type mismatch in %qs", code_name);
4229 debug_generic_expr (lhs_type);
4230 debug_generic_expr (rhs1_type);
4231 debug_generic_expr (rhs2_type);
4232 return true;
4235 return false;
4237 case MULT_EXPR:
4238 case MULT_HIGHPART_EXPR:
4239 case TRUNC_DIV_EXPR:
4240 case CEIL_DIV_EXPR:
4241 case FLOOR_DIV_EXPR:
4242 case ROUND_DIV_EXPR:
4243 case TRUNC_MOD_EXPR:
4244 case CEIL_MOD_EXPR:
4245 case FLOOR_MOD_EXPR:
4246 case ROUND_MOD_EXPR:
4247 case RDIV_EXPR:
4248 case EXACT_DIV_EXPR:
4249 case BIT_IOR_EXPR:
4250 case BIT_XOR_EXPR:
4251 /* Disallow pointer and offset types for many of the binary gimple. */
4252 if (POINTER_TYPE_P (lhs_type)
4253 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4255 error ("invalid types for %qs", code_name);
4256 debug_generic_expr (lhs_type);
4257 debug_generic_expr (rhs1_type);
4258 debug_generic_expr (rhs2_type);
4259 return true;
4261 /* Continue with generic binary expression handling. */
4262 break;
4264 case MIN_EXPR:
4265 case MAX_EXPR:
4266 /* Continue with generic binary expression handling. */
4267 break;
4269 case BIT_AND_EXPR:
4270 if (POINTER_TYPE_P (lhs_type)
4271 && TREE_CODE (rhs2) == INTEGER_CST)
4272 break;
4273 /* Disallow pointer and offset types for many of the binary gimple. */
4274 if (POINTER_TYPE_P (lhs_type)
4275 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4277 error ("invalid types for %qs", code_name);
4278 debug_generic_expr (lhs_type);
4279 debug_generic_expr (rhs1_type);
4280 debug_generic_expr (rhs2_type);
4281 return true;
4283 /* Continue with generic binary expression handling. */
4284 break;
4286 case VEC_SERIES_EXPR:
4287 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4289 error ("type mismatch in %qs", code_name);
4290 debug_generic_expr (rhs1_type);
4291 debug_generic_expr (rhs2_type);
4292 return true;
4294 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4295 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4297 error ("vector type expected in %qs", code_name);
4298 debug_generic_expr (lhs_type);
4299 return true;
4301 return false;
4303 default:
4304 gcc_unreachable ();
4307 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4308 || !useless_type_conversion_p (lhs_type, rhs2_type))
4310 error ("type mismatch in binary expression");
4311 debug_generic_stmt (lhs_type);
4312 debug_generic_stmt (rhs1_type);
4313 debug_generic_stmt (rhs2_type);
4314 return true;
4317 return false;
4320 /* Verify a gimple assignment statement STMT with a ternary rhs.
4321 Returns true if anything is wrong. */
4323 static bool
4324 verify_gimple_assign_ternary (gassign *stmt)
4326 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4327 tree lhs = gimple_assign_lhs (stmt);
4328 tree lhs_type = TREE_TYPE (lhs);
4329 tree rhs1 = gimple_assign_rhs1 (stmt);
4330 tree rhs1_type = TREE_TYPE (rhs1);
4331 tree rhs2 = gimple_assign_rhs2 (stmt);
4332 tree rhs2_type = TREE_TYPE (rhs2);
4333 tree rhs3 = gimple_assign_rhs3 (stmt);
4334 tree rhs3_type = TREE_TYPE (rhs3);
4336 if (!is_gimple_reg (lhs))
4338 error ("non-register as LHS of ternary operation");
4339 return true;
4342 if (!is_gimple_val (rhs1)
4343 || !is_gimple_val (rhs2)
4344 || !is_gimple_val (rhs3))
4346 error ("invalid operands in ternary operation");
4347 return true;
4350 const char* const code_name = get_tree_code_name (rhs_code);
4352 /* First handle operations that involve different types. */
4353 switch (rhs_code)
4355 case WIDEN_MULT_PLUS_EXPR:
4356 case WIDEN_MULT_MINUS_EXPR:
4357 if ((!INTEGRAL_TYPE_P (rhs1_type)
4358 && !FIXED_POINT_TYPE_P (rhs1_type))
4359 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4360 || !useless_type_conversion_p (lhs_type, rhs3_type)
4361 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4362 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4364 error ("type mismatch in %qs", code_name);
4365 debug_generic_expr (lhs_type);
4366 debug_generic_expr (rhs1_type);
4367 debug_generic_expr (rhs2_type);
4368 debug_generic_expr (rhs3_type);
4369 return true;
4371 break;
4373 case VEC_COND_EXPR:
4374 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4375 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4376 TYPE_VECTOR_SUBPARTS (lhs_type)))
4378 error ("the first argument of a %qs must be of a "
4379 "boolean vector type of the same number of elements "
4380 "as the result", code_name);
4381 debug_generic_expr (lhs_type);
4382 debug_generic_expr (rhs1_type);
4383 return true;
4385 /* Fallthrough. */
4386 case COND_EXPR:
4387 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4388 || !useless_type_conversion_p (lhs_type, rhs3_type))
4390 error ("type mismatch in %qs", code_name);
4391 debug_generic_expr (lhs_type);
4392 debug_generic_expr (rhs2_type);
4393 debug_generic_expr (rhs3_type);
4394 return true;
4396 break;
4398 case VEC_PERM_EXPR:
4399 /* If permute is constant, then we allow for lhs and rhs
4400 to have different vector types, provided:
4401 (1) lhs, rhs1, rhs2 have same element type.
4402 (2) rhs3 vector is constant and has integer element type.
4403 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4405 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4406 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4407 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4408 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4410 error ("vector types expected in %qs", code_name);
4411 debug_generic_expr (lhs_type);
4412 debug_generic_expr (rhs1_type);
4413 debug_generic_expr (rhs2_type);
4414 debug_generic_expr (rhs3_type);
4415 return true;
4418 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4419 as long as lhs, rhs1 and rhs2 have same element type. */
4420 if (TREE_CONSTANT (rhs3)
4421 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4422 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4423 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4424 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4426 error ("type mismatch in %qs", code_name);
4427 debug_generic_expr (lhs_type);
4428 debug_generic_expr (rhs1_type);
4429 debug_generic_expr (rhs2_type);
4430 debug_generic_expr (rhs3_type);
4431 return true;
4434 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4435 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4436 TYPE_VECTOR_SUBPARTS (rhs2_type))
4437 || (!TREE_CONSTANT(rhs3)
4438 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4439 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4440 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4441 TYPE_VECTOR_SUBPARTS (lhs_type)))
4443 error ("vectors with different element number found in %qs",
4444 code_name);
4445 debug_generic_expr (lhs_type);
4446 debug_generic_expr (rhs1_type);
4447 debug_generic_expr (rhs2_type);
4448 debug_generic_expr (rhs3_type);
4449 return true;
4452 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4453 || (TREE_CODE (rhs3) != VECTOR_CST
4454 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4455 (TREE_TYPE (rhs3_type)))
4456 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4457 (TREE_TYPE (rhs1_type))))))
4459 error ("invalid mask type in %qs", code_name);
4460 debug_generic_expr (lhs_type);
4461 debug_generic_expr (rhs1_type);
4462 debug_generic_expr (rhs2_type);
4463 debug_generic_expr (rhs3_type);
4464 return true;
4467 return false;
4469 case SAD_EXPR:
4470 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4471 || !useless_type_conversion_p (lhs_type, rhs3_type)
4472 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4473 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4475 error ("type mismatch in %qs", code_name);
4476 debug_generic_expr (lhs_type);
4477 debug_generic_expr (rhs1_type);
4478 debug_generic_expr (rhs2_type);
4479 debug_generic_expr (rhs3_type);
4480 return true;
4483 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4484 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4485 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4487 error ("vector types expected in %qs", code_name);
4488 debug_generic_expr (lhs_type);
4489 debug_generic_expr (rhs1_type);
4490 debug_generic_expr (rhs2_type);
4491 debug_generic_expr (rhs3_type);
4492 return true;
4495 return false;
4497 case BIT_INSERT_EXPR:
4498 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4500 error ("type mismatch in %qs", code_name);
4501 debug_generic_expr (lhs_type);
4502 debug_generic_expr (rhs1_type);
4503 return true;
4505 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4506 && INTEGRAL_TYPE_P (rhs2_type))
4507 /* Vector element insert. */
4508 || (VECTOR_TYPE_P (rhs1_type)
4509 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4510 /* Aligned sub-vector insert. */
4511 || (VECTOR_TYPE_P (rhs1_type)
4512 && VECTOR_TYPE_P (rhs2_type)
4513 && types_compatible_p (TREE_TYPE (rhs1_type),
4514 TREE_TYPE (rhs2_type))
4515 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4516 TYPE_VECTOR_SUBPARTS (rhs2_type))
4517 && multiple_p (wi::to_poly_offset (rhs3),
4518 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4520 error ("not allowed type combination in %qs", code_name);
4521 debug_generic_expr (rhs1_type);
4522 debug_generic_expr (rhs2_type);
4523 return true;
4525 if (! tree_fits_uhwi_p (rhs3)
4526 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4527 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4529 error ("invalid position or size in %qs", code_name);
4530 return true;
4532 if (INTEGRAL_TYPE_P (rhs1_type)
4533 && !type_has_mode_precision_p (rhs1_type))
4535 error ("%qs into non-mode-precision operand", code_name);
4536 return true;
4538 if (INTEGRAL_TYPE_P (rhs1_type))
4540 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4541 if (bitpos >= TYPE_PRECISION (rhs1_type)
4542 || (bitpos + TYPE_PRECISION (rhs2_type)
4543 > TYPE_PRECISION (rhs1_type)))
4545 error ("insertion out of range in %qs", code_name);
4546 return true;
4549 else if (VECTOR_TYPE_P (rhs1_type))
4551 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4552 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4553 if (bitpos % bitsize != 0)
4555 error ("%qs not at element boundary", code_name);
4556 return true;
4559 return false;
4561 case DOT_PROD_EXPR:
4563 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4564 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4565 && ((!INTEGRAL_TYPE_P (rhs1_type)
4566 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4567 || (!INTEGRAL_TYPE_P (lhs_type)
4568 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4569 /* rhs1_type and rhs2_type may differ in sign. */
4570 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4571 || !useless_type_conversion_p (lhs_type, rhs3_type)
4572 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4573 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4575 error ("type mismatch in %qs", code_name);
4576 debug_generic_expr (lhs_type);
4577 debug_generic_expr (rhs1_type);
4578 debug_generic_expr (rhs2_type);
4579 return true;
4581 return false;
4584 case REALIGN_LOAD_EXPR:
4585 /* FIXME. */
4586 return false;
4588 default:
4589 gcc_unreachable ();
4591 return false;
4594 /* Verify a gimple assignment statement STMT with a single rhs.
4595 Returns true if anything is wrong. */
4597 static bool
4598 verify_gimple_assign_single (gassign *stmt)
4600 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4601 tree lhs = gimple_assign_lhs (stmt);
4602 tree lhs_type = TREE_TYPE (lhs);
4603 tree rhs1 = gimple_assign_rhs1 (stmt);
4604 tree rhs1_type = TREE_TYPE (rhs1);
4605 bool res = false;
4607 const char* const code_name = get_tree_code_name (rhs_code);
4609 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4611 error ("non-trivial conversion in %qs", code_name);
4612 debug_generic_expr (lhs_type);
4613 debug_generic_expr (rhs1_type);
4614 return true;
4617 if (gimple_clobber_p (stmt)
4618 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4620 error ("%qs LHS in clobber statement",
4621 get_tree_code_name (TREE_CODE (lhs)));
4622 debug_generic_expr (lhs);
4623 return true;
4626 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4628 error ("%qs LHS in assignment statement",
4629 get_tree_code_name (TREE_CODE (lhs)));
4630 debug_generic_expr (lhs);
4631 return true;
4634 if (handled_component_p (lhs)
4635 || TREE_CODE (lhs) == MEM_REF
4636 || TREE_CODE (lhs) == TARGET_MEM_REF)
4637 res |= verify_types_in_gimple_reference (lhs, true);
4639 /* Special codes we cannot handle via their class. */
4640 switch (rhs_code)
4642 case ADDR_EXPR:
4644 tree op = TREE_OPERAND (rhs1, 0);
4645 if (!is_gimple_addressable (op))
4647 error ("invalid operand in %qs", code_name);
4648 return true;
4651 /* Technically there is no longer a need for matching types, but
4652 gimple hygiene asks for this check. In LTO we can end up
4653 combining incompatible units and thus end up with addresses
4654 of globals that change their type to a common one. */
4655 if (!in_lto_p
4656 && !types_compatible_p (TREE_TYPE (op),
4657 TREE_TYPE (TREE_TYPE (rhs1)))
4658 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4659 TREE_TYPE (op)))
4661 error ("type mismatch in %qs", code_name);
4662 debug_generic_stmt (TREE_TYPE (rhs1));
4663 debug_generic_stmt (TREE_TYPE (op));
4664 return true;
4667 return (verify_address (rhs1, true)
4668 || verify_types_in_gimple_reference (op, true));
4671 /* tcc_reference */
4672 case INDIRECT_REF:
4673 error ("%qs in gimple IL", code_name);
4674 return true;
4676 case WITH_SIZE_EXPR:
4677 if (!is_gimple_val (TREE_OPERAND (rhs1, 1)))
4679 error ("invalid %qs size argument in load", code_name);
4680 debug_generic_stmt (lhs);
4681 debug_generic_stmt (rhs1);
4682 return true;
4684 rhs1 = TREE_OPERAND (rhs1, 0);
4685 /* Fallthru. */
4686 case COMPONENT_REF:
4687 case BIT_FIELD_REF:
4688 case ARRAY_REF:
4689 case ARRAY_RANGE_REF:
4690 case VIEW_CONVERT_EXPR:
4691 case REALPART_EXPR:
4692 case IMAGPART_EXPR:
4693 case TARGET_MEM_REF:
4694 case MEM_REF:
4695 if (!is_gimple_reg (lhs)
4696 && is_gimple_reg_type (TREE_TYPE (lhs)))
4698 error ("invalid RHS for gimple memory store: %qs", code_name);
4699 debug_generic_stmt (lhs);
4700 debug_generic_stmt (rhs1);
4701 return true;
4703 return res || verify_types_in_gimple_reference (rhs1, false);
4705 /* tcc_constant */
4706 case SSA_NAME:
4707 case INTEGER_CST:
4708 case REAL_CST:
4709 case FIXED_CST:
4710 case COMPLEX_CST:
4711 case VECTOR_CST:
4712 case STRING_CST:
4713 return res;
4715 /* tcc_declaration */
4716 case CONST_DECL:
4717 return res;
4718 case VAR_DECL:
4719 case PARM_DECL:
4720 if (!is_gimple_reg (lhs)
4721 && !is_gimple_reg (rhs1)
4722 && is_gimple_reg_type (TREE_TYPE (lhs)))
4724 error ("invalid RHS for gimple memory store: %qs", code_name);
4725 debug_generic_stmt (lhs);
4726 debug_generic_stmt (rhs1);
4727 return true;
4729 return res;
4731 case CONSTRUCTOR:
4732 if (VECTOR_TYPE_P (rhs1_type))
4734 unsigned int i;
4735 tree elt_i, elt_v, elt_t = NULL_TREE;
4737 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4738 return res;
4739 /* For vector CONSTRUCTORs we require that either it is empty
4740 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4741 (then the element count must be correct to cover the whole
4742 outer vector and index must be NULL on all elements, or it is
4743 a CONSTRUCTOR of scalar elements, where we as an exception allow
4744 smaller number of elements (assuming zero filling) and
4745 consecutive indexes as compared to NULL indexes (such
4746 CONSTRUCTORs can appear in the IL from FEs). */
4747 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4749 if (elt_t == NULL_TREE)
4751 elt_t = TREE_TYPE (elt_v);
4752 if (VECTOR_TYPE_P (elt_t))
4754 tree elt_t = TREE_TYPE (elt_v);
4755 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4756 TREE_TYPE (elt_t)))
4758 error ("incorrect type of vector %qs elements",
4759 code_name);
4760 debug_generic_stmt (rhs1);
4761 return true;
4763 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4764 * TYPE_VECTOR_SUBPARTS (elt_t),
4765 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4767 error ("incorrect number of vector %qs elements",
4768 code_name);
4769 debug_generic_stmt (rhs1);
4770 return true;
4773 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4774 elt_t))
4776 error ("incorrect type of vector %qs elements",
4777 code_name);
4778 debug_generic_stmt (rhs1);
4779 return true;
4781 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4782 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4784 error ("incorrect number of vector %qs elements",
4785 code_name);
4786 debug_generic_stmt (rhs1);
4787 return true;
4790 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4792 error ("incorrect type of vector CONSTRUCTOR elements");
4793 debug_generic_stmt (rhs1);
4794 return true;
4796 if (elt_i != NULL_TREE
4797 && (VECTOR_TYPE_P (elt_t)
4798 || TREE_CODE (elt_i) != INTEGER_CST
4799 || compare_tree_int (elt_i, i) != 0))
4801 error ("vector %qs with non-NULL element index",
4802 code_name);
4803 debug_generic_stmt (rhs1);
4804 return true;
4806 if (!is_gimple_val (elt_v))
4808 error ("vector %qs element is not a GIMPLE value",
4809 code_name);
4810 debug_generic_stmt (rhs1);
4811 return true;
4815 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4817 error ("non-vector %qs with elements", code_name);
4818 debug_generic_stmt (rhs1);
4819 return true;
4821 return res;
4823 case OBJ_TYPE_REF:
4824 /* FIXME. */
4825 return res;
4827 default:;
4830 return res;
4833 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4834 is a problem, otherwise false. */
4836 static bool
4837 verify_gimple_assign (gassign *stmt)
4839 switch (gimple_assign_rhs_class (stmt))
4841 case GIMPLE_SINGLE_RHS:
4842 return verify_gimple_assign_single (stmt);
4844 case GIMPLE_UNARY_RHS:
4845 return verify_gimple_assign_unary (stmt);
4847 case GIMPLE_BINARY_RHS:
4848 return verify_gimple_assign_binary (stmt);
4850 case GIMPLE_TERNARY_RHS:
4851 return verify_gimple_assign_ternary (stmt);
4853 default:
4854 gcc_unreachable ();
4858 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4859 is a problem, otherwise false. */
4861 static bool
4862 verify_gimple_return (greturn *stmt)
4864 tree op = gimple_return_retval (stmt);
4865 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4867 /* We cannot test for present return values as we do not fix up missing
4868 return values from the original source. */
4869 if (op == NULL)
4870 return false;
4872 if (!is_gimple_val (op)
4873 && TREE_CODE (op) != RESULT_DECL)
4875 error ("invalid operand in return statement");
4876 debug_generic_stmt (op);
4877 return true;
4880 if ((TREE_CODE (op) == RESULT_DECL
4881 && DECL_BY_REFERENCE (op))
4882 || (TREE_CODE (op) == SSA_NAME
4883 && SSA_NAME_VAR (op)
4884 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4885 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4886 op = TREE_TYPE (op);
4888 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4890 error ("invalid conversion in return statement");
4891 debug_generic_stmt (restype);
4892 debug_generic_stmt (TREE_TYPE (op));
4893 return true;
4896 return false;
4900 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4901 is a problem, otherwise false. */
4903 static bool
4904 verify_gimple_goto (ggoto *stmt)
4906 tree dest = gimple_goto_dest (stmt);
4908 /* ??? We have two canonical forms of direct goto destinations, a
4909 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4910 if (TREE_CODE (dest) != LABEL_DECL
4911 && (!is_gimple_val (dest)
4912 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4914 error ("goto destination is neither a label nor a pointer");
4915 return true;
4918 return false;
4921 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4922 is a problem, otherwise false. */
4924 static bool
4925 verify_gimple_switch (gswitch *stmt)
4927 unsigned int i, n;
4928 tree elt, prev_upper_bound = NULL_TREE;
4929 tree index_type, elt_type = NULL_TREE;
4931 if (!is_gimple_val (gimple_switch_index (stmt)))
4933 error ("invalid operand to switch statement");
4934 debug_generic_stmt (gimple_switch_index (stmt));
4935 return true;
4938 index_type = TREE_TYPE (gimple_switch_index (stmt));
4939 if (! INTEGRAL_TYPE_P (index_type))
4941 error ("non-integral type switch statement");
4942 debug_generic_expr (index_type);
4943 return true;
4946 elt = gimple_switch_label (stmt, 0);
4947 if (CASE_LOW (elt) != NULL_TREE
4948 || CASE_HIGH (elt) != NULL_TREE
4949 || CASE_CHAIN (elt) != NULL_TREE)
4951 error ("invalid default case label in switch statement");
4952 debug_generic_expr (elt);
4953 return true;
4956 n = gimple_switch_num_labels (stmt);
4957 for (i = 1; i < n; i++)
4959 elt = gimple_switch_label (stmt, i);
4961 if (CASE_CHAIN (elt))
4963 error ("invalid %<CASE_CHAIN%>");
4964 debug_generic_expr (elt);
4965 return true;
4967 if (! CASE_LOW (elt))
4969 error ("invalid case label in switch statement");
4970 debug_generic_expr (elt);
4971 return true;
4973 if (CASE_HIGH (elt)
4974 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4976 error ("invalid case range in switch statement");
4977 debug_generic_expr (elt);
4978 return true;
4981 if (! elt_type)
4983 elt_type = TREE_TYPE (CASE_LOW (elt));
4984 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4986 error ("type precision mismatch in switch statement");
4987 return true;
4990 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4991 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4993 error ("type mismatch for case label in switch statement");
4994 debug_generic_expr (elt);
4995 return true;
4998 if (prev_upper_bound)
5000 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
5002 error ("case labels not sorted in switch statement");
5003 return true;
5007 prev_upper_bound = CASE_HIGH (elt);
5008 if (! prev_upper_bound)
5009 prev_upper_bound = CASE_LOW (elt);
5012 return false;
5015 /* Verify a gimple debug statement STMT.
5016 Returns true if anything is wrong. */
5018 static bool
5019 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5021 /* There isn't much that could be wrong in a gimple debug stmt. A
5022 gimple debug bind stmt, for example, maps a tree, that's usually
5023 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5024 component or member of an aggregate type, to another tree, that
5025 can be an arbitrary expression. These stmts expand into debug
5026 insns, and are converted to debug notes by var-tracking.cc. */
5027 return false;
5030 /* Verify a gimple label statement STMT.
5031 Returns true if anything is wrong. */
5033 static bool
5034 verify_gimple_label (glabel *stmt)
5036 tree decl = gimple_label_label (stmt);
5037 int uid;
5038 bool err = false;
5040 if (TREE_CODE (decl) != LABEL_DECL)
5041 return true;
5042 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5043 && DECL_CONTEXT (decl) != current_function_decl)
5045 error ("label context is not the current function declaration");
5046 err |= true;
5049 uid = LABEL_DECL_UID (decl);
5050 if (cfun->cfg
5051 && (uid == -1
5052 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5054 error ("incorrect entry in %<label_to_block_map%>");
5055 err |= true;
5058 uid = EH_LANDING_PAD_NR (decl);
5059 if (uid)
5061 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5062 if (decl != lp->post_landing_pad)
5064 error ("incorrect setting of landing pad number");
5065 err |= true;
5069 return err;
5072 /* Verify a gimple cond statement STMT.
5073 Returns true if anything is wrong. */
5075 static bool
5076 verify_gimple_cond (gcond *stmt)
5078 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5080 error ("invalid comparison code in gimple cond");
5081 return true;
5083 if (!(!gimple_cond_true_label (stmt)
5084 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5085 || !(!gimple_cond_false_label (stmt)
5086 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5088 error ("invalid labels in gimple cond");
5089 return true;
5092 return verify_gimple_comparison (boolean_type_node,
5093 gimple_cond_lhs (stmt),
5094 gimple_cond_rhs (stmt),
5095 gimple_cond_code (stmt));
5098 /* Verify the GIMPLE statement STMT. Returns true if there is an
5099 error, otherwise false. */
5101 static bool
5102 verify_gimple_stmt (gimple *stmt)
5104 switch (gimple_code (stmt))
5106 case GIMPLE_ASSIGN:
5107 return verify_gimple_assign (as_a <gassign *> (stmt));
5109 case GIMPLE_LABEL:
5110 return verify_gimple_label (as_a <glabel *> (stmt));
5112 case GIMPLE_CALL:
5113 return verify_gimple_call (as_a <gcall *> (stmt));
5115 case GIMPLE_COND:
5116 return verify_gimple_cond (as_a <gcond *> (stmt));
5118 case GIMPLE_GOTO:
5119 return verify_gimple_goto (as_a <ggoto *> (stmt));
5121 case GIMPLE_SWITCH:
5122 return verify_gimple_switch (as_a <gswitch *> (stmt));
5124 case GIMPLE_RETURN:
5125 return verify_gimple_return (as_a <greturn *> (stmt));
5127 case GIMPLE_ASM:
5128 return false;
5130 case GIMPLE_TRANSACTION:
5131 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5133 /* Tuples that do not have tree operands. */
5134 case GIMPLE_NOP:
5135 case GIMPLE_PREDICT:
5136 case GIMPLE_RESX:
5137 case GIMPLE_EH_DISPATCH:
5138 case GIMPLE_EH_MUST_NOT_THROW:
5139 return false;
5141 CASE_GIMPLE_OMP:
5142 /* OpenMP directives are validated by the FE and never operated
5143 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5144 non-gimple expressions when the main index variable has had
5145 its address taken. This does not affect the loop itself
5146 because the header of an GIMPLE_OMP_FOR is merely used to determine
5147 how to setup the parallel iteration. */
5148 return false;
5150 case GIMPLE_ASSUME:
5151 return false;
5153 case GIMPLE_DEBUG:
5154 return verify_gimple_debug (stmt);
5156 default:
5157 gcc_unreachable ();
5161 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5162 and false otherwise. */
5164 static bool
5165 verify_gimple_phi (gphi *phi)
5167 bool err = false;
5168 unsigned i;
5169 tree phi_result = gimple_phi_result (phi);
5170 bool virtual_p;
5172 if (!phi_result)
5174 error ("invalid %<PHI%> result");
5175 return true;
5178 virtual_p = virtual_operand_p (phi_result);
5179 if (TREE_CODE (phi_result) != SSA_NAME
5180 || (virtual_p
5181 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5183 error ("invalid %<PHI%> result");
5184 err = true;
5187 for (i = 0; i < gimple_phi_num_args (phi); i++)
5189 tree t = gimple_phi_arg_def (phi, i);
5191 if (!t)
5193 error ("missing %<PHI%> def");
5194 err |= true;
5195 continue;
5197 /* Addressable variables do have SSA_NAMEs but they
5198 are not considered gimple values. */
5199 else if ((TREE_CODE (t) == SSA_NAME
5200 && virtual_p != virtual_operand_p (t))
5201 || (virtual_p
5202 && (TREE_CODE (t) != SSA_NAME
5203 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5204 || (!virtual_p
5205 && !is_gimple_val (t)))
5207 error ("invalid %<PHI%> argument");
5208 debug_generic_expr (t);
5209 err |= true;
5211 #ifdef ENABLE_TYPES_CHECKING
5212 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5214 error ("incompatible types in %<PHI%> argument %u", i);
5215 debug_generic_stmt (TREE_TYPE (phi_result));
5216 debug_generic_stmt (TREE_TYPE (t));
5217 err |= true;
5219 #endif
5222 return err;
5225 /* Verify the GIMPLE statements inside the sequence STMTS. */
5227 static bool
5228 verify_gimple_in_seq_2 (gimple_seq stmts)
5230 gimple_stmt_iterator ittr;
5231 bool err = false;
5233 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5235 gimple *stmt = gsi_stmt (ittr);
5237 switch (gimple_code (stmt))
5239 case GIMPLE_BIND:
5240 err |= verify_gimple_in_seq_2 (
5241 gimple_bind_body (as_a <gbind *> (stmt)));
5242 break;
5244 case GIMPLE_TRY:
5245 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5246 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5247 break;
5249 case GIMPLE_EH_FILTER:
5250 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5251 break;
5253 case GIMPLE_EH_ELSE:
5255 geh_else *eh_else = as_a <geh_else *> (stmt);
5256 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5257 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5259 break;
5261 case GIMPLE_CATCH:
5262 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5263 as_a <gcatch *> (stmt)));
5264 break;
5266 case GIMPLE_ASSUME:
5267 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5268 break;
5270 case GIMPLE_TRANSACTION:
5271 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5272 break;
5274 default:
5276 bool err2 = verify_gimple_stmt (stmt);
5277 if (err2)
5278 debug_gimple_stmt (stmt);
5279 err |= err2;
5284 return err;
5287 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5288 is a problem, otherwise false. */
5290 static bool
5291 verify_gimple_transaction (gtransaction *stmt)
5293 tree lab;
5295 lab = gimple_transaction_label_norm (stmt);
5296 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5297 return true;
5298 lab = gimple_transaction_label_uninst (stmt);
5299 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5300 return true;
5301 lab = gimple_transaction_label_over (stmt);
5302 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5303 return true;
5305 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5309 /* Verify the GIMPLE statements inside the statement list STMTS. */
5311 DEBUG_FUNCTION bool
5312 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5314 timevar_push (TV_TREE_STMT_VERIFY);
5315 bool res = verify_gimple_in_seq_2 (stmts);
5316 if (res && ice)
5317 internal_error ("%<verify_gimple%> failed");
5318 timevar_pop (TV_TREE_STMT_VERIFY);
5319 return res;
5322 /* Return true when the T can be shared. */
5324 static bool
5325 tree_node_can_be_shared (tree t)
5327 if (IS_TYPE_OR_DECL_P (t)
5328 || TREE_CODE (t) == SSA_NAME
5329 || TREE_CODE (t) == IDENTIFIER_NODE
5330 || TREE_CODE (t) == CASE_LABEL_EXPR
5331 || is_gimple_min_invariant (t))
5332 return true;
5334 if (t == error_mark_node)
5335 return true;
5337 return false;
5340 /* Called via walk_tree. Verify tree sharing. */
5342 static tree
5343 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5345 hash_set<void *> *visited = (hash_set<void *> *) data;
5347 if (tree_node_can_be_shared (*tp))
5349 *walk_subtrees = false;
5350 return NULL;
5353 if (visited->add (*tp))
5354 return *tp;
5356 return NULL;
5359 /* Called via walk_gimple_stmt. Verify tree sharing. */
5361 static tree
5362 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5364 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5365 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5368 static bool eh_error_found;
5369 bool
5370 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5371 hash_set<gimple *> *visited)
5373 if (!visited->contains (stmt))
5375 error ("dead statement in EH table");
5376 debug_gimple_stmt (stmt);
5377 eh_error_found = true;
5379 return true;
5382 /* Verify if the location LOCs block is in BLOCKS. */
5384 static bool
5385 verify_location (hash_set<tree> *blocks, location_t loc)
5387 tree block = LOCATION_BLOCK (loc);
5388 if (block != NULL_TREE
5389 && !blocks->contains (block))
5391 error ("location references block not in block tree");
5392 return true;
5394 if (block != NULL_TREE)
5395 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5396 return false;
5399 /* Called via walk_tree. Verify that expressions have no blocks. */
5401 static tree
5402 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5404 if (!EXPR_P (*tp))
5406 *walk_subtrees = false;
5407 return NULL;
5410 location_t loc = EXPR_LOCATION (*tp);
5411 if (LOCATION_BLOCK (loc) != NULL)
5412 return *tp;
5414 return NULL;
5417 /* Called via walk_tree. Verify locations of expressions. */
5419 static tree
5420 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5422 hash_set<tree> *blocks = (hash_set<tree> *) data;
5423 tree t = *tp;
5425 /* ??? This doesn't really belong here but there's no good place to
5426 stick this remainder of old verify_expr. */
5427 /* ??? This barfs on debug stmts which contain binds to vars with
5428 different function context. */
5429 #if 0
5430 if (VAR_P (t)
5431 || TREE_CODE (t) == PARM_DECL
5432 || TREE_CODE (t) == RESULT_DECL)
5434 tree context = decl_function_context (t);
5435 if (context != cfun->decl
5436 && !SCOPE_FILE_SCOPE_P (context)
5437 && !TREE_STATIC (t)
5438 && !DECL_EXTERNAL (t))
5440 error ("local declaration from a different function");
5441 return t;
5444 #endif
5446 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5448 tree x = DECL_DEBUG_EXPR (t);
5449 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5450 if (addr)
5451 return addr;
5453 if ((VAR_P (t)
5454 || TREE_CODE (t) == PARM_DECL
5455 || TREE_CODE (t) == RESULT_DECL)
5456 && DECL_HAS_VALUE_EXPR_P (t))
5458 tree x = DECL_VALUE_EXPR (t);
5459 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5460 if (addr)
5461 return addr;
5464 if (!EXPR_P (t))
5466 *walk_subtrees = false;
5467 return NULL;
5470 location_t loc = EXPR_LOCATION (t);
5471 if (verify_location (blocks, loc))
5472 return t;
5474 return NULL;
5477 /* Called via walk_gimple_op. Verify locations of expressions. */
5479 static tree
5480 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5482 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5483 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5486 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5488 static void
5489 collect_subblocks (hash_set<tree> *blocks, tree block)
5491 tree t;
5492 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5494 blocks->add (t);
5495 collect_subblocks (blocks, t);
5499 /* Disable warnings about missing quoting in GCC diagnostics for
5500 the verification errors. Their format strings don't follow
5501 GCC diagnostic conventions and trigger an ICE in the end. */
5502 #if __GNUC__ >= 10
5503 # pragma GCC diagnostic push
5504 # pragma GCC diagnostic ignored "-Wformat-diag"
5505 #endif
5507 /* Verify the GIMPLE statements in the CFG of FN. */
5509 DEBUG_FUNCTION bool
5510 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5512 basic_block bb;
5513 bool err = false;
5515 timevar_push (TV_TREE_STMT_VERIFY);
5516 hash_set<void *> visited;
5517 hash_set<gimple *> visited_throwing_stmts;
5519 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5520 hash_set<tree> blocks;
5521 if (DECL_INITIAL (fn->decl))
5523 blocks.add (DECL_INITIAL (fn->decl));
5524 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5527 FOR_EACH_BB_FN (bb, fn)
5529 gimple_stmt_iterator gsi;
5530 edge_iterator ei;
5531 edge e;
5533 for (gphi_iterator gpi = gsi_start_phis (bb);
5534 !gsi_end_p (gpi);
5535 gsi_next (&gpi))
5537 gphi *phi = gpi.phi ();
5538 bool err2 = false;
5539 unsigned i;
5541 if (gimple_bb (phi) != bb)
5543 error ("gimple_bb (phi) is set to a wrong basic block");
5544 err2 = true;
5547 err2 |= verify_gimple_phi (phi);
5549 /* Only PHI arguments have locations. */
5550 if (gimple_location (phi) != UNKNOWN_LOCATION)
5552 error ("PHI node with location");
5553 err2 = true;
5556 for (i = 0; i < gimple_phi_num_args (phi); i++)
5558 tree arg = gimple_phi_arg_def (phi, i);
5559 tree addr = walk_tree (&arg, verify_node_sharing_1,
5560 &visited, NULL);
5561 if (addr)
5563 error ("incorrect sharing of tree nodes");
5564 debug_generic_expr (addr);
5565 err2 |= true;
5567 location_t loc = gimple_phi_arg_location (phi, i);
5568 if (virtual_operand_p (gimple_phi_result (phi))
5569 && loc != UNKNOWN_LOCATION)
5571 error ("virtual PHI with argument locations");
5572 err2 = true;
5574 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5575 if (addr)
5577 debug_generic_expr (addr);
5578 err2 = true;
5580 err2 |= verify_location (&blocks, loc);
5583 if (err2)
5584 debug_gimple_stmt (phi);
5585 err |= err2;
5588 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5590 gimple *stmt = gsi_stmt (gsi);
5591 bool err2 = false;
5592 struct walk_stmt_info wi;
5593 tree addr;
5594 int lp_nr;
5596 if (gimple_bb (stmt) != bb)
5598 error ("gimple_bb (stmt) is set to a wrong basic block");
5599 err2 = true;
5602 err2 |= verify_gimple_stmt (stmt);
5603 err2 |= verify_location (&blocks, gimple_location (stmt));
5605 memset (&wi, 0, sizeof (wi));
5606 wi.info = (void *) &visited;
5607 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5608 if (addr)
5610 error ("incorrect sharing of tree nodes");
5611 debug_generic_expr (addr);
5612 err2 |= true;
5615 memset (&wi, 0, sizeof (wi));
5616 wi.info = (void *) &blocks;
5617 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5618 if (addr)
5620 debug_generic_expr (addr);
5621 err2 |= true;
5624 /* If the statement is marked as part of an EH region, then it is
5625 expected that the statement could throw. Verify that when we
5626 have optimizations that simplify statements such that we prove
5627 that they cannot throw, that we update other data structures
5628 to match. */
5629 lp_nr = lookup_stmt_eh_lp (stmt);
5630 if (lp_nr != 0)
5631 visited_throwing_stmts.add (stmt);
5632 if (lp_nr > 0)
5634 if (!stmt_could_throw_p (cfun, stmt))
5636 if (verify_nothrow)
5638 error ("statement marked for throw, but doesn%'t");
5639 err2 |= true;
5642 else if (!gsi_one_before_end_p (gsi))
5644 error ("statement marked for throw in middle of block");
5645 err2 |= true;
5649 if (err2)
5650 debug_gimple_stmt (stmt);
5651 err |= err2;
5654 FOR_EACH_EDGE (e, ei, bb->succs)
5655 if (e->goto_locus != UNKNOWN_LOCATION)
5656 err |= verify_location (&blocks, e->goto_locus);
5659 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5660 eh_error_found = false;
5661 if (eh_table)
5662 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5663 (&visited_throwing_stmts);
5665 if (ice && (err || eh_error_found))
5666 internal_error ("verify_gimple failed");
5668 verify_histograms ();
5669 timevar_pop (TV_TREE_STMT_VERIFY);
5671 return (err || eh_error_found);
/* Verifies that the flow information is OK.  Checks the CFG invariants
   that the GIMPLE representation must maintain: no IL on the entry/exit
   blocks, labels only at block starts, no control flow in the middle of
   a block, and outgoing-edge flags consistent with the block's last
   statement.  Returns true if any problem was found (after reporting it
   via error ()).  */

static bool
gimple_verify_flow_info (void)
{
  bool err = false;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry/exit blocks must never carry statements or
     PHI nodes.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = true;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = true;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = true;
      }

  /* With a full profile, every count and edge probability must be
     initialized.  */
  if (cfun->cfg->full_profile
      && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
    {
      error ("entry block count not initialized");
      err = true;
    }
  if (cfun->cfg->full_profile
      && !EXIT_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
    {
      error ("exit block count not initialized");
      err = true;
    }
  if (cfun->cfg->full_profile
      && !single_succ_edge
	    (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability.initialized_p ())
    {
      error ("probability of edge from entry block not initialized");
      err = true;
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      if (cfun->cfg->full_profile)
	{
	  if (!bb->count.initialized_p ())
	    {
	      error ("count of bb %d not initialized", bb->index);
	      err = true;
	    }
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!e->probability.initialized_p ())
	      {
		error ("probability of edge %d->%d not initialized",
		       bb->index, e->dest->index);
		err = true;
	      }
	}

      /* Skip labels on the start of basic block.  While walking them,
	 verify their ordering constraints: nonlocal labels and EH
	 landing pad labels must come first.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label %qD is not first in a sequence "
		     "of labels in bb %d", label, bb->index);
	      err = true;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label %qD is not first in a sequence "
		     "of labels in bb %d", label, bb->index);
	      err = true;
	    }

	  if (label_to_block (cfun, label) != bb)
	    {
	      error ("label %qD to block does not match in bb %d",
		     label, bb->index);
	      err = true;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label %qD has incorrect context in bb %d",
		     label, bb->index);
	      err = true;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      bool seen_nondebug_stmt = false;
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  /* Do NOT disregard debug stmts after found_ctrl_stmt.  */
	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = true;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label %qD in the middle of basic block %d",
		     gimple_label_label (label_stmt), bb->index);
	      err = true;
	    }

	  /* Check that no statements appear between a returns_twice call
	     and its associated abnormal edge.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	    {
	      const char *misplaced = NULL;
	      /* TM is an exception: it points abnormal edges just after the
		 call that starts a transaction, i.e. it must end the BB.  */
	      if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
		{
		  if (single_succ_p (bb)
		      && bb_has_abnormal_pred (single_succ (bb))
		      && !gsi_one_nondebug_before_end_p (gsi))
		    misplaced = "not last";
		}
	      else
		{
		  if (seen_nondebug_stmt
		      && bb_has_abnormal_pred (bb))
		    misplaced = "not first";
		}
	      if (misplaced)
		{
		  error ("returns_twice call is %s in basic block %d",
			 misplaced, bb->index);
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  err = true;
		}
	    }
	  if (!is_gimple_debug (stmt))
	    seen_nondebug_stmt = true;
	}

      /* From here on verify the block's last real statement against its
	 outgoing edges.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (verify_eh_edges (stmt))
	err = true;

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = true;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = true;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    /* A conditional must have exactly one true and one false
	       successor, neither of which may be fallthru or abnormal.  */
	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = true;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = true;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = true;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls are treated like returns here.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* fallthru */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = true;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = true;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as scratch: 1 = reachable from a case label,
	       2 = also reached by an outgoing edge.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = true;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c);
		    fprintf (stderr," but comes before it.\n");
		    err = true;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = true;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = true;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun,
							       switch_stmt, i);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = true;
		  }
	      }

	    /* Clear the scratch aux markers.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  if (verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt)))
	    err = true;
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
6049 #if __GNUC__ >= 10
6050 # pragma GCC diagnostic pop
6051 #endif
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The PHI nodes that lived in DUMMY (the forwarder,
   FALLTHRU->src) are recreated in BB (FALLTHRU->dest); the old PHIs in
   DUMMY get fresh SSA result names whose values feed the new PHIs
   through FALLTHRU.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;
  bool forward_location_p;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* Nothing to do: BB's only predecessor is the forwarder, so its PHIs
     need no new arguments.  */
  if (single_pred_p (bb))
    return;

  /* We can forward location info if we have only one predecessor.  */
  forward_location_p = single_pred_p (dummy);

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      /* The new PHI in BB takes over the old result name; the old PHI
	 in DUMMY gets a fresh name that becomes the FALLTHRU argument
	 of the new PHI.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   forward_location_p
		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  Note the side effect: if a suitable
   (non-DECL_NONLOCAL) label exists but is not the first label of the
   block, it is moved to the front so later calls find it first.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  /* Scan the leading GIMPLE_LABELs for one we may reuse.  */
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  /* Hoist the reusable label to the start of the block.  */
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No usable label found: synthesize one at the block start.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      /* Both destinations are TARGET, so the branch is redundant:
	 delete it and make E a plain fallthru edge.  */
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Depending on the statement
   ending E->src, this may require rewriting case labels, asm goto
   labels or transaction labels so the control statement transfers to
   DEST's block label.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  /* Abnormal edges cannot be redirected at all.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  /* EH edges have their own redirection machinery.  */
  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      /* First try the cheap route: drop the branch entirely if every
	 successor already goes to DEST.  */
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_nondebug_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on E's list at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* Slow path: scan the whole case vector for labels that
	       pointed at E's old destination.  */
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	/* Rewrite every asm-goto label operand that targeted E's old
	   destination.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* Pick the transaction label matching E's flavor and point it
	 at DEST.  */
      if (e->flags & EDGE_TM_ABORT)
	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      else if (e->flags & EDGE_TM_UNINSTRUMENTED)
	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
					     gimple_block_label (dest));
      else
	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
6327 /* Returns true if it is possible to remove edge E by redirecting
6328 it to the destination of the other edge from E->src. */
6330 static bool
6331 gimple_can_remove_branch_p (const_edge e)
6333 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6334 return false;
6336 return true;
/* Simple wrapper, as we can always redirect fallthru edges.
   Returns NULL to signal to the generic CFG code that no new forwarder
   basic block was created; the redirection itself must succeed, which
   the assert enforces.  */

static basic_block
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = gimple_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block, which receives all of BB's outgoing
   edges and the statements after the split point.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  The successor list is moved
     wholesale to NEW_BB; BB keeps none.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
6399 /* Moves basic block BB after block AFTER. */
6401 static bool
6402 gimple_move_block_after (basic_block bb, basic_block after)
6404 if (bb->prev_bb == after)
6405 return true;
6407 unlink_block (bb);
6408 link_block (bb, after);
6410 return true;
6414 /* Return TRUE if block BB has no executable statements, otherwise return
6415 FALSE. */
6417 static bool
6418 gimple_empty_block_p (basic_block bb)
6420 /* BB must have no executable statements. */
6421 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6422 if (phi_nodes (bb))
6423 return false;
6424 while (!gsi_end_p (gsi))
6426 gimple *stmt = gsi_stmt (gsi);
6427 if (is_gimple_debug (stmt))
6429 else if (gimple_code (stmt) == GIMPLE_NOP
6430 || gimple_code (stmt) == GIMPLE_PREDICT)
6432 else
6433 return false;
6434 gsi_next (&gsi);
6436 return true;
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the block holding the
   condition after the split, or NULL when BB does not end in a
   GIMPLE_COND/GIMPLE_SWITCH.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple *last, *split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  /* Split after the statement just before the branch, so the branch
     becomes the first statement of the new block.  */
  gsi_prev (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
/* Return true if basic_block can be duplicated.  A block is rejected
   when it contains a statement that must stay unique: a transaction,
   an IFN_UNIQUE call, a returns_twice call, or a GOMP SIMT group
   call.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb)
{
  gimple *last = last_nondebug_stmt (CONST_CAST_BB (bb));

  /* Do checks that can only fail for the last stmt, to minimize the work in the
     stmt loop.  */
  if (last) {
    /* A transaction is a single entry multiple exit region.  It
       must be duplicated in its entirety or not at all.  */
    if (gimple_code (last) == GIMPLE_TRANSACTION)
      return false;

    /* An IFN_UNIQUE call must be duplicated as part of its group,
       or not at all.  */
    if (is_gimple_call (last)
	&& gimple_call_internal_p (last)
	&& gimple_call_internal_unique_p (last))
      return false;
  }

  for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *g = gsi_stmt (gsi);

      /* Prohibit duplication of returns_twice calls, otherwise associated
	 abnormal edges also need to be duplicated properly.
	 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
	 duplicated as part of its group, or not at all.
	 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
	 group, so the same holds there.  */
      if (is_gimple_call (g)
	  && (gimple_call_flags (g) & ECF_RETURNS_TWICE
	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
	return false;
    }

  return true;
}
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is placed just before the exit block.
   ID, when non-NULL, carries inliner state used to remap MR_DEPENDENCE
   cliques brought in via inlining.  */

static basic_block
gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      /* Labels are not copied: the duplicate must get its own.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* If requested remap dependence info of cliques brought in
	 via inlining.  */
      if (id)
	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
	  {
	    tree op = gimple_op (copy, i);
	    if (!op)
	      continue;
	    /* Strip wrappers and components down to the base memory
	       reference before inspecting its clique.  */
	    if (TREE_CODE (op) == ADDR_EXPR
		|| TREE_CODE (op) == WITH_SIZE_EXPR)
	      op = TREE_OPERAND (op, 0);
	    while (handled_component_p (op))
	      op = TREE_OPERAND (op, 0);
	    if ((TREE_CODE (op) == MEM_REF
		 || TREE_CODE (op) == TARGET_MEM_REF)
		&& MR_DEPENDENCE_CLIQUE (op) > 1
		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
	      {
		if (!id->dependence_map)
		  id->dependence_map = new hash_map<dependence_hash,
						    unsigned short>;
		bool existed;
		unsigned short &newc = id->dependence_map->get_or_insert
					 (MR_DEPENDENCE_CLIQUE (op), &existed);
		if (!existed)
		  {
		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
		    newc = get_new_clique (cfun);
		  }
		MR_DEPENDENCE_CLIQUE (op) = newc;
	      }
	  }

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The argument values are taken from the corresponding edge E in the
   original (pre-duplication) CFG; originals and copies are related via
   the BB_DUPLICATED flag and get_bb_original.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* No PHIs in the destination means no arguments to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original, if any.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Copy each PHI argument (and its location) from the original edge E
     to the corresponding PHI on E_COPY; the two PHI chains are walked
     in lockstep.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
6670 /* Basic block BB_COPY was created by code duplication. Add phi node
6671 arguments for edges going out of BB_COPY. The blocks that were
6672 duplicated have BB_DUPLICATED set. */
6674 void
6675 add_phi_args_after_copy_bb (basic_block bb_copy)
6677 edge e_copy;
6678 edge_iterator ei;
6680 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6682 add_phi_args_after_copy_edge (e_copy);
/* Blocks in REGION_COPY array of length N_REGION were created by
   duplication of basic blocks.  Add phi node arguments for edges
   going from these blocks.  If E_COPY is not NULL, also add
   phi node arguments for its destination.  The BB_DUPLICATED flag is
   set temporarily on all copies so add_phi_args_after_copy_edge can
   distinguish originals from copies, and cleared again afterwards.  */

void
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
			 edge e_copy)
{
  unsigned i;

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n_region; i++)
    add_phi_args_after_copy_bb (region_copy[i]);
  if (e_copy)
    add_phi_args_after_copy_edge (e_copy);

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags &= ~BB_DUPLICATED;
}
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.

   It is callers responsibility to update profile.  */

bool
gimple_duplicate_seme_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  class loop *loop = entry->dest->loop_father;
  edge exit_copy;
  edge redirected;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copy ends up outside the loop.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  auto_vec<basic_block> doms;
  if (update_dominance)
    doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6824 /* Checks if BB is part of the region defined by N_REGION BBS. */
6825 static bool
6826 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6828 unsigned int n;
6830 for (n = 0; n < n_region; n++)
6832 if (bb == bbs[n])
6833 return true;
6835 return false;
/* For each PHI in BB, copy the argument associated with SRC_E to TGT_E.
   Assuming the argument exists, just does not have a value.  The PHI
   chains of SRC_E->dest and TGT_E->dest are walked in lockstep, so the
   two blocks are expected to have matching PHI sequences.  */

void
copy_phi_arg_into_existing_phi (edge src_e, edge tgt_e)
{
  int src_idx = src_e->dest_idx;
  int tgt_idx = tgt_e->dest_idx;

  /* Iterate over each PHI in e->dest.  */
  for (gphi_iterator gsi = gsi_start_phis (src_e->dest),
	   gsi2 = gsi_start_phis (tgt_e->dest);
       !gsi_end_p (gsi);
       gsi_next (&gsi), gsi_next (&gsi2))
    {
      gphi *src_phi = gsi.phi ();
      gphi *dest_phi = gsi2.phi ();
      /* Transfer both the value and its source location.  */
      tree val = gimple_phi_arg_def (src_phi, src_idx);
      location_t locus = gimple_phi_arg_location (src_phi, src_idx);

      SET_PHI_ARG_DEF (dest_phi, tgt_idx, val);
      gimple_phi_arg_set_location (dest_phi, tgt_idx, locus);
    }
}
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  class loop *loop = exit->dest->loop_father;
  class loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  edge sorig, snew;
  basic_block exit_bb;
  class loop *target, *aloop, *cloop;

  /* EXIT's source must end in a two-way conditional; remember both
     outgoing edges (EXIT itself and its sibling).  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any loop of ORIG_LOOP whose header lies inside the region,
     attaching the copies under LOOP.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      {
	cloop = duplicate_loop (aloop, target);
	duplicate_subloops (aloop, cloop);
      }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
							n_region);

  total_count = exit->src->count;
  exit_count = exit->count ();
  /* Fix up corner cases, to avoid division by zero or creation of negative
     frequencies.  */
  if (exit_count > total_count)
    exit_count = total_count;

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count.initialized_p () && exit_count.initialized_p ())
    {
      /* Original region keeps the non-exit share of the profile, the
	 copy gets the exit share.  */
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!*gsi_last_bb (entry->src)
      || !stmt_ends_bb_p (*gsi_last_bb (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gcond *cond_stmt = as_a <gcond *> (*gsi_last_bb (exit->src));
  cond_stmt = as_a <gcond *> (gimple_copy (cond_stmt));

  gsi = gsi_last_bb (switch_bb);
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* SWITCH_BB's two successors take over the flags/probabilities of the
     original exit edges.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	copy_phi_arg_into_existing_phi (nexits[0], e);
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
7022 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
7023 adding blocks when the dominator traversal reaches EXIT. This
7024 function silently assumes that ENTRY strictly dominates EXIT. */
7026 void
7027 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
7028 vec<basic_block> *bbs_p)
7030 basic_block son;
7032 for (son = first_dom_son (CDI_DOMINATORS, entry);
7033 son;
7034 son = next_dom_son (CDI_DOMINATORS, son))
7036 bbs_p->safe_push (son);
7037 if (son != exit)
7038 gather_blocks_in_sese_region (son, exit, bbs_p);
7042 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7043 The duplicates are recorded in VARS_MAP. */
7045 static void
7046 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
7047 tree to_context)
7049 tree t = *tp, new_t;
7050 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7052 if (DECL_CONTEXT (t) == to_context)
7053 return;
7055 bool existed;
7056 tree &loc = vars_map->get_or_insert (t, &existed);
7058 if (!existed)
7060 if (SSA_VAR_P (t))
7062 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7063 add_local_decl (f, new_t);
7065 else
7067 gcc_assert (TREE_CODE (t) == CONST_DECL);
7068 new_t = copy_node (t);
7070 DECL_CONTEXT (new_t) = to_context;
7072 loc = new_t;
7074 else
7075 new_t = loc;
7077 *tp = new_t;
7081 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7082 VARS_MAP maps old ssa names and var_decls to the new ones. */
7084 static tree
7085 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
7086 tree to_context)
7088 tree new_name;
7090 gcc_assert (!virtual_operand_p (name));
7092 tree *loc = vars_map->get (name);
7094 if (!loc)
7096 tree decl = SSA_NAME_VAR (name);
7097 if (decl)
7099 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
7100 replace_by_duplicate_decl (&decl, vars_map, to_context);
7101 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7102 decl, SSA_NAME_DEF_STMT (name));
7104 else
7105 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7106 name, SSA_NAME_DEF_STMT (name));
7108 /* Now that we've used the def stmt to define new_name, make sure it
7109 doesn't define name anymore. */
7110 SSA_NAME_DEF_STMT (name) = NULL;
7112 vars_map->put (name, new_name);
7114 else
7115 new_name = *loc;
7117 return new_name;
/* State shared by the move_stmt_r / move_stmt_op walkers while moving
   statements from one function to another.  */

struct move_stmt_d
{
  /* BLOCK the moved statements originally belonged to (may be NULL_TREE,
     meaning "any non-NULL block").  */
  tree orig_block;
  /* BLOCK moved statements are re-attached to.  */
  tree new_block;
  /* FUNCTION_DECL of the source function.  */
  tree from_context;
  /* FUNCTION_DECL of the destination function.  */
  tree to_context;
  /* Maps old decls and SSA names to their duplicates in TO_CONTEXT.  */
  hash_map<tree, tree> *vars_map;
  /* Maps old LABEL_DECLs to replacement labels (tree_map entries),
     NULL if no label remapping is needed.  */
  htab_t new_label_map;
  /* Maps source EH regions to their duplicates in the destination.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be replaced by duplicates; cleared while
     walking inside OMP directives (see move_stmt_r).  */
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	/* No block recorded; nothing to rewrite.  */
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	{
	  /* tree_node_can_be_shared says we can share invariant
	     addresses but unshare_expr copies them anyways.  Make sure
	     to unshare before adjusting the block in place - we do not
	     always see a copy here.  */
	  if (TREE_CODE (t) == ADDR_EXPR
	      && is_gimple_min_invariant (t))
	    *tp = t = unshare_expr (t);
	  TREE_SET_BLOCK (t, p->new_block);
	}
      else if (flag_checking)
	{
	  /* Sanity check: the expression's block must be nested inside
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	/* PARM_DECLs must have been entered into VARS_MAP by the caller.  */
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
7216 /* Helper for move_stmt_r. Given an EH region number for the source
7217 function, map that to the duplicate EH regio number in the dest. */
7219 static int
7220 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7222 eh_region old_r, new_r;
7224 old_r = get_eh_region_from_number (old_nr);
7225 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7227 return new_r->index;
7230 /* Similar, but operate on INTEGER_CSTs. */
7232 static tree
7233 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7235 int old_nr, new_nr;
7237 old_nr = tree_to_shwi (old_t_nr);
7238 new_nr = move_stmt_eh_region_nr (old_nr, p);
7240 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is a region number too; remap it, then fall
		 through to also remap argument 0.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* The loop's AUX field was set up by the caller to point at the
	 corresponding loop in DEST_CFUN.  */
      class loop *new_loop = (class loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    vec_safe_grow_cleared (cfg->x_basic_block_info,
			   cfg->x_last_basic_block + 1);

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  But replace all uses that
	     might be outside of the region we move.  */
	  use_operand_p use_p;
	  imm_use_iterator iter;
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, SSA_NAME_VAR (op));
	  /* remove_phi_node advances PSI, hence the `continue' without
	     gsi_next below.  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Rewrite the BLOCK of each phi argument's location, matching what
	 move_stmt_op does for statement operands.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label-to-block mapping from the source CFG to
	     DEST_CFUN's CFG.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      if (is_gimple_call (stmt))
	notice_special_calls (as_a <gcall *> (stmt));
      pop_cfun ();
    }

  /* Rewrite the BLOCK of the goto locations on outgoing edges too.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
7515 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7516 the outermost EH region. Use REGION as the incoming base EH region.
7517 If there is no single outermost region, return NULL and set *ALL to
7518 true. */
7520 static eh_region
7521 find_outermost_region_in_block (struct function *src_cfun,
7522 basic_block bb, eh_region region,
7523 bool *all)
7525 gimple_stmt_iterator si;
7527 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7529 gimple *stmt = gsi_stmt (si);
7530 eh_region stmt_region;
7531 int lp_nr;
7533 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7534 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7535 if (stmt_region)
7537 if (region == NULL)
7538 region = stmt_region;
7539 else if (stmt_region != region)
7541 region = eh_region_outermost (src_cfun, stmt_region, region);
7542 if (region == NULL)
7544 *all = true;
7545 return NULL;
7551 return region;
7554 static tree
7555 new_label_mapper (tree decl, void *data)
7557 htab_t hash = (htab_t) data;
7558 struct tree_map *m;
7559 void **slot;
7561 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7563 m = XNEW (struct tree_map);
7564 m->hash = DECL_UID (decl);
7565 m->base.from = decl;
7566 m->to = create_artificial_label (UNKNOWN_LOCATION);
7567 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7568 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7569 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7571 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7572 gcc_assert (*slot == NULL);
7574 *slot = m;
7576 return m->to;
7579 /* Tree walker to replace the decls used inside value expressions by
7580 duplicates. */
7582 static tree
7583 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7585 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7587 switch (TREE_CODE (*tp))
7589 case VAR_DECL:
7590 case PARM_DECL:
7591 case RESULT_DECL:
7592 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7593 break;
7594 default:
7595 break;
7598 if (IS_TYPE_OR_DECL_P (*tp))
7599 *walk_subtrees = false;
7601 return NULL;
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* Walk the BLOCK_VARS chain through pointers so the chain can be
     re-spliced in place when a decl is replaced by its duplicate.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      /* NOTE(review): the result of unshare_expr is discarded here,
		 so X is rewritten in place — confirm this is intended.  */
	      unshare_expr (x);
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the old decl.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
7639 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7640 from FN1 to FN2. */
7642 static void
7643 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7644 class loop *loop)
7646 /* Discard it from the old loop array. */
7647 (*get_loops (fn1))[loop->num] = NULL;
7649 /* Place it in the new loop array, assigning it a new number. */
7650 loop->num = number_of_loops (fn2);
7651 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7653 /* Recurse to children. */
7654 for (loop = loop->inner; loop; loop = loop->next)
7655 fixup_loop_arrays_after_move (fn1, fn2, loop);
7658 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7659 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7661 DEBUG_FUNCTION void
7662 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7664 basic_block bb;
7665 edge_iterator ei;
7666 edge e;
7667 bitmap bbs = BITMAP_ALLOC (NULL);
7668 int i;
7670 gcc_assert (entry != NULL);
7671 gcc_assert (entry != exit);
7672 gcc_assert (bbs_p != NULL);
7674 gcc_assert (bbs_p->length () > 0);
7676 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7677 bitmap_set_bit (bbs, bb->index);
7679 gcc_assert (bitmap_bit_p (bbs, entry->index));
7680 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7682 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7684 if (bb == entry)
7686 gcc_assert (single_pred_p (entry));
7687 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7689 else
7690 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7692 e = ei_edge (ei);
7693 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7696 if (bb == exit)
7698 gcc_assert (single_succ_p (exit));
7699 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7701 else
7702 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7704 e = ei_edge (ei);
7705 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7709 BITMAP_FREE (bbs);
7712 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7714 bool
7715 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7717 bitmap release_names = (bitmap)data;
7719 if (TREE_CODE (from) != SSA_NAME)
7720 return true;
7722 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7723 return true;
7726 /* Return LOOP_DIST_ALIAS call if present in BB. */
7728 static gimple *
7729 find_loop_dist_alias (basic_block bb)
7731 gimple_stmt_iterator gsi = gsi_last_bb (bb);
7732 if (!safe_is_a <gcond *> (*gsi))
7733 return NULL;
7735 gsi_prev (&gsi);
7736 if (gsi_end_p (gsi))
7737 return NULL;
7739 gimple *g = gsi_stmt (gsi);
7740 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7741 return g;
7742 return NULL;
/* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
   to VALUE and update any immediate uses of it's LHS.  */

void
fold_loop_internal_call (gimple *g, tree value)
{
  tree lhs = gimple_call_lhs (g);
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (g);

  /* Replace the call itself, then propagate VALUE into every use of
     its former LHS.  */
  replace_call_with_value (&gsi, value);
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, value);
      update_stmt (use_stmt);
      /* If we turn conditional to constant, scale profile counts.
	 We know that the conditional was created by loop distribution
	 and all basic blocks dominated by the taken edge are part of
	 the loop distributed.  */
      if (gimple_code (use_stmt) == GIMPLE_COND)
	{
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gimple_bb (use_stmt),
					       &true_edge, &false_edge);
	  /* Determine which edge became unconditionally taken after the
	     condition folded to a constant.  */
	  edge taken_edge = NULL, other_edge = NULL;
	  if (gimple_cond_true_p (as_a <gcond *>(use_stmt)))
	    {
	      taken_edge = true_edge;
	      other_edge = false_edge;
	    }
	  else if (gimple_cond_false_p (as_a <gcond *>(use_stmt)))
	    {
	      taken_edge = false_edge;
	      other_edge = true_edge;
	    }
	  if (taken_edge
	      && !(taken_edge->probability == profile_probability::always ()))
	    {
	      profile_count old_count = taken_edge->count ();
	      profile_count new_count = taken_edge->src->count;
	      taken_edge->probability = profile_probability::always ();
	      other_edge->probability = profile_probability::never ();
	      /* If we have multiple predecessors, we can't use the dominance
		 test.  This should not happen as the guarded code should
		 start with pre-header.  */
	      gcc_assert (single_pred_edge (taken_edge->dest));
	      if (old_count.nonzero_p ())
		{
		  /* Scale the guarded blocks so they reflect the edge now
		     being always taken.  */
		  taken_edge->dest->count
		    = taken_edge->dest->count.apply_scale (new_count,
							   old_count);
		  scale_strictly_dominated_blocks (taken_edge->dest,
						   new_count, old_count);
		}
	    }
	}
    }
}
7807 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7808 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7809 single basic block in the original CFG and the new basic block is
7810 returned. DEST_CFUN must not have a CFG yet.
7812 Note that the region need not be a pure SESE region. Blocks inside
7813 the region may contain calls to abort/exit. The only restriction
7814 is that ENTRY_BB should be the only entry point and it must
7815 dominate EXIT_BB.
7817 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7818 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7819 to the new function.
7821 All local variables referenced in the region are assumed to be in
7822 the corresponding BLOCK_VARS and unexpanded variable lists
7823 associated with DEST_CFUN.
7825 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7826 reimplement move_sese_region_to_fn by duplicating the region rather than
7827 moving it. */
7829 basic_block
7830 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7831 basic_block exit_bb, tree orig_block)
7833 vec<basic_block> bbs;
7834 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7835 basic_block after, bb, *entry_pred, *exit_succ, abb;
7836 struct function *saved_cfun = cfun;
7837 int *entry_flag, *exit_flag;
7838 profile_probability *entry_prob, *exit_prob;
7839 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7840 edge e;
7841 edge_iterator ei;
7842 htab_t new_label_map;
7843 hash_map<void *, void *> *eh_map;
7844 class loop *loop = entry_bb->loop_father;
7845 class loop *loop0 = get_loop (saved_cfun, 0);
7846 struct move_stmt_d d;
7848 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7849 region. */
7850 gcc_assert (entry_bb != exit_bb
7851 && (!exit_bb
7852 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7854 /* Collect all the blocks in the region. Manually add ENTRY_BB
7855 because it won't be added by dfs_enumerate_from. */
7856 bbs.create (0);
7857 bbs.safe_push (entry_bb);
7858 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7860 if (flag_checking)
7861 verify_sese (entry_bb, exit_bb, &bbs);
7863 /* The blocks that used to be dominated by something in BBS will now be
7864 dominated by the new block. */
7865 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7866 bbs.address (),
7867 bbs.length ());
7869 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7870 the predecessor edges to ENTRY_BB and the successor edges to
7871 EXIT_BB so that we can re-attach them to the new basic block that
7872 will replace the region. */
7873 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7874 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7875 entry_flag = XNEWVEC (int, num_entry_edges);
7876 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7877 i = 0;
7878 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7880 entry_prob[i] = e->probability;
7881 entry_flag[i] = e->flags;
7882 entry_pred[i++] = e->src;
7883 remove_edge (e);
7886 if (exit_bb)
7888 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7889 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7890 exit_flag = XNEWVEC (int, num_exit_edges);
7891 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7892 i = 0;
7893 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7895 exit_prob[i] = e->probability;
7896 exit_flag[i] = e->flags;
7897 exit_succ[i++] = e->dest;
7898 remove_edge (e);
7901 else
7903 num_exit_edges = 0;
7904 exit_succ = NULL;
7905 exit_flag = NULL;
7906 exit_prob = NULL;
7909 /* Switch context to the child function to initialize DEST_FN's CFG. */
7910 gcc_assert (dest_cfun->cfg == NULL);
7911 push_cfun (dest_cfun);
7913 init_empty_tree_cfg ();
7915 /* Initialize EH information for the new function. */
7916 eh_map = NULL;
7917 new_label_map = NULL;
7918 if (saved_cfun->eh)
7920 eh_region region = NULL;
7921 bool all = false;
7923 FOR_EACH_VEC_ELT (bbs, i, bb)
7925 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7926 if (all)
7927 break;
7930 init_eh_for_function ();
7931 if (region != NULL || all)
7933 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7934 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7935 new_label_mapper, new_label_map);
7939 /* Initialize an empty loop tree. */
7940 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7941 init_loops_structure (dest_cfun, loops, 1);
7942 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7943 set_loops_for_fn (dest_cfun, loops);
7945 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7947 /* Move the outlined loop tree part. */
7948 num_nodes = bbs.length ();
7949 FOR_EACH_VEC_ELT (bbs, i, bb)
7951 if (bb->loop_father->header == bb)
7953 class loop *this_loop = bb->loop_father;
7954 /* Avoid the need to remap SSA names used in nb_iterations. */
7955 free_numbers_of_iterations_estimates (this_loop);
7956 class loop *outer = loop_outer (this_loop);
7957 if (outer == loop
7958 /* If the SESE region contains some bbs ending with
7959 a noreturn call, those are considered to belong
7960 to the outermost loop in saved_cfun, rather than
7961 the entry_bb's loop_father. */
7962 || outer == loop0)
7964 if (outer != loop)
7965 num_nodes -= this_loop->num_nodes;
7966 flow_loop_tree_node_remove (bb->loop_father);
7967 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7968 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7971 else if (bb->loop_father == loop0 && loop0 != loop)
7972 num_nodes--;
7974 /* Remove loop exits from the outlined region. */
7975 if (loops_for_fn (saved_cfun)->exits)
7976 FOR_EACH_EDGE (e, ei, bb->succs)
7978 struct loops *l = loops_for_fn (saved_cfun);
7979 loop_exit **slot
7980 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7981 NO_INSERT);
7982 if (slot)
7983 l->exits->clear_slot (slot);
7987 /* Adjust the number of blocks in the tree root of the outlined part. */
7988 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7990 /* Setup a mapping to be used by move_block_to_fn. */
7991 loop->aux = current_loops->tree_root;
7992 loop0->aux = current_loops->tree_root;
7994 /* Fix up orig_loop_num. If the block referenced in it has been moved
7995 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7996 signed char *moved_orig_loop_num = NULL;
7997 for (auto dloop : loops_list (dest_cfun, 0))
7998 if (dloop->orig_loop_num)
8000 if (moved_orig_loop_num == NULL)
8001 moved_orig_loop_num
8002 = XCNEWVEC (signed char, vec_safe_length (larray));
8003 if ((*larray)[dloop->orig_loop_num] != NULL
8004 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
8006 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
8007 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
8008 moved_orig_loop_num[dloop->orig_loop_num]++;
8009 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
8011 else
8013 moved_orig_loop_num[dloop->orig_loop_num] = -1;
8014 dloop->orig_loop_num = 0;
8017 pop_cfun ();
8019 if (moved_orig_loop_num)
8021 FOR_EACH_VEC_ELT (bbs, i, bb)
8023 gimple *g = find_loop_dist_alias (bb);
8024 if (g == NULL)
8025 continue;
8027 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8028 gcc_assert (orig_loop_num
8029 && (unsigned) orig_loop_num < vec_safe_length (larray));
8030 if (moved_orig_loop_num[orig_loop_num] == 2)
8032 /* If we have moved both loops with this orig_loop_num into
8033 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
8034 too, update the first argument. */
8035 gcc_assert ((*larray)[orig_loop_num] != NULL
8036 && (get_loop (saved_cfun, orig_loop_num) == NULL));
8037 tree t = build_int_cst (integer_type_node,
8038 (*larray)[orig_loop_num]->num);
8039 gimple_call_set_arg (g, 0, t);
8040 update_stmt (g);
8041 /* Make sure the following loop will not update it. */
8042 moved_orig_loop_num[orig_loop_num] = 0;
8044 else
8045 /* Otherwise at least one of the loops stayed in saved_cfun.
8046 Remove the LOOP_DIST_ALIAS call. */
8047 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8049 FOR_EACH_BB_FN (bb, saved_cfun)
8051 gimple *g = find_loop_dist_alias (bb);
8052 if (g == NULL)
8053 continue;
8054 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8055 gcc_assert (orig_loop_num
8056 && (unsigned) orig_loop_num < vec_safe_length (larray));
8057 if (moved_orig_loop_num[orig_loop_num])
8058 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
8059 of the corresponding loops was moved, remove it. */
8060 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8062 XDELETEVEC (moved_orig_loop_num);
8064 ggc_free (larray);
8066 /* Move blocks from BBS into DEST_CFUN. */
8067 gcc_assert (bbs.length () >= 2);
8068 after = dest_cfun->cfg->x_entry_block_ptr;
8069 hash_map<tree, tree> vars_map;
8071 memset (&d, 0, sizeof (d));
8072 d.orig_block = orig_block;
8073 d.new_block = DECL_INITIAL (dest_cfun->decl);
8074 d.from_context = cfun->decl;
8075 d.to_context = dest_cfun->decl;
8076 d.vars_map = &vars_map;
8077 d.new_label_map = new_label_map;
8078 d.eh_map = eh_map;
8079 d.remap_decls_p = true;
8081 if (gimple_in_ssa_p (cfun))
8082 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
8084 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
8085 set_ssa_default_def (dest_cfun, arg, narg);
8086 vars_map.put (arg, narg);
8089 FOR_EACH_VEC_ELT (bbs, i, bb)
8091 /* No need to update edge counts on the last block. It has
8092 already been updated earlier when we detached the region from
8093 the original CFG. */
8094 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8095 after = bb;
8098 /* Adjust the maximum clique used. */
8099 dest_cfun->last_clique = saved_cfun->last_clique;
8101 loop->aux = NULL;
8102 loop0->aux = NULL;
8103 /* Loop sizes are no longer correct, fix them up. */
8104 loop->num_nodes -= num_nodes;
8105 for (class loop *outer = loop_outer (loop);
8106 outer; outer = loop_outer (outer))
8107 outer->num_nodes -= num_nodes;
8108 loop0->num_nodes -= bbs.length () - num_nodes;
8110 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8112 class loop *aloop;
8113 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8114 if (aloop != NULL)
8116 if (aloop->simduid)
8118 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8119 d.to_context);
8120 dest_cfun->has_simduid_loops = true;
8122 if (aloop->force_vectorize)
8123 dest_cfun->has_force_vectorize_loops = true;
8127 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8128 if (orig_block)
8130 tree block;
8131 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8132 == NULL_TREE);
8133 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8134 = BLOCK_SUBBLOCKS (orig_block);
8135 for (block = BLOCK_SUBBLOCKS (orig_block);
8136 block; block = BLOCK_CHAIN (block))
8137 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8138 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8141 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8142 &vars_map, dest_cfun->decl);
8144 if (new_label_map)
8145 htab_delete (new_label_map);
8146 if (eh_map)
8147 delete eh_map;
8149 /* We need to release ssa-names in a defined order, so first find them,
8150 and then iterate in ascending version order. */
8151 bitmap release_names = BITMAP_ALLOC (NULL);
8152 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8153 bitmap_iterator bi;
8154 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8155 release_ssa_name (ssa_name (i));
8156 BITMAP_FREE (release_names);
8158 /* Rewire the entry and exit blocks. The successor to the entry
8159 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8160 the child function. Similarly, the predecessor of DEST_FN's
8161 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8162 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8163 various CFG manipulation function get to the right CFG.
8165 FIXME, this is silly. The CFG ought to become a parameter to
8166 these helpers. */
8167 push_cfun (dest_cfun);
8168 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8169 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8170 if (exit_bb)
8172 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8173 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8175 else
8176 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8177 pop_cfun ();
8179 /* Back in the original function, the SESE region has disappeared,
8180 create a new basic block in its place. */
8181 bb = create_empty_bb (entry_pred[0]);
8182 if (current_loops)
8183 add_bb_to_loop (bb, loop);
8184 profile_count count = profile_count::zero ();
8185 for (i = 0; i < num_entry_edges; i++)
8187 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8188 e->probability = entry_prob[i];
8189 count += e->count ();
8191 bb->count = count;
8193 for (i = 0; i < num_exit_edges; i++)
8195 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8196 e->probability = exit_prob[i];
8199 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8200 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8201 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8203 if (exit_bb)
8205 free (exit_prob);
8206 free (exit_flag);
8207 free (exit_succ);
8209 free (entry_prob);
8210 free (entry_flag);
8211 free (entry_pred);
8212 bbs.release ();
8214 return bb;
8217 /* Dump default def DEF to file FILE using FLAGS and indentation
8218 SPC. */
8220 static void
8221 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8223 for (int i = 0; i < spc; ++i)
8224 fprintf (file, " ");
8225 dump_ssaname_info_to_file (file, def, spc);
8227 print_generic_expr (file, TREE_TYPE (def), flags);
8228 fprintf (file, " ");
8229 print_generic_expr (file, def, flags);
8230 fprintf (file, " = ");
8231 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8232 fprintf (file, ";\n");
8235 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8237 static void
8238 print_no_sanitize_attr_value (FILE *file, tree value)
8240 unsigned int flags = tree_to_uhwi (value);
8241 bool first = true;
8242 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8244 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8246 if (!first)
8247 fprintf (file, " | ");
8248 fprintf (file, "%s", sanitizer_opts[i].name);
8249 first = false;
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* First dump the attributes of the function, both the decl's and the
     function type's.  */
  tree fntype = TREE_TYPE (fndecl);
  tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };

  for (int i = 0; i != 2; ++i)
    {
      if (!attrs[i])
	continue;

      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      /* no_sanitize and "omp declare variant base" carry values
		 that need special pretty-printing.  */
	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else if (!strcmp (IDENTIFIER_POINTER (name),
				"omp declare variant base"))
		{
		  tree a = TREE_VALUE (chain);
		  print_generic_expr (file, TREE_PURPOSE (a), dump_flags);
		  fprintf (file, " match ");
		  print_omp_context_selector (file, TREE_VALUE (a),
					      dump_flags);
		}
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  /* Print the signature.  The GIMPLE-FE parseable form differs from the
     default dump.  */
  if (flags & TDF_GIMPLE)
    {
      /* Emit the hot-bb threshold param only once per dump file, so the
	 output can be fed back to the GIMPLE frontend.  */
      static bool hotness_bb_param_printed = false;
      if (profile_info != NULL
	  && !hotness_bb_param_printed)
	{
	  hotness_bb_param_printed = true;
	  fprintf (file,
		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
		   " */\n", get_hot_bb_threshold ());
	}

      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE (%s",
	       (fun->curr_properties & PROP_ssa) ? "ssa"
	       : (fun->curr_properties & PROP_cfg) ? "cfg"
	       : "");

      if (fun && fun->cfg)
	{
	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
	  if (bb->count.initialized_p ())
	    fprintf (file, ",%s(%" PRIu64 ")",
		     profile_quality_as_string (bb->count.quality ()),
		     bb->count.value ());
	  /* NOTE(review): this tests the global dump_flags, not the FLAGS
	     parameter — presumably intentional, confirm before changing.  */
	  if (dump_flags & TDF_UID)
	    fprintf (file, ")\n%sD_%u (", function_name (fun),
		     DECL_UID (fndecl));
	  else
	    fprintf (file, ")\n%s (", function_name (fun));
	}
    }
  else
    {
      print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
      if (dump_flags & TDF_UID)
	fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
		 tmclone ? "[tm-clone] " : "");
      else
	fprintf (file, " %s %s(", function_name (fun),
		 tmclone ? "[tm-clone] " : "");
    }

  /* Print the argument list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no gimple body, dump the raw tree and return.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      /* With TDF_ALIAS, also dump the default defs of the arguments, the
	 by-reference result and the static chain.  */
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      if (gimple_in_ssa_p (fun))
	FOR_EACH_SSA_NAME (ix, name, fun)
	  {
	    if (!SSA_NAME_VAR (name)
		/* SSA name with decls without a name still get
		   dumped as _N, list those explicitely as well even
		   though we've dumped the decl declaration as D.xxx
		   above.  */
		|| !SSA_NAME_IDENTIFIER (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun && (fun->curr_properties & PROP_gimple_any))
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  /* Delegate to the FILE-based dumper, targeting stderr.  */
  dump_function_to_file (fn, stderr, flags);
}
8539 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8541 static void
8542 print_pred_bbs (FILE *file, basic_block bb)
8544 edge e;
8545 edge_iterator ei;
8547 FOR_EACH_EDGE (e, ei, bb->preds)
8548 fprintf (file, "bb_%d ", e->src->index);
8552 /* Print on FILE the indexes for the successors of basic_block BB. */
8554 static void
8555 print_succ_bbs (FILE *file, basic_block bb)
8557 edge e;
8558 edge_iterator ei;
8560 FOR_EACH_EDGE (e, ei, bb->succs)
8561 fprintf (file, "bb_%d ", e->dest->index);
8564 /* Print to FILE the basic block BB following the VERBOSITY level. */
8566 void
8567 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8569 char *s_indent = (char *) alloca ((size_t) indent + 1);
8570 memset ((void *) s_indent, ' ', (size_t) indent);
8571 s_indent[indent] = '\0';
8573 /* Print basic_block's header. */
8574 if (verbosity >= 2)
8576 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8577 print_pred_bbs (file, bb);
8578 fprintf (file, "}, succs = {");
8579 print_succ_bbs (file, bb);
8580 fprintf (file, "})\n");
8583 /* Print basic_block's body. */
8584 if (verbosity >= 3)
8586 fprintf (file, "%s {\n", s_indent);
8587 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8588 fprintf (file, "%s }\n", s_indent);
/* Print loop information.  */

void
print_loop_info (FILE *file, const class loop *loop, const char *prefix)
{
  /* One ", <flag>" token per set loop property, on the current line.  */
  if (loop->can_be_parallel)
    fprintf (file, ", can_be_parallel");
  if (loop->warned_aggressive_loop_optimizations)
    fprintf (file, ", warned_aggressive_loop_optimizations");
  if (loop->dont_vectorize)
    fprintf (file, ", dont_vectorize");
  if (loop->force_vectorize)
    fprintf (file, ", force_vectorize");
  if (loop->in_oacc_kernels_region)
    fprintf (file, ", in_oacc_kernels_region");
  if (loop->finite_p)
    fprintf (file, ", finite_p");
  if (loop->unroll)
    fprintf (file, "\n%sunroll %d", prefix, loop->unroll);
  /* Iteration-count data, each on its own PREFIX-indented line.  */
  if (loop->nb_iterations)
    {
      fprintf (file, "\n%sniter ", prefix);
      print_generic_expr (file, loop->nb_iterations);
    }

  if (loop->any_upper_bound)
    {
      fprintf (file, "\n%supper_bound ", prefix);
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, "\n%slikely_upper_bound ", prefix);
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, "\n%sestimate ", prefix);
      print_decu (loop->nb_iterations_estimate, file);
    }
  bool reliable;
  sreal iterations;
  /* loop->num != 0 excludes the function's pseudo root loop.  */
  if (loop->num && expected_loop_iterations_by_profile (loop, &iterations, &reliable))
    {
      fprintf (file, "\n%siterations by profile: %f (%s%s) entry count:", prefix,
	       iterations.to_double (), reliable ? "reliable" : "unreliable",
	       maybe_flat_loop_profile (loop) ? ", maybe flat" : "");
      loop_count_in (loop).dump (file, cfun);
    }
}
static void print_loop_and_siblings (FILE *, class loop *, int, int);

/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, class loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  /* Build a NUL-terminated run of INDENT spaces.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop without a header has been deleted; nothing more to show.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  print_loop_info (file, loop, s_indent);
  fprintf (file, ")\n");

  /* Print loop's body: the blocks directly owned by LOOP, then the
     nested loops, recursively.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
8693 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8694 spaces. Following VERBOSITY level this outputs the contents of the
8695 loop, or just its structure. */
8697 static void
8698 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8699 int verbosity)
8701 if (loop == NULL)
8702 return;
8704 print_loop (file, loop, indent, verbosity);
8705 print_loop_and_siblings (file, loop->next, indent, verbosity);
8708 /* Follow a CFG edge from the entry point of the program, and on entry
8709 of a loop, pretty print the loop structure on FILE. */
8711 void
8712 print_loops (FILE *file, int verbosity)
8714 basic_block bb;
8716 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8717 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8718 if (bb && bb->loop_father)
8719 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump a loop.  */

DEBUG_FUNCTION void
debug (class loop &ref)
{
  /* Verbosity 0: structure only, no block contents.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
8730 DEBUG_FUNCTION void
8731 debug (class loop *ptr)
8733 if (ptr)
8734 debug (*ptr);
8735 else
8736 fprintf (stderr, "<nil>\n");
/* Dump a loop verbosely.  */

DEBUG_FUNCTION void
debug_verbose (class loop &ref)
{
  /* Verbosity 3: structure, owned blocks and their statements.  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
8747 DEBUG_FUNCTION void
8748 debug_verbose (class loop *ptr)
8750 if (ptr)
8751 debug (*ptr);
8752 else
8753 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Dump the whole loop tree of the current function to stderr.  */
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (class loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* get_loop maps the loop number to the loop structure of cfun;
     print_loop handles a NULL result gracefully.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
8782 /* Return true if BB ends with a call, possibly followed by some
8783 instructions that must stay with the call. Return false,
8784 otherwise. */
8786 static bool
8787 gimple_block_ends_with_call_p (basic_block bb)
8789 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8790 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8794 /* Return true if BB ends with a conditional branch. Return false,
8795 otherwise. */
8797 static bool
8798 gimple_block_ends_with_condjump_p (const_basic_block bb)
8800 return safe_is_a <gcond *> (*gsi_last_bb (const_cast <basic_block> (bb)));
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (cfun, t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Known nothrow builtins that cannot return twice cannot terminate
     the block abnormally.  */
  if (is_gimple_call (t)
      && fndecl
      && fndecl_built_in_p (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_dump() and
	 __gcov_reset() and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Non-looping const/pure calls cannot terminate the block.  */
      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call with any real (non-fake) outgoing edge still
	 counts as terminating.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asms and asms with no outputs may abort execution.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Only ENTRY and EXIT blocks: nothing to do.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop committed on the edge creates a fresh last block,
		 so the terminating call no longer ends the final block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the non-debug statements backward; each statement that can
	 terminate the block gets a fake edge to EXIT, splitting the
	 block first when the statement is not the last one.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  auto_vec<basic_block> bbs_to_remove;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of the removed blocks that survive.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_fix_dom.release ();
}
9120 /* Purge dead EH edges from basic block BB. */
9122 bool
9123 gimple_purge_dead_eh_edges (basic_block bb)
9125 bool changed = false;
9126 edge e;
9127 edge_iterator ei;
9128 gimple *stmt = *gsi_last_bb (bb);
9130 if (stmt && stmt_can_throw_internal (cfun, stmt))
9131 return false;
9133 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9135 if (e->flags & EDGE_EH)
9137 remove_edge_and_dominated_blocks (e);
9138 changed = true;
9140 else
9141 ei_next (&ei);
9144 return changed;
9147 /* Purge dead EH edges from basic block listed in BLOCKS. */
9149 bool
9150 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9152 bool changed = false;
9153 unsigned i;
9154 bitmap_iterator bi;
9156 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9158 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9160 /* Earlier gimple_purge_dead_eh_edges could have removed
9161 this basic block already. */
9162 gcc_assert (bb || changed);
9163 if (bb != NULL)
9164 changed |= gimple_purge_dead_eh_edges (bb);
9167 return changed;
/* Purge dead abnormal call edges from basic block BB.
   Returns true if any edge was removed or lost its abnormal flag.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = *gsi_last_bb (bb);

  /* If the last statement can still transfer control abnormally, all
     abnormal edges remain live.  */
  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* A fallthru edge merely loses its abnormal flag; any other
	     abnormal edge is dead and is removed together with blocks
	     it dominates.  Do not advance EI after clearing the flag:
	     the next iteration re-inspects the same edge, which is then
	     no longer abnormal and falls into the ei_next branch.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
9200 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
9202 bool
9203 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
9205 bool changed = false;
9206 unsigned i;
9207 bitmap_iterator bi;
9209 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9211 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9213 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9214 this basic block already. */
9215 gcc_assert (bb || changed);
9216 if (bb != NULL)
9217 changed |= gimple_purge_dead_abnormal_call_edges (bb);
9220 return changed;
9223 /* This function is called whenever a new edge is created or
9224 redirected. */
9226 static void
9227 gimple_execute_on_growing_pred (edge e)
9229 basic_block bb = e->dest;
9231 if (!gimple_seq_empty_p (phi_nodes (bb)))
9232 reserve_phi_args_for_new_edge (bb);
9235 /* This function is called immediately before edge E is removed from
9236 the edge vector E->dest->preds. */
9238 static void
9239 gimple_execute_on_shrinking_pred (edge e)
9241 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9242 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two PHI chains are walked in lockstep: the argument SECOND's
     PHI receives over E2 is exactly the one FIRST's corresponding PHI
     must receive over E.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e,
		   gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  COND_E is the condition, passed
   as a void* tree to match the cfg-hook signature.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr; force it into a form valid as a
     GIMPLE_COND predicate first.  */
  gsi = gsi_last_bb (cond_bb);

  cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
					  is_gimple_condexpr_for_cond,
					  NULL_TREE, false,
					  GSI_CONTINUE_LINKING);
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The pre-existing successor edge becomes
     the false (ELSE) edge; the caller adds the true edge.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counting in RECORD.  Size is always accumulated; time is
   weighted by the best available execution-count estimate.  */
static void
gimple_account_profile_record (basic_block bb,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
       gsi_next_nondebug (&i))
    {
      record->size
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (profile_info)
	{
	  /* With real profile feedback, only account time when both the
	     entry block and BB have initialized IPA counts.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
	      && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
	      && bb->count.ipa ().initialized_p ())
	    record->time
	      += estimate_num_insns (gsi_stmt (i),
				     &eni_time_weights)
		 * bb->count.ipa ().to_gcov_type ();
	}
      /* Without feedback, scale by the static frequency of BB relative
	 to the function entry when both counts are initialized.  */
      else if (bb->count.initialized_p ()
	       && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
	record->time
	  += estimate_num_insns
	       (gsi_stmt (i),
		&eni_time_weights)
	     * bb->count.to_sreal_scale
		 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
      else
	/* No usable counts at all: fall back to the raw estimate.  */
	record->time
	  += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
    }
}
/* Table of CFG hooks used while the IR is in GIMPLE form; installed by
   gimple_register_cfg_hooks.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p, /* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record, /* account_profile_record  */
};
/* Split all critical edges.  Split some extra (not necessarily critical)
   edges if FOR_EDGE_INSERTION_P is true.  Abnormal edges are never split.
   Always returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (bool for_edge_insertion_p /* = false */)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if (for_edge_insertion_p
		   && (!single_pred_p (e->dest)
		       || !gimple_seq_empty_p (phi_nodes (e->dest))
		       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split when E->src really ends in a control
		 statement that is not a (builtin) return.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the "crited" pass, which splits all critical
   edges and records that fact via PROP_no_crit_edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return split_critical_edges ();
  }

  /* The pass may be scheduled more than once; clone supports that.  */
  opt_pass * clone () final override
  {
    return new pass_split_crit_edges (m_ctxt);
  }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  BB may be empty after the split when STMT
     was NULL (split before the first statement).  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block reached on the true edge.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: the fallthru edge left by split_block
     becomes the false edge.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  Both the new block and the join block are
     immediately dominated by BB.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
9535 /* Given a basic block B which ends with a conditional and has
9536 precisely two successors, determine which of the edges is taken if
9537 the conditional is true and which is taken if the conditional is
9538 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9540 void
9541 extract_true_false_edges_from_block (basic_block b,
9542 edge *true_edge,
9543 edge *false_edge)
9545 edge e = EDGE_SUCC (b, 0);
9547 if (e->flags & EDGE_TRUE_VALUE)
9549 *true_edge = e;
9550 *false_edge = EDGE_SUCC (b, 1);
9552 else
9554 *false_edge = e;
9555 *true_edge = EDGE_SUCC (b, 1);
9560 /* From a controlling predicate in the immediate dominator DOM of
9561 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9562 predicate evaluates to true and false and store them to
9563 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9564 they are non-NULL. Returns true if the edges can be determined,
9565 else return false. */
9567 bool
9568 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9569 edge *true_controlled_edge,
9570 edge *false_controlled_edge)
9572 basic_block bb = phiblock;
9573 edge true_edge, false_edge, tem;
9574 edge e0 = NULL, e1 = NULL;
9576 /* We have to verify that one edge into the PHI node is dominated
9577 by the true edge of the predicate block and the other edge
9578 dominated by the false edge. This ensures that the PHI argument
9579 we are going to take is completely determined by the path we
9580 take from the predicate block.
9581 We can only use BB dominance checks below if the destination of
9582 the true/false edges are dominated by their edge, thus only
9583 have a single predecessor. */
9584 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9585 tem = EDGE_PRED (bb, 0);
9586 if (tem == true_edge
9587 || (single_pred_p (true_edge->dest)
9588 && (tem->src == true_edge->dest
9589 || dominated_by_p (CDI_DOMINATORS,
9590 tem->src, true_edge->dest))))
9591 e0 = tem;
9592 else if (tem == false_edge
9593 || (single_pred_p (false_edge->dest)
9594 && (tem->src == false_edge->dest
9595 || dominated_by_p (CDI_DOMINATORS,
9596 tem->src, false_edge->dest))))
9597 e1 = tem;
9598 else
9599 return false;
9600 tem = EDGE_PRED (bb, 1);
9601 if (tem == true_edge
9602 || (single_pred_p (true_edge->dest)
9603 && (tem->src == true_edge->dest
9604 || dominated_by_p (CDI_DOMINATORS,
9605 tem->src, true_edge->dest))))
9606 e0 = tem;
9607 else if (tem == false_edge
9608 || (single_pred_p (false_edge->dest)
9609 && (tem->src == false_edge->dest
9610 || dominated_by_p (CDI_DOMINATORS,
9611 tem->src, false_edge->dest))))
9612 e1 = tem;
9613 else
9614 return false;
9615 if (!e0 || !e1)
9616 return false;
9618 if (true_controlled_edge)
9619 *true_controlled_edge = e0;
9620 if (false_controlled_edge)
9621 *false_controlled_edge = e1;
9623 return true;
/* Generate a range test LHS CODE RHS that determines whether INDEX is in
   the range [low, high] via the standard unsigned-subtraction trick:
   *LHS = (utype)INDEX - low and *RHS = high - low, so INDEX is in range
   iff *LHS <= *RHS as unsigned values.  The associated statements are
   inserted before the last statement of BB.  (The old comment referred
   to a *GSI parameter that this function does not take.)  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  /* range_check_type yields an unsigned type where the subtraction
     below cannot overflow.  */
  tree utype = range_check_type (type);

  low = fold_convert (utype, low);
  high = fold_convert (utype, high);

  gimple_seq seq = NULL;
  index = gimple_convert (&seq, utype, index);
  *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
  *rhs = const_binop (MINUS_EXPR, utype, high, low);

  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
}
9648 /* Return the basic block that belongs to label numbered INDEX
9649 of a switch statement. */
9651 basic_block
9652 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9654 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9657 /* Return the default basic block of a switch statement. */
9659 basic_block
9660 gimple_switch_default_bb (function *ifun, gswitch *gs)
9662 return gimple_switch_label_bb (ifun, gs, 0);
9665 /* Return the edge that belongs to label numbered INDEX
9666 of a switch statement. */
9668 edge
9669 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9671 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9674 /* Return the default edge of a switch statement. */
9676 edge
9677 gimple_switch_default_edge (function *ifun, gswitch *gs)
9679 return gimple_switch_edge (ifun, gs, 0);
9682 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9684 bool
9685 cond_only_block_p (basic_block bb)
9687 /* BB must have no executable statements. */
9688 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9689 if (phi_nodes (bb))
9690 return false;
9691 while (!gsi_end_p (gsi))
9693 gimple *stmt = gsi_stmt (gsi);
9694 if (is_gimple_debug (stmt))
9696 else if (gimple_code (stmt) == GIMPLE_NOP
9697 || gimple_code (stmt) == GIMPLE_PREDICT
9698 || gimple_code (stmt) == GIMPLE_COND)
9700 else
9701 return false;
9702 gsi_next (&gsi);
9704 return true;
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_warn_function_return

/* Warn about a 'noreturn' function that does return, or about control
   reaching the end of a non-void function.  Always returns 0.  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  location_t location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  (TREE_THIS_VOLATILE
     on a FUNCTION_DECL means 'noreturn'.)  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = *gsi_last_bb (e->src);
	  /* Note the assignment to LOCATION inside the condition: we
	     remember the first return with a real location and, when
	     not optimizing, stop looking there.  */
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && ((location = LOCATION_LOCUS (gimple_location (last)))
		  != UNKNOWN_LOCATION)
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      location_t loc = gimple_location (last);
	      gimple *new_stmt = gimple_build_builtin_unreachable (loc);
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
	   && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  greturn *return_stmt = dyn_cast <greturn *> (*gsi_last_bb (e->src));
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !warning_suppressed_p (return_stmt, OPT_Wreturn_type))
	    {
	      location = gimple_location (return_stmt);
	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      if (warning_at (location, OPT_Wreturn_type,
			      "control reaches end of non-void function"))
		suppress_warning (fun->decl, OPT_Wreturn_type);
	      break;
	    }
	}
      /* The C++ FE turns fallthrough from the end of non-void function
	 into __builtin_unreachable () call with BUILTINS_LOCATION.
	 Recognize those as well as calls from ubsan_instrument_return.  */
      basic_block bb;
      if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
	FOR_EACH_BB_FN (bb, fun)
	  if (EDGE_COUNT (bb->succs) == 0)
	    {
	      gimple *last = *gsi_last_bb (bb);
	      const enum built_in_function ubsan_missing_ret
		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
	      if (last
		  && ((LOCATION_LOCUS (gimple_location (last))
		       == BUILTINS_LOCATION
		       && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
			   || gimple_call_builtin_p (last,
						     BUILT_IN_UNREACHABLE_TRAP)
			   || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
		{
		  /* Point the warning at the statement before the
		     unreachable/trap call when it has a location.  */
		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
		  gsi_prev_nondebug (&gsi);
		  gimple *prev = gsi_stmt (gsi);
		  if (prev == NULL)
		    location = UNKNOWN_LOCATION;
		  else
		    location = gimple_location (prev);
		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		    location = fun->function_end_locus;
		  if (warning_at (location, OPT_Wreturn_type,
				  "control reaches end of non-void function"))
		    suppress_warning (fun->decl, OPT_Wreturn_type);
		  break;
		}
	    }
    }
  return 0;
}

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into the
   bodies of container statements (bind/try/catch/eh-filter).  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose value is used has an LHS; internal calls are
	     compiler-generated and never warn.  */
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD "
			    "declared with attribute %<warn_unused_result%>",
			    fdecl);
	      else
		/* Indirect call: the attribute lives on the function
		   type, so no decl to name.  */
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute %<warn_unused_result%>");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

/* Pass descriptor for the (always-anonymous) -Wunused-result warning
   pass; runs on the gimplified body before CFG construction.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return flag_warn_unused_result; }
  unsigned int execute (function *) final override
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* Maybe Remove stores to variables we marked write-only.
   Return true if a store was removed (in which case GSI was advanced by
   the removal and the caller must not step it again).  SSA names feeding
   the removed store are recorded in DCE_SSA_NAMES for later cleanup.  */
static bool
maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
			      bitmap dce_ssa_names)
{
  /* Keep access when store has side effect, i.e. in case when source
     is volatile.  */
  if (!gimple_store_p (stmt)
      || gimple_has_side_effects (stmt)
      || optimize_debug)
    return false;

  tree lhs = get_base_address (gimple_get_lhs (stmt));

  /* Only stores to static/external variables known to the varpool and
     marked write-only qualify.  */
  if (!VAR_P (lhs)
      || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
      || !varpool_node::get (lhs)->writeonly)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing statement, writes"
	       " to write only var:\n");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_VOPS|TDF_MEMSYMS);
    }

  /* Mark ssa name defining to be checked for simple dce.  */
  if (gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (rhs))
	bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
    }
  unlink_stmt_vdef (stmt);
  gsi_remove (&gsi, true);
  release_defs (stmt);
  return true;
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.
   Returns the TODO flags accumulated while fixing things up.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* Same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);
  auto_bitmap dce_ssa_names;

  if (scale)
    {
      /* Rescale the body's counts so they agree with the cgraph node's
	 count.  */
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.  Removal
	     advances GSI, hence the 'continue' without gsi_next.  */
	  if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
	    {
	      todo |= TODO_update_ssa | TODO_cleanup_cfg;
	      continue;
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  gsi_next (&gsi);
	}
      if (gimple *last = *gsi_last_bb (bb))
	{
	  if (maybe_clean_eh_stmt (last)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  if (gimple_purge_dead_abnormal_call_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_nondebug_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining, keep the call graph in sync with the
		 new call we just introduced.  */
	      if (!cfun->after_inlining)
		if (tree fndecl = gimple_call_fndecl (stmt))
		  {
		    gcall *call_stmt = dyn_cast <gcall *> (stmt);
		    node->create_edge (cgraph_node::get_create (fndecl),
				       call_stmt, bb->count);
		  }
	    }
	}
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Clean up SSA names that fed removed write-only stores.  */
  simple_dce_from_worklist (dce_ssa_names);

  return todo;
}
namespace {

/* Pass descriptor for "fixup_cfg", run after IPA transformations to
   reconcile the CFG with newly discovered function properties.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return execute_fixup_cfg ();
  }

}; // class pass_fixup_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark the RTL insn X (which may be NULL) for GC.  */

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

/* Mark the fields of edge E for GC: its endpoints, its pending
   statements (GIMPLE or RTL depending on the current IR), and the
   BLOCK of its goto locus.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note the RTL insn X (which may be NULL) for PCH.  */

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

/* Note the fields of edge E for PCH, mirroring gt_ggc_mx above.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* Apply the pointer-relocation operator OP (with COOKIE) to every
   pointer field of edge E, for PCH pointer adjustment.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), NULL, cookie);
  op (&(e->dest), NULL, cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), NULL, cookie);
  else
    op (&(e->insns.r), NULL, cookie);
  op (&(block), &(block), cookie);
}
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   named NAME returning int, push it as cfun, and initialize an empty
   CFG (just ENTRY and EXIT, no edges) for it.  Returns the decl.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  /* A fresh CFG has exactly the ENTRY and EXIT blocks and no edges.  */
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
10259 /* These tests directly create CFGs.
10260 Compare with the static fns within tree-cfg.cc:
10261 - build_gimple_cfg
10262 - make_blocks: calls create_basic_block (seq, bb);
10263 - make_edges. */
10265 /* Verify a simple cfg of the form:
10266 ENTRY -> A -> B -> C -> EXIT. */
10268 static void
10269 test_linear_chain ()
10271 gimple_register_cfg_hooks ();
10273 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10274 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10276 /* Create some empty blocks. */
10277 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10278 basic_block bb_b = create_empty_bb (bb_a);
10279 basic_block bb_c = create_empty_bb (bb_b);
10281 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10282 ASSERT_EQ (0, n_edges_for_fn (fun));
10284 /* Create some edges: a simple linear chain of BBs. */
10285 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10286 make_edge (bb_a, bb_b, 0);
10287 make_edge (bb_b, bb_c, 0);
10288 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10290 /* Verify the edges. */
10291 ASSERT_EQ (4, n_edges_for_fn (fun));
10292 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10293 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10294 ASSERT_EQ (1, bb_a->preds->length ());
10295 ASSERT_EQ (1, bb_a->succs->length ());
10296 ASSERT_EQ (1, bb_b->preds->length ());
10297 ASSERT_EQ (1, bb_b->succs->length ());
10298 ASSERT_EQ (1, bb_c->preds->length ());
10299 ASSERT_EQ (1, bb_c->succs->length ());
10300 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10301 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10303 /* Verify the dominance information
10304 Each BB in our simple chain should be dominated by the one before
10305 it. */
10306 calculate_dominance_info (CDI_DOMINATORS);
10307 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10308 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10309 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10310 ASSERT_EQ (1, dom_by_b.length ());
10311 ASSERT_EQ (bb_c, dom_by_b[0]);
10312 free_dominance_info (CDI_DOMINATORS);
10314 /* Similarly for post-dominance: each BB in our chain is post-dominated
10315 by the one after it. */
10316 calculate_dominance_info (CDI_POST_DOMINATORS);
10317 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10318 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10319 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10320 ASSERT_EQ (1, postdom_by_b.length ());
10321 ASSERT_EQ (bb_a, postdom_by_b[0]);
10322 free_dominance_info (CDI_POST_DOMINATORS);
10324 pop_cfun ();
10327 /* Verify a simple CFG of the form:
10328 ENTRY
10332 /t \f
10338 EXIT. */
10340 static void
10341 test_diamond ()
10343 gimple_register_cfg_hooks ();
10345 tree fndecl = push_fndecl ("cfg_test_diamond");
10346 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10348 /* Create some empty blocks. */
10349 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10350 basic_block bb_b = create_empty_bb (bb_a);
10351 basic_block bb_c = create_empty_bb (bb_a);
10352 basic_block bb_d = create_empty_bb (bb_b);
10354 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10355 ASSERT_EQ (0, n_edges_for_fn (fun));
10357 /* Create the edges. */
10358 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10359 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10360 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10361 make_edge (bb_b, bb_d, 0);
10362 make_edge (bb_c, bb_d, 0);
10363 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10365 /* Verify the edges. */
10366 ASSERT_EQ (6, n_edges_for_fn (fun));
10367 ASSERT_EQ (1, bb_a->preds->length ());
10368 ASSERT_EQ (2, bb_a->succs->length ());
10369 ASSERT_EQ (1, bb_b->preds->length ());
10370 ASSERT_EQ (1, bb_b->succs->length ());
10371 ASSERT_EQ (1, bb_c->preds->length ());
10372 ASSERT_EQ (1, bb_c->succs->length ());
10373 ASSERT_EQ (2, bb_d->preds->length ());
10374 ASSERT_EQ (1, bb_d->succs->length ());
10376 /* Verify the dominance information. */
10377 calculate_dominance_info (CDI_DOMINATORS);
10378 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10379 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10380 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10381 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10382 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10383 dom_by_a.release ();
10384 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10385 ASSERT_EQ (0, dom_by_b.length ());
10386 dom_by_b.release ();
10387 free_dominance_info (CDI_DOMINATORS);
10389 /* Similarly for post-dominance. */
10390 calculate_dominance_info (CDI_POST_DOMINATORS);
10391 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10392 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10393 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10394 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10395 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10396 postdom_by_d.release ();
10397 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10398 ASSERT_EQ (0, postdom_by_b.length ());
10399 postdom_by_b.release ();
10400 free_dominance_info (CDI_POST_DOMINATORS);
10402 pop_cfun ();
10405 /* Verify that we can handle a CFG containing a "complete" aka
10406 fully-connected subgraph (where A B C D below all have edges
10407 pointing to each other node, also to themselves).
10408 e.g.:
10409 ENTRY EXIT
10415 A<--->B
10416 ^^ ^^
10417 | \ / |
10418 | X |
10419 | / \ |
10420 VV VV
10421 C<--->D
10424 static void
10425 test_fully_connected ()
10427 gimple_register_cfg_hooks ();
10429 tree fndecl = push_fndecl ("cfg_fully_connected");
10430 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10432 const int n = 4;
10434 /* Create some empty blocks. */
10435 auto_vec <basic_block> subgraph_nodes;
10436 for (int i = 0; i < n; i++)
10437 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10439 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10440 ASSERT_EQ (0, n_edges_for_fn (fun));
10442 /* Create the edges. */
10443 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10444 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10445 for (int i = 0; i < n; i++)
10446 for (int j = 0; j < n; j++)
10447 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10449 /* Verify the edges. */
10450 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10451 /* The first one is linked to ENTRY/EXIT as well as itself and
10452 everything else. */
10453 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10454 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10455 /* The other ones in the subgraph are linked to everything in
10456 the subgraph (including themselves). */
10457 for (int i = 1; i < n; i++)
10459 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10460 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10463 /* Verify the dominance information. */
10464 calculate_dominance_info (CDI_DOMINATORS);
10465 /* The initial block in the subgraph should be dominated by ENTRY. */
10466 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10467 get_immediate_dominator (CDI_DOMINATORS,
10468 subgraph_nodes[0]));
10469 /* Every other block in the subgraph should be dominated by the
10470 initial block. */
10471 for (int i = 1; i < n; i++)
10472 ASSERT_EQ (subgraph_nodes[0],
10473 get_immediate_dominator (CDI_DOMINATORS,
10474 subgraph_nodes[i]));
10475 free_dominance_info (CDI_DOMINATORS);
10477 /* Similarly for post-dominance. */
10478 calculate_dominance_info (CDI_POST_DOMINATORS);
10479 /* The initial block in the subgraph should be postdominated by EXIT. */
10480 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10481 get_immediate_dominator (CDI_POST_DOMINATORS,
10482 subgraph_nodes[0]));
10483 /* Every other block in the subgraph should be postdominated by the
10484 initial block, since that leads to EXIT. */
10485 for (int i = 1; i < n; i++)
10486 ASSERT_EQ (subgraph_nodes[0],
10487 get_immediate_dominator (CDI_POST_DOMINATORS,
10488 subgraph_nodes[i]));
10489 free_dominance_info (CDI_POST_DOMINATORS);
10491 pop_cfun ();
10494 /* Run all of the selftests within this file. */
10496 void
10497 tree_cfg_cc_tests ()
10499 test_linear_chain ();
10500 test_diamond ();
10501 test_fully_connected ();
10504 } // namespace selftest
/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */
10513 #endif /* CHECKING_P */