/* Control flow functions for trees.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

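/* Editor's illustration, not part of the original source: for a switch
   such as

     switch (x) { case 1: case 2: goto L; default: break; }

   the CASE_LABEL_EXPRs for 1 and 2 target the same CFG edge, so that
   edge's entry in edge_to_cases is a list of both cases linked through
   their CASE_CHAIN fields.  */
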
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
static edge find_taken_edge_switch_expr (const gswitch *, tree);
static tree find_case_label_for_value (const gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

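/* Editor's illustration, not part of the original source: in GNU C

     void *targets[] = { &&lab_a, &&lab_b };
     goto *targets[i];

   lowers to a GIMPLE_GOTO whose gimple_goto_dest is an address value
   rather than a LABEL_DECL, which is exactly what the predicate above
   tests.  A plain "goto lab_a;" keeps a LABEL_DECL destination and is
   therefore not a computed goto.  */
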
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

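/* Editor's illustration, not part of the original source: a sequence
   of the shape

     lab:
       x = {CLOBBER};
       __builtin_unreachable ();

   satisfies the predicate, since only labels, debug stmts and clobbers
   may precede the trailing __builtin_unreachable call.  */
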
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

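/* Editor's illustration, not part of the original source: a call to
   abort () is ctrl-altering via ECF_NORETURN, and a call to setjmp ()
   is ctrl-altering because control may return to it abnormally via
   longjmp; an ordinary call such as memcpy () gets the flag cleared.  */
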
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

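/* Editor's illustration, not part of the original source: for a
   returns-twice call such as

     x = setjmp (env);

   the rewrite above produces

     tmp = setjmp (env);
     x = tmp;

   so the previous value of x stays available on the abnormal edge out
   of the call and abnormal SSA name life ranges do not overlap.  */
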
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

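/* Editor's illustration, not part of the original source: in a
   one-line loop such as

     for (i = 0; i < n; i++) sum += a[i];

   the condition, body and increment land in distinct basic blocks
   that all share one source line; assign_discriminators below hands
   those blocks different discriminator values so a sample-based
   profiler can attribute counts to the right block.  */
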
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

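/* Editor's note, not part of the original source: calling
   get_cases_for_edge for any one edge of a switch populates the map
   for all of the switch's cases, so a later edge redirection can walk
   the returned CASE_CHAIN and retarget every CASE_LABEL_EXPR that
   referenced the old edge.  */
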
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

1967 /* Replaces all uses of NAME by VAL. */
1969 void
1970 replace_uses_by (tree name, tree val)
1972 imm_use_iterator imm_iter;
1973 use_operand_p use;
1974 gimple *stmt;
1975 edge e;
1977 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1979 /* Mark the block if we change the last stmt in it. */
1980 if (cfgcleanup_altered_bbs
1981 && stmt_ends_bb_p (stmt))
1982 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1984 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1986 replace_exp (use, val);
1988 if (gimple_code (stmt) == GIMPLE_PHI)
1990 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1991 PHI_ARG_INDEX_FROM_USE (use));
1992 if (e->flags & EDGE_ABNORMAL
1993 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1995 /* This can only occur for virtual operands, since
1996                 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1997 would prevent replacement. */
1998 gcc_checking_assert (virtual_operand_p (name));
1999 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2004 if (gimple_code (stmt) != GIMPLE_PHI)
2006 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2007 gimple *orig_stmt = stmt;
2008 size_t i;
2010 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2011 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2012          only change something from non-invariant to invariant, and only
2013 when propagating constants. */
2014 if (is_gimple_min_invariant (val))
2015 for (i = 0; i < gimple_num_ops (stmt); i++)
2017 tree op = gimple_op (stmt, i);
2018 /* Operands may be empty here. For example, the labels
2019 of a GIMPLE_COND are nulled out following the creation
2020 of the corresponding CFG edges. */
2021 if (op && TREE_CODE (op) == ADDR_EXPR)
2022 recompute_tree_invariant_for_addr_expr (op);
2025 if (fold_stmt (&gsi))
2026 stmt = gsi_stmt (gsi);
2028 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2029 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2031 update_stmt (stmt);
2035 gcc_checking_assert (has_zero_uses (name));
2037 /* Also update the trees stored in loop structures. */
2038 if (current_loops)
2040 struct loop *loop;
2042 FOR_EACH_LOOP (loop, 0)
2044 substitute_in_loop_info (loop, name, val);
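
/* Illustrative sketch, not part of the original file: propagating a
   constant into every use of an SSA name, e.g. after an analysis has
   proved that NAME is always zero.  Afterwards NAME has no uses left
   and may be released.  */

static void
sketch_propagate_zero (tree name)
{
  tree zero = build_int_cst (TREE_TYPE (name), 0);
  replace_uses_by (name, zero);
}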
2049 /* Merge block B into block A. */
2051 static void
2052 gimple_merge_blocks (basic_block a, basic_block b)
2054 gimple_stmt_iterator last, gsi;
2055 gphi_iterator psi;
2057 if (dump_file)
2058 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2060 /* Remove all single-valued PHI nodes from block B of the form
2061 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2062 gsi = gsi_last_bb (a);
2063 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2065 gimple *phi = gsi_stmt (psi);
2066 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2067 gimple *copy;
2068 bool may_replace_uses = (virtual_operand_p (def)
2069 || may_propagate_copy (def, use));
2071 /* In case we maintain loop closed ssa form, do not propagate arguments
2072 of loop exit phi nodes. */
2073 if (current_loops
2074 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2075 && !virtual_operand_p (def)
2076 && TREE_CODE (use) == SSA_NAME
2077 && a->loop_father != b->loop_father)
2078 may_replace_uses = false;
2080 if (!may_replace_uses)
2082 gcc_assert (!virtual_operand_p (def));
2084 /* Note that just emitting the copies is fine -- there is no problem
2085 with ordering of phi nodes. This is because A is the single
2086 predecessor of B, therefore results of the phi nodes cannot
2087 appear as arguments of the phi nodes. */
2088 copy = gimple_build_assign (def, use);
2089 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2090 remove_phi_node (&psi, false);
2092 else
2094 /* If we deal with a PHI for virtual operands, we can simply
2095 propagate these without fussing with folding or updating
2096 the stmt. */
2097 if (virtual_operand_p (def))
2099 imm_use_iterator iter;
2100 use_operand_p use_p;
2101 gimple *stmt;
2103 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2104 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2105 SET_USE (use_p, use);
2107 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2108 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2110 else
2111 replace_uses_by (def, use);
2113 remove_phi_node (&psi, true);
2117 /* Ensure that B follows A. */
2118 move_block_after (b, a);
2120 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2121 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2123 /* Remove labels from B and set gimple_bb to A for other statements. */
2124 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2126 gimple *stmt = gsi_stmt (gsi);
2127 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2129 tree label = gimple_label_label (label_stmt);
2130 int lp_nr;
2132 gsi_remove (&gsi, false);
2134 /* Now that we can thread computed gotos, we might have
2135          a situation where we have a forced label in block B.
2136          However, the label at the start of block B might still be
2137          used in other ways (think about the runtime checking for
2138          Fortran assigned gotos).  So we cannot just delete the
2139 label. Instead we move the label to the start of block A. */
2140 if (FORCED_LABEL (label))
2142 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2143 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2145      /* Other user labels are kept around in the form of a debug stmt.  */
2146 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2148 gimple *dbg = gimple_build_debug_bind (label,
2149 integer_zero_node,
2150 stmt);
2151 gimple_debug_bind_reset_value (dbg);
2152 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2155 lp_nr = EH_LANDING_PAD_NR (label);
2156 if (lp_nr)
2158 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2159 lp->post_landing_pad = NULL;
2162 else
2164 gimple_set_bb (stmt, a);
2165 gsi_next (&gsi);
2169 /* When merging two BBs, if their counts are different, the larger count
2170 is selected as the new bb count. This is to handle inconsistent
2171 profiles. */
2172 if (a->loop_father == b->loop_father)
2174 a->count = a->count.merge (b->count);
2177 /* Merge the sequences. */
2178 last = gsi_last_bb (a);
2179 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2180 set_bb_seq (b, NULL);
2182 if (cfgcleanup_altered_bbs)
2183 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2187 /* Return the one of two successors of BB that is not reachable by a
2188 complex edge, if there is one. Else, return BB. We use
2189 this in optimizations that use post-dominators for their heuristics,
2190 to catch the cases in C++ where function calls are involved. */
2192 basic_block
2193 single_noncomplex_succ (basic_block bb)
2195 edge e0, e1;
2196 if (EDGE_COUNT (bb->succs) != 2)
2197 return bb;
2199 e0 = EDGE_SUCC (bb, 0);
2200 e1 = EDGE_SUCC (bb, 1);
2201 if (e0->flags & EDGE_COMPLEX)
2202 return e1->dest;
2203 if (e1->flags & EDGE_COMPLEX)
2204 return e0->dest;
2206 return bb;
2209 /* CALL is a GIMPLE_CALL.  Set current_function_calls_* flags.  */
2211 void
2212 notice_special_calls (gcall *call)
2214 int flags = gimple_call_flags (call);
2216 if (flags & ECF_MAY_BE_ALLOCA)
2217 cfun->calls_alloca = true;
2218 if (flags & ECF_RETURNS_TWICE)
2219 cfun->calls_setjmp = true;
2223 /* Clear flags set by notice_special_calls. Used by dead code removal
2224 to update the flags. */
2226 void
2227 clear_special_calls (void)
2229 cfun->calls_alloca = false;
2230 cfun->calls_setjmp = false;
2233 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2235 static void
2236 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2238 /* Since this block is no longer reachable, we can just delete all
2239 of its PHI nodes. */
2240 remove_phi_nodes (bb);
2242 /* Remove edges to BB's successors. */
2243 while (EDGE_COUNT (bb->succs) > 0)
2244 remove_edge (EDGE_SUCC (bb, 0));
2248 /* Remove statements of basic block BB. */
2250 static void
2251 remove_bb (basic_block bb)
2253 gimple_stmt_iterator i;
2255 if (dump_file)
2257 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2258 if (dump_flags & TDF_DETAILS)
2260 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2261 fprintf (dump_file, "\n");
2265 if (current_loops)
2267 struct loop *loop = bb->loop_father;
2269 /* If a loop gets removed, clean up the information associated
2270 with it. */
2271 if (loop->latch == bb
2272 || loop->header == bb)
2273 free_numbers_of_iterations_estimates (loop);
2276 /* Remove all the instructions in the block. */
2277 if (bb_seq (bb) != NULL)
2279 /* Walk backwards so as to get a chance to substitute all
2280 released DEFs into debug stmts. See
2281 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2282 details. */
2283 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2285 gimple *stmt = gsi_stmt (i);
2286 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2287 if (label_stmt
2288 && (FORCED_LABEL (gimple_label_label (label_stmt))
2289 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2291 basic_block new_bb;
2292 gimple_stmt_iterator new_gsi;
2294 /* A non-reachable non-local label may still be referenced.
2295 But it no longer needs to carry the extra semantics of
2296 non-locality. */
2297 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2299 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2300 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2303 new_bb = bb->prev_bb;
2304 /* Don't move any labels into ENTRY block. */
2305 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2307 new_bb = single_succ (new_bb);
2308 gcc_assert (new_bb != bb);
2310 new_gsi = gsi_start_bb (new_bb);
2311 gsi_remove (&i, false);
2312 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2314 else
2316 /* Release SSA definitions. */
2317 release_defs (stmt);
2318 gsi_remove (&i, true);
2321 if (gsi_end_p (i))
2322 i = gsi_last_bb (bb);
2323 else
2324 gsi_prev (&i);
2328 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2329 bb->il.gimple.seq = NULL;
2330 bb->il.gimple.phi_nodes = NULL;
2334 /* Given a basic block BB and a value VAL for use in the final statement
2335 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2336 the edge that will be taken out of the block.
2337 If VAL is NULL_TREE, then the current value of the final statement's
2338 predicate or index is used.
2339 If the value does not match a unique edge, NULL is returned. */
2341 edge
2342 find_taken_edge (basic_block bb, tree val)
2344 gimple *stmt;
2346 stmt = last_stmt (bb);
2348 /* Handle ENTRY and EXIT. */
2349 if (!stmt)
2350 return NULL;
2352 if (gimple_code (stmt) == GIMPLE_COND)
2353 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2355 if (gimple_code (stmt) == GIMPLE_SWITCH)
2356 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2358 if (computed_goto_p (stmt))
2360      /* Only optimize if the argument is a label; if the argument is
2361          not a label then we cannot construct a proper CFG.
2363 It may be the case that we only need to allow the LABEL_REF to
2364 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2365 appear inside a LABEL_EXPR just to be safe. */
2366 if (val
2367 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2368 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2369 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2372 /* Otherwise we only know the taken successor edge if it's unique. */
2373 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
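
/* Illustrative sketch, not part of the original file: a propagation pass
   that has proved the controlling predicate of BB to be true can ask
   which successor edge survives; a NULL result means no unique taken
   edge could be determined.  */

static edge
sketch_edge_taken_when_true (basic_block bb)
{
  return find_taken_edge (bb, integer_one_node);
}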
2376 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2377 statement, determine which of the outgoing edges will be taken out of the
2378 block. Return NULL if either edge may be taken. */
2380 static edge
2381 find_taken_edge_computed_goto (basic_block bb, tree val)
2383 basic_block dest;
2384 edge e = NULL;
2386 dest = label_to_block (val);
2387 if (dest)
2388 e = find_edge (bb, dest);
2390 /* It's possible for find_edge to return NULL here on invalid code
2391 that abuses the labels-as-values extension (e.g. code that attempts to
2392 jump *between* functions via stored labels-as-values; PR 84136).
2393 If so, then we simply return that NULL for the edge.
2394 We don't currently have a way of detecting such invalid code, so we
2395 can't assert that it was the case when a NULL edge occurs here. */
2397 return e;
2400 /* Given COND_STMT and a constant value VAL for use as the predicate,
2401 determine which of the two edges will be taken out of
2402 the statement's block. Return NULL if either edge may be taken.
2403 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2404 is used. */
2406 static edge
2407 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2409 edge true_edge, false_edge;
2411 if (val == NULL_TREE)
2413 /* Use the current value of the predicate. */
2414 if (gimple_cond_true_p (cond_stmt))
2415 val = integer_one_node;
2416 else if (gimple_cond_false_p (cond_stmt))
2417 val = integer_zero_node;
2418 else
2419 return NULL;
2421 else if (TREE_CODE (val) != INTEGER_CST)
2422 return NULL;
2424 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2425 &true_edge, &false_edge);
2427 return (integer_zerop (val) ? false_edge : true_edge);
2430 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2431 which edge will be taken out of the statement's block. Return NULL if any
2432 edge may be taken.
2433 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2434 is used. */
2436 static edge
2437 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2439 basic_block dest_bb;
2440 edge e;
2441 tree taken_case;
2443 if (gimple_switch_num_labels (switch_stmt) == 1)
2444 taken_case = gimple_switch_default_label (switch_stmt);
2445 else
2447 if (val == NULL_TREE)
2448 val = gimple_switch_index (switch_stmt);
2449 if (TREE_CODE (val) != INTEGER_CST)
2450 return NULL;
2451 else
2452 taken_case = find_case_label_for_value (switch_stmt, val);
2454 dest_bb = label_to_block (CASE_LABEL (taken_case));
2456 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2457 gcc_assert (e);
2458 return e;
2462 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2463 We can make optimal use here of the fact that the case labels are
2464 sorted: We can do a binary search for a case matching VAL. */
2466 static tree
2467 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2469 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2470 tree default_case = gimple_switch_default_label (switch_stmt);
2472 for (low = 0, high = n; high - low > 1; )
2474 size_t i = (high + low) / 2;
2475 tree t = gimple_switch_label (switch_stmt, i);
2476 int cmp;
2478 /* Cache the result of comparing CASE_LOW and val. */
2479 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2481 if (cmp > 0)
2482 high = i;
2483 else
2484 low = i;
2486 if (CASE_HIGH (t) == NULL)
2488          /* A single-valued case label.  */
2489 if (cmp == 0)
2490 return t;
2492 else
2494 /* A case range. We can only handle integer ranges. */
2495 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2496 return t;
2500 return default_case;
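
/* Worked example, illustrative only: for a switch with the sorted cases
   case 1:  case 5 ... 9:  default:, a lookup of 7 lands on the range
   label because CASE_LOW (5) <= 7 and CASE_HIGH (9) >= 7, while a lookup
   of 3 matches neither label and falls back to the default case.  */

static tree
sketch_lookup_case_seven (const gswitch *switch_stmt)
{
  tree val = build_int_cst (integer_type_node, 7);
  return find_case_label_for_value (switch_stmt, val);
}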
2504 /* Dump a basic block on stderr. */
2506 void
2507 gimple_debug_bb (basic_block bb)
2509 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2513 /* Dump basic block with index N on stderr. */
2515 basic_block
2516 gimple_debug_bb_n (int n)
2518 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2519 return BASIC_BLOCK_FOR_FN (cfun, n);
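
/* Illustrative note, not part of the original file: these dump helpers
   are intended to be called by hand from a debugger, e.g.

     (gdb) call gimple_debug_bb_n (3)

   which prints basic block 3 of the current function to stderr.  */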
2523 /* Dump the CFG on stderr.
2525    FLAGS are the same as those used by the tree dumping functions
2526 (see TDF_* in dumpfile.h). */
2528 void
2529 gimple_debug_cfg (dump_flags_t flags)
2531 gimple_dump_cfg (stderr, flags);
2535 /* Dump the program showing basic block boundaries on the given FILE.
2537    FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2538 tree.h). */
2540 void
2541 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2543 if (flags & TDF_DETAILS)
2545 dump_function_header (file, current_function_decl, flags);
2546 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2547 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2548 last_basic_block_for_fn (cfun));
2550 brief_dump_cfg (file, flags);
2551 fprintf (file, "\n");
2554 if (flags & TDF_STATS)
2555 dump_cfg_stats (file);
2557 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2561 /* Dump CFG statistics on FILE. */
2563 void
2564 dump_cfg_stats (FILE *file)
2566 static long max_num_merged_labels = 0;
2567 unsigned long size, total = 0;
2568 long num_edges;
2569 basic_block bb;
2570 const char * const fmt_str = "%-30s%-13s%12s\n";
2571 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2572 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2573 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2574 const char *funcname = current_function_name ();
2576 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2578 fprintf (file, "---------------------------------------------------------\n");
2579 fprintf (file, fmt_str, "", " Number of ", "Memory");
2580 fprintf (file, fmt_str, "", " instances ", "used ");
2581 fprintf (file, "---------------------------------------------------------\n");
2583 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2584 total += size;
2585 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2586 SCALE (size), LABEL (size));
2588 num_edges = 0;
2589 FOR_EACH_BB_FN (bb, cfun)
2590 num_edges += EDGE_COUNT (bb->succs);
2591 size = num_edges * sizeof (struct edge_def);
2592 total += size;
2593 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2595 fprintf (file, "---------------------------------------------------------\n");
2596 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2597 LABEL (total));
2598 fprintf (file, "---------------------------------------------------------\n");
2599 fprintf (file, "\n");
2601 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2602 max_num_merged_labels = cfg_stats.num_merged_labels;
2604 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2605 cfg_stats.num_merged_labels, max_num_merged_labels);
2607 fprintf (file, "\n");
2611 /* Dump CFG statistics on stderr. Keep extern so that it's always
2612 linked in the final executable. */
2614 DEBUG_FUNCTION void
2615 debug_cfg_stats (void)
2617 dump_cfg_stats (stderr);
2620 /*---------------------------------------------------------------------------
2621 Miscellaneous helpers
2622 ---------------------------------------------------------------------------*/
2624 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2625 flow. Transfers of control flow associated with EH are excluded. */
2627 static bool
2628 call_can_make_abnormal_goto (gimple *t)
2630 /* If the function has no non-local labels, then a call cannot make an
2631 abnormal transfer of control. */
2632 if (!cfun->has_nonlocal_label
2633 && !cfun->calls_setjmp)
2634 return false;
2636 /* Likewise if the call has no side effects. */
2637 if (!gimple_has_side_effects (t))
2638 return false;
2640 /* Likewise if the called function is leaf. */
2641 if (gimple_call_flags (t) & ECF_LEAF)
2642 return false;
2644 return true;
2648 /* Return true if T can make an abnormal transfer of control flow.
2649 Transfers of control flow associated with EH are excluded. */
2651 bool
2652 stmt_can_make_abnormal_goto (gimple *t)
2654 if (computed_goto_p (t))
2655 return true;
2656 if (is_gimple_call (t))
2657 return call_can_make_abnormal_goto (t);
2658 return false;
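
/* Illustrative sketch, not part of the original file: a pass that must
   stay conservative about abnormal control flow can use the predicate
   above to decide whether a block may need an abnormal out-edge.  */

static bool
sketch_bb_may_need_abnormal_edge (basic_block bb)
{
  gimple *last = last_stmt (bb);
  return last && stmt_can_make_abnormal_goto (last);
}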
2662 /* Return true if T represents a stmt that always transfers control. */
2664 bool
2665 is_ctrl_stmt (gimple *t)
2667 switch (gimple_code (t))
2669 case GIMPLE_COND:
2670 case GIMPLE_SWITCH:
2671 case GIMPLE_GOTO:
2672 case GIMPLE_RETURN:
2673 case GIMPLE_RESX:
2674 return true;
2675 default:
2676 return false;
2681 /* Return true if T is a statement that may alter the flow of control
2682 (e.g., a call to a non-returning function). */
2684 bool
2685 is_ctrl_altering_stmt (gimple *t)
2687 gcc_assert (t);
2689 switch (gimple_code (t))
2691 case GIMPLE_CALL:
2692 /* Per stmt call flag indicates whether the call could alter
2693          control flow.  */
2694 if (gimple_call_ctrl_altering_p (t))
2695 return true;
2696 break;
2698 case GIMPLE_EH_DISPATCH:
2699 /* EH_DISPATCH branches to the individual catch handlers at
2700 this level of a try or allowed-exceptions region. It can
2701 fallthru to the next statement as well. */
2702 return true;
2704 case GIMPLE_ASM:
2705 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2706 return true;
2707 break;
2709 CASE_GIMPLE_OMP:
2710 /* OpenMP directives alter control flow. */
2711 return true;
2713 case GIMPLE_TRANSACTION:
2714 /* A transaction start alters control flow. */
2715 return true;
2717 default:
2718 break;
2721 /* If a statement can throw, it alters control flow. */
2722 return stmt_can_throw_internal (t);
2726 /* Return true if T is a simple local goto. */
2728 bool
2729 simple_goto_p (gimple *t)
2731 return (gimple_code (t) == GIMPLE_GOTO
2732 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2736 /* Return true if STMT should start a new basic block. PREV_STMT is
2737 the statement preceding STMT. It is used when STMT is a label or a
2738 case label. Labels should only start a new basic block if their
2739    previous statement wasn't a label.  Otherwise, a sequence of labels
2740 would generate unnecessary basic blocks that only contain a single
2741 label. */
2743 static inline bool
2744 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2746 if (stmt == NULL)
2747 return false;
2749 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2750 any nondebug stmts in the block. We don't want to start another
2751 block in this case: the debug stmt will already have started the
2752 one STMT would start if we weren't outputting debug stmts. */
2753 if (prev_stmt && is_gimple_debug (prev_stmt))
2754 return false;
2756 /* Labels start a new basic block only if the preceding statement
2757 wasn't a label of the same type. This prevents the creation of
2758 consecutive blocks that have nothing but a single label. */
2759 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2761 /* Nonlocal and computed GOTO targets always start a new block. */
2762 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2763 || FORCED_LABEL (gimple_label_label (label_stmt)))
2764 return true;
2766 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2768 if (DECL_NONLOCAL (gimple_label_label (
2769 as_a <glabel *> (prev_stmt))))
2770 return true;
2772 cfg_stats.num_merged_labels++;
2773 return false;
2775 else
2776 return true;
2778 else if (gimple_code (stmt) == GIMPLE_CALL)
2780 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2781          /* setjmp acts similarly to a nonlocal GOTO target and thus should
2782 start a new block. */
2783 return true;
2784 if (gimple_call_internal_p (stmt, IFN_PHI)
2785 && prev_stmt
2786 && gimple_code (prev_stmt) != GIMPLE_LABEL
2787 && (gimple_code (prev_stmt) != GIMPLE_CALL
2788 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2789          /* PHI nodes start a new block unless preceded by a label
2790 or another PHI. */
2791 return true;
2794 return false;
2798 /* Return true if T should end a basic block. */
2800 bool
2801 stmt_ends_bb_p (gimple *t)
2803 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
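
/* Illustrative sketch, not part of the original file: the two predicates
   above are what the CFG builder uses to place block boundaries.
   Schematically, a new block begins at the first statement, after any
   statement that ends a block, and at any statement that starts one.  */

static unsigned
sketch_count_future_blocks (gimple_seq seq)
{
  unsigned nblocks = 0;
  gimple *prev = NULL;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (!prev || stmt_ends_bb_p (prev) || stmt_starts_bb_p (stmt, prev))
	nblocks++;
      prev = stmt;
    }
  return nblocks;
}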
2806 /* Remove block annotations and other data structures. */
2808 void
2809 delete_tree_cfg_annotations (struct function *fn)
2811 vec_free (label_to_block_map_for_fn (fn));
2814 /* Return the virtual phi in BB. */
2816 gphi *
2817 get_virtual_phi (basic_block bb)
2819 for (gphi_iterator gsi = gsi_start_phis (bb);
2820 !gsi_end_p (gsi);
2821 gsi_next (&gsi))
2823 gphi *phi = gsi.phi ();
2825 if (virtual_operand_p (PHI_RESULT (phi)))
2826 return phi;
2829 return NULL;
2832 /* Return the first statement in basic block BB. */
2834 gimple *
2835 first_stmt (basic_block bb)
2837 gimple_stmt_iterator i = gsi_start_bb (bb);
2838 gimple *stmt = NULL;
2840 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2842 gsi_next (&i);
2843 stmt = NULL;
2845 return stmt;
2848 /* Return the first non-label statement in basic block BB. */
2850 static gimple *
2851 first_non_label_stmt (basic_block bb)
2853 gimple_stmt_iterator i = gsi_start_bb (bb);
2854 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2855 gsi_next (&i);
2856 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2859 /* Return the last statement in basic block BB. */
2861 gimple *
2862 last_stmt (basic_block bb)
2864 gimple_stmt_iterator i = gsi_last_bb (bb);
2865 gimple *stmt = NULL;
2867 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2869 gsi_prev (&i);
2870 stmt = NULL;
2872 return stmt;
2875 /* Return the last statement of an otherwise empty block. Return NULL
2876 if the block is totally empty, or if it contains more than one
2877 statement. */
2879 gimple *
2880 last_and_only_stmt (basic_block bb)
2882 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2883 gimple *last, *prev;
2885 if (gsi_end_p (i))
2886 return NULL;
2888 last = gsi_stmt (i);
2889 gsi_prev_nondebug (&i);
2890 if (gsi_end_p (i))
2891 return last;
2893 /* Empty statements should no longer appear in the instruction stream.
2894 Everything that might have appeared before should be deleted by
2895 remove_useless_stmts, and the optimizers should just gsi_remove
2896 instead of smashing with build_empty_stmt.
2898 Thus the only thing that should appear here in a block containing
2899 one executable statement is a label. */
2900 prev = gsi_stmt (i);
2901 if (gimple_code (prev) == GIMPLE_LABEL)
2902 return last;
2903 else
2904 return NULL;
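
/* Illustrative sketch, not part of the original file: detecting a block
   whose only real work is a single goto, a shape that edge forwarding
   is interested in.  */

static bool
sketch_block_is_bare_goto (basic_block bb)
{
  gimple *stmt = last_and_only_stmt (bb);
  return stmt && gimple_code (stmt) == GIMPLE_GOTO;
}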
2907 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2909 static void
2910 reinstall_phi_args (edge new_edge, edge old_edge)
2912 edge_var_map *vm;
2913 int i;
2914 gphi_iterator phis;
2916 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2917 if (!v)
2918 return;
2920 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2921 v->iterate (i, &vm) && !gsi_end_p (phis);
2922 i++, gsi_next (&phis))
2924 gphi *phi = phis.phi ();
2925 tree result = redirect_edge_var_map_result (vm);
2926 tree arg = redirect_edge_var_map_def (vm);
2928 gcc_assert (result == gimple_phi_result (phi));
2930 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2933 redirect_edge_var_map_clear (old_edge);
2936 /* Returns the basic block after which the new basic block created
2937 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2938 near its "logical" location. This is of most help to humans looking
2939 at debugging dumps. */
2941 basic_block
2942 split_edge_bb_loc (edge edge_in)
2944 basic_block dest = edge_in->dest;
2945 basic_block dest_prev = dest->prev_bb;
2947 if (dest_prev)
2949 edge e = find_edge (dest_prev, dest);
2950 if (e && !(e->flags & EDGE_COMPLEX))
2951 return edge_in->src;
2953 return dest_prev;
2956 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2957 Abort on abnormal edges. */
2959 static basic_block
2960 gimple_split_edge (edge edge_in)
2962 basic_block new_bb, after_bb, dest;
2963 edge new_edge, e;
2965 /* Abnormal edges cannot be split. */
2966 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2968 dest = edge_in->dest;
2970 after_bb = split_edge_bb_loc (edge_in);
2972 new_bb = create_empty_bb (after_bb);
2973 new_bb->count = edge_in->count ();
2975 e = redirect_edge_and_branch (edge_in, new_bb);
2976 gcc_assert (e == edge_in);
2978 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2979 reinstall_phi_args (new_edge, e);
2981 return new_bb;
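
/* Illustrative sketch, not part of the original file: the classic client
   of the edge-splitting machinery is a pass that splits every critical
   edge; the real split_critical_edges pass handles additional special
   cases beyond skipping abnormal edges as done here.  */

static void
sketch_split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB_FN (bb, cfun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	split_edge (e);
}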
2985 /* Verify properties of the address expression T whose base should be
2986 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2988 static bool
2989 verify_address (tree t, bool verify_addressable)
2991 bool old_constant;
2992 bool old_side_effects;
2993 bool new_constant;
2994 bool new_side_effects;
2996 old_constant = TREE_CONSTANT (t);
2997 old_side_effects = TREE_SIDE_EFFECTS (t);
2999 recompute_tree_invariant_for_addr_expr (t);
3000 new_side_effects = TREE_SIDE_EFFECTS (t);
3001 new_constant = TREE_CONSTANT (t);
3003 if (old_constant != new_constant)
3005 error ("constant not recomputed when ADDR_EXPR changed");
3006 return true;
3008 if (old_side_effects != new_side_effects)
3010 error ("side effects not recomputed when ADDR_EXPR changed");
3011 return true;
3014 tree base = TREE_OPERAND (t, 0);
3015 while (handled_component_p (base))
3016 base = TREE_OPERAND (base, 0);
3018 if (!(VAR_P (base)
3019 || TREE_CODE (base) == PARM_DECL
3020 || TREE_CODE (base) == RESULT_DECL))
3021 return false;
3023 if (DECL_GIMPLE_REG_P (base))
3025 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3026 return true;
3029 if (verify_addressable && !TREE_ADDRESSABLE (base))
3031 error ("address taken, but ADDRESSABLE bit not set");
3032 return true;
3035 return false;
3039 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3040 Returns true if there is an error, otherwise false. */
3042 static bool
3043 verify_types_in_gimple_min_lval (tree expr)
3045 tree op;
3047 if (is_gimple_id (expr))
3048 return false;
3050 if (TREE_CODE (expr) != TARGET_MEM_REF
3051 && TREE_CODE (expr) != MEM_REF)
3053 error ("invalid expression for min lvalue");
3054 return true;
3057 /* TARGET_MEM_REFs are strange beasts. */
3058 if (TREE_CODE (expr) == TARGET_MEM_REF)
3059 return false;
3061 op = TREE_OPERAND (expr, 0);
3062 if (!is_gimple_val (op))
3064 error ("invalid operand in indirect reference");
3065 debug_generic_stmt (op);
3066 return true;
3068 /* Memory references now generally can involve a value conversion. */
3070 return false;
3073 /* Verify if EXPR is a valid GIMPLE reference expression. If
3074 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3075 if there is an error, otherwise false. */
3077 static bool
3078 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3080 if (TREE_CODE (expr) == REALPART_EXPR
3081 || TREE_CODE (expr) == IMAGPART_EXPR
3082 || TREE_CODE (expr) == BIT_FIELD_REF)
3084 tree op = TREE_OPERAND (expr, 0);
3085 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3087 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3088 return true;
3091 if (TREE_CODE (expr) == BIT_FIELD_REF)
3093 tree t1 = TREE_OPERAND (expr, 1);
3094 tree t2 = TREE_OPERAND (expr, 2);
3095 poly_uint64 size, bitpos;
3096 if (!poly_int_tree_p (t1, &size)
3097 || !poly_int_tree_p (t2, &bitpos)
3098 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3099 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3101 error ("invalid position or size operand to BIT_FIELD_REF");
3102 return true;
3104 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3105 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3107 error ("integral result type precision does not match "
3108 "field size of BIT_FIELD_REF");
3109 return true;
3111 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3112 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3113 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3114 size))
3116 error ("mode size of non-integral result does not "
3117 "match field size of BIT_FIELD_REF");
3118 return true;
3120 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3121 && maybe_gt (size + bitpos,
3122 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3124 error ("position plus size exceeds size of referenced object in "
3125 "BIT_FIELD_REF");
3126 return true;
3130 if ((TREE_CODE (expr) == REALPART_EXPR
3131 || TREE_CODE (expr) == IMAGPART_EXPR)
3132 && !useless_type_conversion_p (TREE_TYPE (expr),
3133 TREE_TYPE (TREE_TYPE (op))))
3135 error ("type mismatch in real/imagpart reference");
3136 debug_generic_stmt (TREE_TYPE (expr));
3137 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3138 return true;
3140 expr = op;
3143 while (handled_component_p (expr))
3145 if (TREE_CODE (expr) == REALPART_EXPR
3146 || TREE_CODE (expr) == IMAGPART_EXPR
3147 || TREE_CODE (expr) == BIT_FIELD_REF)
3149 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3150 return true;
3153 tree op = TREE_OPERAND (expr, 0);
3155 if (TREE_CODE (expr) == ARRAY_REF
3156 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3158 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3159 || (TREE_OPERAND (expr, 2)
3160 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3161 || (TREE_OPERAND (expr, 3)
3162 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3164 error ("invalid operands to array reference");
3165 debug_generic_stmt (expr);
3166 return true;
3170 /* Verify if the reference array element types are compatible. */
3171 if (TREE_CODE (expr) == ARRAY_REF
3172 && !useless_type_conversion_p (TREE_TYPE (expr),
3173 TREE_TYPE (TREE_TYPE (op))))
3175 error ("type mismatch in array reference");
3176 debug_generic_stmt (TREE_TYPE (expr));
3177 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3178 return true;
3180 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3181 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3182 TREE_TYPE (TREE_TYPE (op))))
3184 error ("type mismatch in array range reference");
3185 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3186 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3187 return true;
3190 if (TREE_CODE (expr) == COMPONENT_REF)
3192 if (TREE_OPERAND (expr, 2)
3193 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3195 error ("invalid COMPONENT_REF offset operator");
3196 return true;
3198 if (!useless_type_conversion_p (TREE_TYPE (expr),
3199 TREE_TYPE (TREE_OPERAND (expr, 1))))
3201 error ("type mismatch in component reference");
3202 debug_generic_stmt (TREE_TYPE (expr));
3203 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3204 return true;
3208 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3210 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3211 that their operand is not an SSA name or an invariant when
3212 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3213 bug). Otherwise there is nothing to verify, gross mismatches at
3214 most invoke undefined behavior. */
3215 if (require_lvalue
3216 && (TREE_CODE (op) == SSA_NAME
3217 || is_gimple_min_invariant (op)))
3219 error ("conversion of an SSA_NAME on the left hand side");
3220 debug_generic_stmt (expr);
3221 return true;
3223 else if (TREE_CODE (op) == SSA_NAME
3224 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3226 error ("conversion of register to a different size");
3227 debug_generic_stmt (expr);
3228 return true;
3230 else if (!handled_component_p (op))
3231 return false;
3234 expr = op;
3237 if (TREE_CODE (expr) == MEM_REF)
3239 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3240 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3241 && verify_address (TREE_OPERAND (expr, 0), false)))
3243 error ("invalid address operand in MEM_REF");
3244 debug_generic_stmt (expr);
3245 return true;
3247 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3248 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3250 error ("invalid offset operand in MEM_REF");
3251 debug_generic_stmt (expr);
3252 return true;
3255 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3257 if (!TMR_BASE (expr)
3258 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3259 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3260 && verify_address (TMR_BASE (expr), false)))
3262 error ("invalid address operand in TARGET_MEM_REF");
3263 return true;
3265 if (!TMR_OFFSET (expr)
3266 || !poly_int_tree_p (TMR_OFFSET (expr))
3267 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3269 error ("invalid offset operand in TARGET_MEM_REF");
3270 debug_generic_stmt (expr);
3271 return true;
3274 else if (TREE_CODE (expr) == INDIRECT_REF)
3276 error ("INDIRECT_REF in gimple IL");
3277 debug_generic_stmt (expr);
3278 return true;
3281 return ((require_lvalue || !is_gimple_min_invariant (expr))
3282 && verify_types_in_gimple_min_lval (expr));
3285 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3286 list of pointer-to types that is trivially convertible to DEST. */
3288 static bool
3289 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3291 tree src;
3293 if (!TYPE_POINTER_TO (src_obj))
3294 return true;
3296 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3297 if (useless_type_conversion_p (dest, src))
3298 return true;
3300 return false;
3303 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3304 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3306 static bool
3307 valid_fixed_convert_types_p (tree type1, tree type2)
3309 return (FIXED_POINT_TYPE_P (type1)
3310 && (INTEGRAL_TYPE_P (type2)
3311 || SCALAR_FLOAT_TYPE_P (type2)
3312 || FIXED_POINT_TYPE_P (type2)));
3315 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3316 is a problem, otherwise false. */
3318 static bool
3319 verify_gimple_call (gcall *stmt)
3321 tree fn = gimple_call_fn (stmt);
3322 tree fntype, fndecl;
3323 unsigned i;
3325 if (gimple_call_internal_p (stmt))
3327 if (fn)
3329 error ("gimple call has two targets");
3330 debug_generic_stmt (fn);
3331 return true;
3333       /* FIXME: for passing a label as an arg in an internal fn PHI from the GIMPLE FE.  */
3334 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3336 return false;
3339 else
3341 if (!fn)
3343 error ("gimple call has no target");
3344 return true;
3348 if (fn && !is_gimple_call_addr (fn))
3350 error ("invalid function in gimple call");
3351 debug_generic_stmt (fn);
3352 return true;
3355 if (fn
3356 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3357 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3358 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3360 error ("non-function in gimple call");
3361 return true;
3364 fndecl = gimple_call_fndecl (stmt);
3365 if (fndecl
3366 && TREE_CODE (fndecl) == FUNCTION_DECL
3367 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3368 && !DECL_PURE_P (fndecl)
3369 && !TREE_READONLY (fndecl))
3371 error ("invalid pure const state for function");
3372 return true;
3375 tree lhs = gimple_call_lhs (stmt);
3376 if (lhs
3377 && (!is_gimple_lvalue (lhs)
3378 || verify_types_in_gimple_reference (lhs, true)))
3380 error ("invalid LHS in gimple call");
3381 return true;
3384 if (gimple_call_ctrl_altering_p (stmt)
3385 && gimple_call_noreturn_p (stmt)
3386 && should_remove_lhs_p (lhs))
3388 error ("LHS in noreturn call");
3389 return true;
3392 fntype = gimple_call_fntype (stmt);
3393 if (fntype
3394 && lhs
3395 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3396 /* ??? At least C++ misses conversions at assignments from
3397 void * call results.
3398 For now simply allow arbitrary pointer type conversions. */
3399 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3400 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3402 error ("invalid conversion in gimple call");
3403 debug_generic_stmt (TREE_TYPE (lhs));
3404 debug_generic_stmt (TREE_TYPE (fntype));
3405 return true;
3408 if (gimple_call_chain (stmt)
3409 && !is_gimple_val (gimple_call_chain (stmt)))
3411 error ("invalid static chain in gimple call");
3412 debug_generic_stmt (gimple_call_chain (stmt));
3413 return true;
3416 /* If there is a static chain argument, the call should either be
3417 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3418 if (gimple_call_chain (stmt)
3419 && fndecl
3420 && !DECL_STATIC_CHAIN (fndecl))
3422 error ("static chain with function that doesn%'t use one");
3423 return true;
3426 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3428 switch (DECL_FUNCTION_CODE (fndecl))
3430 case BUILT_IN_UNREACHABLE:
3431 case BUILT_IN_TRAP:
3432 if (gimple_call_num_args (stmt) > 0)
3434 /* Built-in unreachable with parameters might not be caught by
3435             the undefined behavior sanitizer.  Front ends do check that users do not
3436             call them that way, but we also produce calls to
3437 __builtin_unreachable internally, for example when IPA figures
3438 out a call cannot happen in a legal program. In such cases,
3439 we must make sure arguments are stripped off. */
3440 error ("__builtin_unreachable or __builtin_trap call with "
3441 "arguments");
3442 return true;
3444 break;
3445 default:
3446 break;
3450 /* ??? The C frontend passes unpromoted arguments in case it
3451 didn't see a function declaration before the call. So for now
3452 leave the call arguments mostly unverified. Once we gimplify
3453 unit-at-a-time we have a chance to fix this. */
3455 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3457 tree arg = gimple_call_arg (stmt, i);
3458 if ((is_gimple_reg_type (TREE_TYPE (arg))
3459 && !is_gimple_val (arg))
3460 || (!is_gimple_reg_type (TREE_TYPE (arg))
3461 && !is_gimple_lvalue (arg)))
3463 error ("invalid argument to gimple call");
3464 debug_generic_expr (arg);
3465 return true;
3469 return false;
3472 /* Verifies the gimple comparison with the result type TYPE and
3473 the operands OP0 and OP1, comparison code is CODE. */
3475 static bool
3476 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3478 tree op0_type = TREE_TYPE (op0);
3479 tree op1_type = TREE_TYPE (op1);
3481 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3483 error ("invalid operands in gimple comparison");
3484 return true;
3487 /* For comparisons we do not have the operations type as the
3488 effective type the comparison is carried out in. Instead
3489 we require that either the first operand is trivially
3490 convertible into the second, or the other way around.
3491 Because we special-case pointers to void we allow
3492 comparisons of pointers with the same mode as well. */
3493 if (!useless_type_conversion_p (op0_type, op1_type)
3494 && !useless_type_conversion_p (op1_type, op0_type)
3495 && (!POINTER_TYPE_P (op0_type)
3496 || !POINTER_TYPE_P (op1_type)
3497 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3499 error ("mismatching comparison operand types");
3500 debug_generic_expr (op0_type);
3501 debug_generic_expr (op1_type);
3502 return true;
3505 /* The resulting type of a comparison may be an effective boolean type. */
3506 if (INTEGRAL_TYPE_P (type)
3507 && (TREE_CODE (type) == BOOLEAN_TYPE
3508 || TYPE_PRECISION (type) == 1))
3510 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3511 || TREE_CODE (op1_type) == VECTOR_TYPE)
3512 && code != EQ_EXPR && code != NE_EXPR
3513 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3514 && !VECTOR_INTEGER_TYPE_P (op0_type))
3516 error ("unsupported operation or type for vector comparison"
3517 " returning a boolean");
3518 debug_generic_expr (op0_type);
3519 debug_generic_expr (op1_type);
3520 return true;
3523 /* Or a boolean vector type with the same element count
3524 as the comparison operand types. */
3525 else if (TREE_CODE (type) == VECTOR_TYPE
3526 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3528 if (TREE_CODE (op0_type) != VECTOR_TYPE
3529 || TREE_CODE (op1_type) != VECTOR_TYPE)
3531 error ("non-vector operands in vector comparison");
3532 debug_generic_expr (op0_type);
3533 debug_generic_expr (op1_type);
3534 return true;
3537 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3538 TYPE_VECTOR_SUBPARTS (op0_type)))
3540 error ("invalid vector comparison resulting type");
3541 debug_generic_expr (type);
3542 return true;
3545 else
3547 error ("bogus comparison result type");
3548 debug_generic_expr (type);
3549 return true;
3552 return false;
3555 /* Verify a gimple assignment statement STMT with an unary rhs.
3556 Returns true if anything is wrong. */
3558 static bool
3559 verify_gimple_assign_unary (gassign *stmt)
3561 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3562 tree lhs = gimple_assign_lhs (stmt);
3563 tree lhs_type = TREE_TYPE (lhs);
3564 tree rhs1 = gimple_assign_rhs1 (stmt);
3565 tree rhs1_type = TREE_TYPE (rhs1);
3567 if (!is_gimple_reg (lhs))
3569 error ("non-register as LHS of unary operation");
3570 return true;
3573 if (!is_gimple_val (rhs1))
3575 error ("invalid operand in unary operation");
3576 return true;
3579 /* First handle conversions. */
3580 switch (rhs_code)
3582 CASE_CONVERT:
3584 /* Allow conversions from pointer type to integral type only if
3585 there is no sign or zero extension involved.
3586          For targets where the precision of ptrofftype doesn't match that
3587 of pointers we need to allow arbitrary conversions to ptrofftype. */
3588 if ((POINTER_TYPE_P (lhs_type)
3589 && INTEGRAL_TYPE_P (rhs1_type))
3590 || (POINTER_TYPE_P (rhs1_type)
3591 && INTEGRAL_TYPE_P (lhs_type)
3592 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3593 || ptrofftype_p (lhs_type))))
3594 return false;
3596 /* Allow conversion from integral to offset type and vice versa. */
3597 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3598 && INTEGRAL_TYPE_P (rhs1_type))
3599 || (INTEGRAL_TYPE_P (lhs_type)
3600 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3601 return false;
3603 /* Otherwise assert we are converting between types of the
3604 same kind. */
3605 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3607 error ("invalid types in nop conversion");
3608 debug_generic_expr (lhs_type);
3609 debug_generic_expr (rhs1_type);
3610 return true;
3613 return false;
3616 case ADDR_SPACE_CONVERT_EXPR:
3618 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3619 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3620 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3622 error ("invalid types in address space conversion");
3623 debug_generic_expr (lhs_type);
3624 debug_generic_expr (rhs1_type);
3625 return true;
3628 return false;
3631 case FIXED_CONVERT_EXPR:
3633 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3634 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3636 error ("invalid types in fixed-point conversion");
3637 debug_generic_expr (lhs_type);
3638 debug_generic_expr (rhs1_type);
3639 return true;
3642 return false;
3645 case FLOAT_EXPR:
3647 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3648 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3649 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3651 error ("invalid types in conversion to floating point");
3652 debug_generic_expr (lhs_type);
3653 debug_generic_expr (rhs1_type);
3654 return true;
3657 return false;
3660 case FIX_TRUNC_EXPR:
3662 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3663 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3664 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3666 error ("invalid types in conversion to integer");
3667 debug_generic_expr (lhs_type);
3668 debug_generic_expr (rhs1_type);
3669 return true;
3672 return false;
3675 case VEC_UNPACK_HI_EXPR:
3676 case VEC_UNPACK_LO_EXPR:
3677 case VEC_UNPACK_FLOAT_HI_EXPR:
3678 case VEC_UNPACK_FLOAT_LO_EXPR:
3679 /* FIXME. */
3680 return false;
3682 case NEGATE_EXPR:
3683 case ABS_EXPR:
3684 case BIT_NOT_EXPR:
3685 case PAREN_EXPR:
3686 case CONJ_EXPR:
3687 break;
3689 case VEC_DUPLICATE_EXPR:
3690 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3691 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3693 error ("vec_duplicate should be from a scalar to a like vector");
3694 debug_generic_expr (lhs_type);
3695 debug_generic_expr (rhs1_type);
3696 return true;
3698 return false;
3700 default:
3701 gcc_unreachable ();
3704 /* For the remaining codes assert there is no conversion involved. */
3705 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3707 error ("non-trivial conversion in unary operation");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 return true;
3713 return false;
3716 /* Verify a gimple assignment statement STMT with a binary rhs.
3717 Returns true if anything is wrong. */
3719 static bool
3720 verify_gimple_assign_binary (gassign *stmt)
3722 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3723 tree lhs = gimple_assign_lhs (stmt);
3724 tree lhs_type = TREE_TYPE (lhs);
3725 tree rhs1 = gimple_assign_rhs1 (stmt);
3726 tree rhs1_type = TREE_TYPE (rhs1);
3727 tree rhs2 = gimple_assign_rhs2 (stmt);
3728 tree rhs2_type = TREE_TYPE (rhs2);
3730 if (!is_gimple_reg (lhs))
3732 error ("non-register as LHS of binary operation");
3733 return true;
3736 if (!is_gimple_val (rhs1)
3737 || !is_gimple_val (rhs2))
3739 error ("invalid operands in binary operation");
3740 return true;
3743 /* First handle operations that involve different types. */
3744 switch (rhs_code)
3746 case COMPLEX_EXPR:
3748 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3749 || !(INTEGRAL_TYPE_P (rhs1_type)
3750 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3751 || !(INTEGRAL_TYPE_P (rhs2_type)
3752 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3754 error ("type mismatch in complex expression");
3755 debug_generic_expr (lhs_type);
3756 debug_generic_expr (rhs1_type);
3757 debug_generic_expr (rhs2_type);
3758 return true;
3761 return false;
3764 case LSHIFT_EXPR:
3765 case RSHIFT_EXPR:
3766 case LROTATE_EXPR:
3767 case RROTATE_EXPR:
3769 /* Shifts and rotates are ok on integral types, fixed point
3770 types and integer vector types. */
3771 if ((!INTEGRAL_TYPE_P (rhs1_type)
3772 && !FIXED_POINT_TYPE_P (rhs1_type)
3773 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3774 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3775 || (!INTEGRAL_TYPE_P (rhs2_type)
3776 /* Vector shifts of vectors are also ok. */
3777 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3778 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3779 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3780 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3781 || !useless_type_conversion_p (lhs_type, rhs1_type))
3783 error ("type mismatch in shift expression");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 debug_generic_expr (rhs2_type);
3787 return true;
3790 return false;
3793 case WIDEN_LSHIFT_EXPR:
3795 if (!INTEGRAL_TYPE_P (lhs_type)
3796 || !INTEGRAL_TYPE_P (rhs1_type)
3797 || TREE_CODE (rhs2) != INTEGER_CST
3798 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3800 error ("type mismatch in widening vector shift expression");
3801 debug_generic_expr (lhs_type);
3802 debug_generic_expr (rhs1_type);
3803 debug_generic_expr (rhs2_type);
3804 return true;
3807 return false;
3810 case VEC_WIDEN_LSHIFT_HI_EXPR:
3811 case VEC_WIDEN_LSHIFT_LO_EXPR:
3813 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3814 || TREE_CODE (lhs_type) != VECTOR_TYPE
3815 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3816 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3817 || TREE_CODE (rhs2) != INTEGER_CST
3818 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3819 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3821 error ("type mismatch in widening vector shift expression");
3822 debug_generic_expr (lhs_type);
3823 debug_generic_expr (rhs1_type);
3824 debug_generic_expr (rhs2_type);
3825 return true;
3828 return false;
3831 case PLUS_EXPR:
3832 case MINUS_EXPR:
3834 tree lhs_etype = lhs_type;
3835 tree rhs1_etype = rhs1_type;
3836 tree rhs2_etype = rhs2_type;
3837 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3839 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3840 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3842 error ("invalid non-vector operands to vector valued plus");
3843 return true;
3845 lhs_etype = TREE_TYPE (lhs_type);
3846 rhs1_etype = TREE_TYPE (rhs1_type);
3847 rhs2_etype = TREE_TYPE (rhs2_type);
3849 if (POINTER_TYPE_P (lhs_etype)
3850 || POINTER_TYPE_P (rhs1_etype)
3851 || POINTER_TYPE_P (rhs2_etype))
3853 error ("invalid (pointer) operands to plus/minus");
3854 return true;
3857 /* Continue with generic binary expression handling. */
3858 break;
3861 case POINTER_PLUS_EXPR:
3863 if (!POINTER_TYPE_P (rhs1_type)
3864 || !useless_type_conversion_p (lhs_type, rhs1_type)
3865 || !ptrofftype_p (rhs2_type))
3867 error ("type mismatch in pointer plus expression");
3868 debug_generic_stmt (lhs_type);
3869 debug_generic_stmt (rhs1_type);
3870 debug_generic_stmt (rhs2_type);
3871 return true;
3874 return false;
3877 case POINTER_DIFF_EXPR:
3879 if (!POINTER_TYPE_P (rhs1_type)
3880 || !POINTER_TYPE_P (rhs2_type)
3881 /* Because we special-case pointers to void we allow difference
3882 of arbitrary pointers with the same mode. */
3883 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3884 || TREE_CODE (lhs_type) != INTEGER_TYPE
3885 || TYPE_UNSIGNED (lhs_type)
3886 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3888 error ("type mismatch in pointer diff expression");
3889 debug_generic_stmt (lhs_type);
3890 debug_generic_stmt (rhs1_type);
3891 debug_generic_stmt (rhs2_type);
3892 return true;
3895 return false;
3898 case TRUTH_ANDIF_EXPR:
3899 case TRUTH_ORIF_EXPR:
3900 case TRUTH_AND_EXPR:
3901 case TRUTH_OR_EXPR:
3902 case TRUTH_XOR_EXPR:
3904 gcc_unreachable ();
3906 case LT_EXPR:
3907 case LE_EXPR:
3908 case GT_EXPR:
3909 case GE_EXPR:
3910 case EQ_EXPR:
3911 case NE_EXPR:
3912 case UNORDERED_EXPR:
3913 case ORDERED_EXPR:
3914 case UNLT_EXPR:
3915 case UNLE_EXPR:
3916 case UNGT_EXPR:
3917 case UNGE_EXPR:
3918 case UNEQ_EXPR:
3919 case LTGT_EXPR:
3920 /* Comparisons are also binary, but the result type is not
3921 connected to the operand types. */
3922 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3924 case WIDEN_MULT_EXPR:
3925 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3926 return true;
3927 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3928 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3930 case WIDEN_SUM_EXPR:
3932 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3933 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3934 && ((!INTEGRAL_TYPE_P (rhs1_type)
3935 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3936 || (!INTEGRAL_TYPE_P (lhs_type)
3937 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3938 || !useless_type_conversion_p (lhs_type, rhs2_type)
3939 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3940 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3942 error ("type mismatch in widening sum reduction");
3943 debug_generic_expr (lhs_type);
3944 debug_generic_expr (rhs1_type);
3945 debug_generic_expr (rhs2_type);
3946 return true;
3948 return false;
3951 case VEC_WIDEN_MULT_HI_EXPR:
3952 case VEC_WIDEN_MULT_LO_EXPR:
3953 case VEC_WIDEN_MULT_EVEN_EXPR:
3954 case VEC_WIDEN_MULT_ODD_EXPR:
3956 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3957 || TREE_CODE (lhs_type) != VECTOR_TYPE
3958 || !types_compatible_p (rhs1_type, rhs2_type)
3959 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3960 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3962 error ("type mismatch in vector widening multiplication");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs1_type);
3965 debug_generic_expr (rhs2_type);
3966 return true;
3968 return false;
3971 case VEC_PACK_TRUNC_EXPR:
3972 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
3973 vector boolean types. */
3974 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3975 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3976 && types_compatible_p (rhs1_type, rhs2_type)
3977 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3978 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3979 return false;
3981 /* Fallthru. */
3982 case VEC_PACK_SAT_EXPR:
3983 case VEC_PACK_FIX_TRUNC_EXPR:
3985 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3986 || TREE_CODE (lhs_type) != VECTOR_TYPE
3987 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
3988 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3989 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
3990 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3991 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
3992 || !types_compatible_p (rhs1_type, rhs2_type)
3993 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
3994 2 * GET_MODE_SIZE (element_mode (lhs_type))))
3996 error ("type mismatch in vector pack expression");
3997 debug_generic_expr (lhs_type);
3998 debug_generic_expr (rhs1_type);
3999 debug_generic_expr (rhs2_type);
4000 return true;
4003 return false;
4006 case MULT_EXPR:
4007 case MULT_HIGHPART_EXPR:
4008 case TRUNC_DIV_EXPR:
4009 case CEIL_DIV_EXPR:
4010 case FLOOR_DIV_EXPR:
4011 case ROUND_DIV_EXPR:
4012 case TRUNC_MOD_EXPR:
4013 case CEIL_MOD_EXPR:
4014 case FLOOR_MOD_EXPR:
4015 case ROUND_MOD_EXPR:
4016 case RDIV_EXPR:
4017 case EXACT_DIV_EXPR:
4018 case MIN_EXPR:
4019 case MAX_EXPR:
4020 case BIT_IOR_EXPR:
4021 case BIT_XOR_EXPR:
4022 case BIT_AND_EXPR:
4023 /* Continue with generic binary expression handling. */
4024 break;
4026 case VEC_SERIES_EXPR:
4027 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4029 error ("type mismatch in series expression");
4030 debug_generic_expr (rhs1_type);
4031 debug_generic_expr (rhs2_type);
4032 return true;
4034 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4035 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4037 error ("vector type expected in series expression");
4038 debug_generic_expr (lhs_type);
4039 return true;
4041 return false;
4043 default:
4044 gcc_unreachable ();
4047 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4048 || !useless_type_conversion_p (lhs_type, rhs2_type))
4050 error ("type mismatch in binary expression");
4051 debug_generic_stmt (lhs_type);
4052 debug_generic_stmt (rhs1_type);
4053 debug_generic_stmt (rhs2_type);
4054 return true;
4057 return false;
4060 /* Verify a gimple assignment statement STMT with a ternary rhs.
4061 Returns true if anything is wrong. */
4063 static bool
4064 verify_gimple_assign_ternary (gassign *stmt)
4066 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4067 tree lhs = gimple_assign_lhs (stmt);
4068 tree lhs_type = TREE_TYPE (lhs);
4069 tree rhs1 = gimple_assign_rhs1 (stmt);
4070 tree rhs1_type = TREE_TYPE (rhs1);
4071 tree rhs2 = gimple_assign_rhs2 (stmt);
4072 tree rhs2_type = TREE_TYPE (rhs2);
4073 tree rhs3 = gimple_assign_rhs3 (stmt);
4074 tree rhs3_type = TREE_TYPE (rhs3);
4076 if (!is_gimple_reg (lhs))
4078 error ("non-register as LHS of ternary operation");
4079 return true;
4082 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4083 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4084 || !is_gimple_val (rhs2)
4085 || !is_gimple_val (rhs3))
4087 error ("invalid operands in ternary operation");
4088 return true;
4091 /* First handle operations that involve different types. */
4092 switch (rhs_code)
4094 case WIDEN_MULT_PLUS_EXPR:
4095 case WIDEN_MULT_MINUS_EXPR:
4096 if ((!INTEGRAL_TYPE_P (rhs1_type)
4097 && !FIXED_POINT_TYPE_P (rhs1_type))
4098 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4099 || !useless_type_conversion_p (lhs_type, rhs3_type)
4100 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4101 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4103 error ("type mismatch in widening multiply-accumulate expression");
4104 debug_generic_expr (lhs_type);
4105 debug_generic_expr (rhs1_type);
4106 debug_generic_expr (rhs2_type);
4107 debug_generic_expr (rhs3_type);
4108 return true;
4110 break;
4112 case FMA_EXPR:
4113 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4114 || !useless_type_conversion_p (lhs_type, rhs2_type)
4115 || !useless_type_conversion_p (lhs_type, rhs3_type))
4117 error ("type mismatch in fused multiply-add expression");
4118 debug_generic_expr (lhs_type);
4119 debug_generic_expr (rhs1_type);
4120 debug_generic_expr (rhs2_type);
4121 debug_generic_expr (rhs3_type);
4122 return true;
4124 break;
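/* For example (illustrative): t_4 = FMA_EXPR <a_1, b_2, c_3>
   computes a_1 * b_2 + c_3 with a single rounding; the LHS and all
   three operands must have the same type.  */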
4126 case VEC_COND_EXPR:
4127 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4128 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4129 TYPE_VECTOR_SUBPARTS (lhs_type)))
4131 error ("the first argument of a VEC_COND_EXPR must be of a "
4132 "boolean vector type of the same number of elements "
4133 "as the result");
4134 debug_generic_expr (lhs_type);
4135 debug_generic_expr (rhs1_type);
4136 return true;
4138 /* Fallthrough. */
4139 case COND_EXPR:
4140 if (!is_gimple_val (rhs1)
4141 && verify_gimple_comparison (TREE_TYPE (rhs1),
4142 TREE_OPERAND (rhs1, 0),
4143 TREE_OPERAND (rhs1, 1),
4144 TREE_CODE (rhs1)))
4145 return true;
4146 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4147 || !useless_type_conversion_p (lhs_type, rhs3_type))
4149 error ("type mismatch in conditional expression");
4150 debug_generic_expr (lhs_type);
4151 debug_generic_expr (rhs2_type);
4152 debug_generic_expr (rhs3_type);
4153 return true;
4155 break;
4157 case VEC_PERM_EXPR:
4158 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4159 || !useless_type_conversion_p (lhs_type, rhs2_type))
4161 error ("type mismatch in vector permute expression");
4162 debug_generic_expr (lhs_type);
4163 debug_generic_expr (rhs1_type);
4164 debug_generic_expr (rhs2_type);
4165 debug_generic_expr (rhs3_type);
4166 return true;
4169 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4170 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4171 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4173 error ("vector types expected in vector permute expression");
4174 debug_generic_expr (lhs_type);
4175 debug_generic_expr (rhs1_type);
4176 debug_generic_expr (rhs2_type);
4177 debug_generic_expr (rhs3_type);
4178 return true;
4181 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4182 TYPE_VECTOR_SUBPARTS (rhs2_type))
4183 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4184 TYPE_VECTOR_SUBPARTS (rhs3_type))
4185 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4186 TYPE_VECTOR_SUBPARTS (lhs_type)))
4188 error ("vectors with different element number found "
4189 "in vector permute expression");
4190 debug_generic_expr (lhs_type);
4191 debug_generic_expr (rhs1_type);
4192 debug_generic_expr (rhs2_type);
4193 debug_generic_expr (rhs3_type);
4194 return true;
4197 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4198 || (TREE_CODE (rhs3) != VECTOR_CST
4199 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4200 (TREE_TYPE (rhs3_type)))
4201 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4202 (TREE_TYPE (rhs1_type))))))
4204 error ("invalid mask type in vector permute expression");
4205 debug_generic_expr (lhs_type);
4206 debug_generic_expr (rhs1_type);
4207 debug_generic_expr (rhs2_type);
4208 debug_generic_expr (rhs3_type);
4209 return true;
4212 return false;
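/* For example (illustrative): interleaving the low halves of two
   vector(4) int operands,
     _4 = VEC_PERM_EXPR <a_1, b_2, { 0, 4, 1, 5 }>;
   each mask element selects from the concatenation of the two inputs;
   a non-constant mask must have integer elements of the same width as
   the data elements.  */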
4214 case SAD_EXPR:
4215 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4216 || !useless_type_conversion_p (lhs_type, rhs3_type)
4217 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4218 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4220 error ("type mismatch in sad expression");
4221 debug_generic_expr (lhs_type);
4222 debug_generic_expr (rhs1_type);
4223 debug_generic_expr (rhs2_type);
4224 debug_generic_expr (rhs3_type);
4225 return true;
4228 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4229 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4230 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4232 error ("vector types expected in sad expression");
4233 debug_generic_expr (lhs_type);
4234 debug_generic_expr (rhs1_type);
4235 debug_generic_expr (rhs2_type);
4236 debug_generic_expr (rhs3_type);
4237 return true;
4240 return false;
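/* For example (illustrative): accumulating absolute differences of
   vector(16) unsigned char inputs into a vector(4) unsigned int
   accumulator,
     acc_4 = SAD_EXPR <a_1, b_2, acc_3>;
   requires the accumulator element to be at least twice as wide as
   the input element.  */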
4242 case BIT_INSERT_EXPR:
4243 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4245 error ("type mismatch in BIT_INSERT_EXPR");
4246 debug_generic_expr (lhs_type);
4247 debug_generic_expr (rhs1_type);
4248 return true;
4250 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4251 && INTEGRAL_TYPE_P (rhs2_type))
4252 || (VECTOR_TYPE_P (rhs1_type)
4253 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4255 error ("not allowed type combination in BIT_INSERT_EXPR");
4256 debug_generic_expr (rhs1_type);
4257 debug_generic_expr (rhs2_type);
4258 return true;
4260 if (! tree_fits_uhwi_p (rhs3)
4261 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4262 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4264 error ("invalid position or size in BIT_INSERT_EXPR");
4265 return true;
4267 if (INTEGRAL_TYPE_P (rhs1_type))
4269 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4270 if (bitpos >= TYPE_PRECISION (rhs1_type)
4271 || (bitpos + TYPE_PRECISION (rhs2_type)
4272 > TYPE_PRECISION (rhs1_type)))
4274 error ("insertion out of range in BIT_INSERT_EXPR");
4275 return true;
4278 else if (VECTOR_TYPE_P (rhs1_type))
4280 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4281 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4282 if (bitpos % bitsize != 0)
4284 error ("vector insertion not at element boundary");
4285 return true;
4288 return false;
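/* For example (illustrative): replacing element 2 of a vector(4) int,
     v_4 = BIT_INSERT_EXPR <v_1, x_2, 64>;
   the bit position 64 is a multiple of the 32-bit element size.  For
   an integral RHS1 the inserted range must instead lie within its
   precision.  */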
4290 case DOT_PROD_EXPR:
4292 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4293 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4294 && ((!INTEGRAL_TYPE_P (rhs1_type)
4295 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4296 || (!INTEGRAL_TYPE_P (lhs_type)
4297 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4298 || !types_compatible_p (rhs1_type, rhs2_type)
4299 || !useless_type_conversion_p (lhs_type, rhs3_type)
4300 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4301 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4303 error ("type mismatch in dot product reduction");
4304 debug_generic_expr (lhs_type);
4305 debug_generic_expr (rhs1_type);
4306 debug_generic_expr (rhs2_type);
4307 return true;
4309 return false;
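/* For example (illustrative): a dot-product step multiplying
   vector(8) short operands and accumulating into vector(4) int,
     acc_4 = DOT_PROD_EXPR <a_1, b_2, acc_3>;
   the products are widened, so the accumulator element must be at
   least twice as wide as the multiplicand element.  */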
4312 case REALIGN_LOAD_EXPR:
4313 /* FIXME. */
4314 return false;
4316 default:
4317 gcc_unreachable ();
4319 return false;
4322 /* Verify a gimple assignment statement STMT with a single rhs.
4323 Returns true if anything is wrong. */
4325 static bool
4326 verify_gimple_assign_single (gassign *stmt)
4328 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4329 tree lhs = gimple_assign_lhs (stmt);
4330 tree lhs_type = TREE_TYPE (lhs);
4331 tree rhs1 = gimple_assign_rhs1 (stmt);
4332 tree rhs1_type = TREE_TYPE (rhs1);
4333 bool res = false;
4335 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4337 error ("non-trivial conversion at assignment");
4338 debug_generic_expr (lhs_type);
4339 debug_generic_expr (rhs1_type);
4340 return true;
4343 if (gimple_clobber_p (stmt)
4344 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4346 error ("non-decl/MEM_REF LHS in clobber statement");
4347 debug_generic_expr (lhs);
4348 return true;
4351 if (handled_component_p (lhs)
4352 || TREE_CODE (lhs) == MEM_REF
4353 || TREE_CODE (lhs) == TARGET_MEM_REF)
4354 res |= verify_types_in_gimple_reference (lhs, true);
4356 /* Special codes we cannot handle via their class. */
4357 switch (rhs_code)
4359 case ADDR_EXPR:
4361 tree op = TREE_OPERAND (rhs1, 0);
4362 if (!is_gimple_addressable (op))
4364 error ("invalid operand in unary expression");
4365 return true;
4368 /* Technically there is no longer a need for matching types, but
4369 gimple hygiene asks for this check. In LTO we can end up
4370 combining incompatible units and thus end up with addresses
4371 of globals that change their type to a common one. */
4372 if (!in_lto_p
4373 && !types_compatible_p (TREE_TYPE (op),
4374 TREE_TYPE (TREE_TYPE (rhs1)))
4375 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4376 TREE_TYPE (op)))
4378 error ("type mismatch in address expression");
4379 debug_generic_stmt (TREE_TYPE (rhs1));
4380 debug_generic_stmt (TREE_TYPE (op));
4381 return true;
4384 return (verify_address (rhs1, true)
4385 || verify_types_in_gimple_reference (op, true));
4388 /* tcc_reference */
4389 case INDIRECT_REF:
4390 error ("INDIRECT_REF in gimple IL");
4391 return true;
4393 case COMPONENT_REF:
4394 case BIT_FIELD_REF:
4395 case ARRAY_REF:
4396 case ARRAY_RANGE_REF:
4397 case VIEW_CONVERT_EXPR:
4398 case REALPART_EXPR:
4399 case IMAGPART_EXPR:
4400 case TARGET_MEM_REF:
4401 case MEM_REF:
4402 if (!is_gimple_reg (lhs)
4403 && is_gimple_reg_type (TREE_TYPE (lhs)))
4405 error ("invalid rhs for gimple memory store");
4406 debug_generic_stmt (lhs);
4407 debug_generic_stmt (rhs1);
4408 return true;
4410 return res || verify_types_in_gimple_reference (rhs1, false);
4412 /* tcc_constant */
4413 case SSA_NAME:
4414 case INTEGER_CST:
4415 case REAL_CST:
4416 case FIXED_CST:
4417 case COMPLEX_CST:
4418 case VECTOR_CST:
4419 case STRING_CST:
4420 return res;
4422 /* tcc_declaration */
4423 case CONST_DECL:
4424 return res;
4425 case VAR_DECL:
4426 case PARM_DECL:
4427 if (!is_gimple_reg (lhs)
4428 && !is_gimple_reg (rhs1)
4429 && is_gimple_reg_type (TREE_TYPE (lhs)))
4431 error ("invalid rhs for gimple memory store");
4432 debug_generic_stmt (lhs);
4433 debug_generic_stmt (rhs1);
4434 return true;
4436 return res;
4438 case CONSTRUCTOR:
4439 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4441 unsigned int i;
4442 tree elt_i, elt_v, elt_t = NULL_TREE;
4444 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4445 return res;
4446 /* For vector CONSTRUCTORs we require that either it is an empty
4447 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4448 (then the element count must be correct to cover the whole
4449 outer vector and the index must be NULL on all elements), or it
4450 is a CONSTRUCTOR of scalar elements, where as an exception we
4451 allow a smaller number of elements (assuming zero filling) and
4452 consecutive indexes as compared to NULL indexes (such
4453 CONSTRUCTORs can appear in the IL from FEs). */
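/* For example (illustrative): for a vector(4) int RHS type, both
   { a_1, b_2, c_3, d_4 } (scalar elements, NULL indexes) and
   { v_1, v_2 } (two vector(2) int elements) are valid, and the
   scalar-element form { a_1, b_2 } is also accepted, with the
   remaining elements implicitly zero.  */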
4454 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4456 if (elt_t == NULL_TREE)
4458 elt_t = TREE_TYPE (elt_v);
4459 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4461 tree elt_t = TREE_TYPE (elt_v);
4462 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4463 TREE_TYPE (elt_t)))
4465 error ("incorrect type of vector CONSTRUCTOR"
4466 " elements");
4467 debug_generic_stmt (rhs1);
4468 return true;
4470 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4471 * TYPE_VECTOR_SUBPARTS (elt_t),
4472 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4474 error ("incorrect number of vector CONSTRUCTOR"
4475 " elements");
4476 debug_generic_stmt (rhs1);
4477 return true;
4480 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4481 elt_t))
4483 error ("incorrect type of vector CONSTRUCTOR elements");
4484 debug_generic_stmt (rhs1);
4485 return true;
4487 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4488 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4490 error ("incorrect number of vector CONSTRUCTOR elements");
4491 debug_generic_stmt (rhs1);
4492 return true;
4495 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4497 error ("incorrect type of vector CONSTRUCTOR elements");
4498 debug_generic_stmt (rhs1);
4499 return true;
4501 if (elt_i != NULL_TREE
4502 && (TREE_CODE (elt_t) == VECTOR_TYPE
4503 || TREE_CODE (elt_i) != INTEGER_CST
4504 || compare_tree_int (elt_i, i) != 0))
4506 error ("vector CONSTRUCTOR with non-NULL element index");
4507 debug_generic_stmt (rhs1);
4508 return true;
4510 if (!is_gimple_val (elt_v))
4512 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4513 debug_generic_stmt (rhs1);
4514 return true;
4518 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4520 error ("non-vector CONSTRUCTOR with elements");
4521 debug_generic_stmt (rhs1);
4522 return true;
4524 return res;
4526 case ASSERT_EXPR:
4527 /* FIXME. */
4528 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4529 if (rhs1 == boolean_false_node)
4531 error ("ASSERT_EXPR with an always-false condition");
4532 debug_generic_stmt (rhs1);
4533 return true;
4535 break;
4537 case OBJ_TYPE_REF:
4538 case WITH_SIZE_EXPR:
4539 /* FIXME. */
4540 return res;
4542 default:;
4545 return res;
4548 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4549 is a problem, otherwise false. */
4551 static bool
4552 verify_gimple_assign (gassign *stmt)
4554 switch (gimple_assign_rhs_class (stmt))
4556 case GIMPLE_SINGLE_RHS:
4557 return verify_gimple_assign_single (stmt);
4559 case GIMPLE_UNARY_RHS:
4560 return verify_gimple_assign_unary (stmt);
4562 case GIMPLE_BINARY_RHS:
4563 return verify_gimple_assign_binary (stmt);
4565 case GIMPLE_TERNARY_RHS:
4566 return verify_gimple_assign_ternary (stmt);
4568 default:
4569 gcc_unreachable ();
4573 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4574 is a problem, otherwise false. */
4576 static bool
4577 verify_gimple_return (greturn *stmt)
4579 tree op = gimple_return_retval (stmt);
4580 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4582 /* We cannot test for the presence of return values, as we do not
4583 fix up missing return values from the original source. */
4584 if (op == NULL)
4585 return false;
4587 if (!is_gimple_val (op)
4588 && TREE_CODE (op) != RESULT_DECL)
4590 error ("invalid operand in return statement");
4591 debug_generic_stmt (op);
4592 return true;
4595 if ((TREE_CODE (op) == RESULT_DECL
4596 && DECL_BY_REFERENCE (op))
4597 || (TREE_CODE (op) == SSA_NAME
4598 && SSA_NAME_VAR (op)
4599 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4600 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4601 op = TREE_TYPE (op);
4603 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4605 error ("invalid conversion in return statement");
4606 debug_generic_stmt (restype);
4607 debug_generic_stmt (TREE_TYPE (op));
4608 return true;
4611 return false;
4615 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4616 is a problem, otherwise false. */
4618 static bool
4619 verify_gimple_goto (ggoto *stmt)
4621 tree dest = gimple_goto_dest (stmt);
4623 /* ??? We have two canonical forms of direct goto destinations, a
4624 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4625 if (TREE_CODE (dest) != LABEL_DECL
4626 && (!is_gimple_val (dest)
4627 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4629 error ("goto destination is neither a label nor a pointer");
4630 return true;
4633 return false;
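/* For example (illustrative): a computed goto such as
     goto ptr_1;
   carries a pointer-valued destination (typically the address of a
   label, as in ptr_1 = &&lab), whereas a direct goto carries a bare
   LABEL_DECL.  */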
4636 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4637 is a problem, otherwise false. */
4639 static bool
4640 verify_gimple_switch (gswitch *stmt)
4642 unsigned int i, n;
4643 tree elt, prev_upper_bound = NULL_TREE;
4644 tree index_type, elt_type = NULL_TREE;
4646 if (!is_gimple_val (gimple_switch_index (stmt)))
4648 error ("invalid operand to switch statement");
4649 debug_generic_stmt (gimple_switch_index (stmt));
4650 return true;
4653 index_type = TREE_TYPE (gimple_switch_index (stmt));
4654 if (! INTEGRAL_TYPE_P (index_type))
4656 error ("non-integral type switch statement");
4657 debug_generic_expr (index_type);
4658 return true;
4661 elt = gimple_switch_label (stmt, 0);
4662 if (CASE_LOW (elt) != NULL_TREE
4663 || CASE_HIGH (elt) != NULL_TREE
4664 || CASE_CHAIN (elt) != NULL_TREE)
4666 error ("invalid default case label in switch statement");
4667 debug_generic_expr (elt);
4668 return true;
4671 n = gimple_switch_num_labels (stmt);
4672 for (i = 1; i < n; i++)
4674 elt = gimple_switch_label (stmt, i);
4676 if (CASE_CHAIN (elt))
4678 error ("invalid CASE_CHAIN");
4679 debug_generic_expr (elt);
4680 return true;
4682 if (! CASE_LOW (elt))
4684 error ("invalid case label in switch statement");
4685 debug_generic_expr (elt);
4686 return true;
4688 if (CASE_HIGH (elt)
4689 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4691 error ("invalid case range in switch statement");
4692 debug_generic_expr (elt);
4693 return true;
4696 if (elt_type)
4698 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4699 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4701 error ("type mismatch for case label in switch statement");
4702 debug_generic_expr (elt);
4703 return true;
4706 else
4708 elt_type = TREE_TYPE (CASE_LOW (elt));
4709 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4711 error ("type precision mismatch in switch statement");
4712 return true;
4716 if (prev_upper_bound)
4718 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4720 error ("case labels not sorted in switch statement");
4721 return true;
4725 prev_upper_bound = CASE_HIGH (elt);
4726 if (! prev_upper_bound)
4727 prev_upper_bound = CASE_LOW (elt);
4730 return false;
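/* For example (an illustrative sketch of the dump form): a well-formed
   switch looks like
     switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>
   with the default label first and without CASE_LOW/CASE_HIGH, every
   other label carrying a CASE_LOW, ranges satisfying
   CASE_LOW < CASE_HIGH, and all labels sorted in increasing order.  */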
4733 /* Verify a gimple debug statement STMT.
4734 Returns true if anything is wrong. */
4736 static bool
4737 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4739 /* There isn't much that could be wrong in a gimple debug stmt. A
4740 gimple debug bind stmt, for example, maps a tree (usually a
4741 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4742 member of an aggregate type) to another tree, which can be an
4743 arbitrary expression. These stmts expand into debug insns and
4744 are converted to debug notes by var-tracking.c. */
4745 return false;
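/* For example (illustrative): a debug bind appears in dumps as
     # DEBUG x => y_1 + 1
   binding user variable x to an arbitrary expression.  */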
4748 /* Verify a gimple label statement STMT.
4749 Returns true if anything is wrong. */
4751 static bool
4752 verify_gimple_label (glabel *stmt)
4754 tree decl = gimple_label_label (stmt);
4755 int uid;
4756 bool err = false;
4758 if (TREE_CODE (decl) != LABEL_DECL)
4759 return true;
4760 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4761 && DECL_CONTEXT (decl) != current_function_decl)
4763 error ("label's context is not the current function decl");
4764 err |= true;
4767 uid = LABEL_DECL_UID (decl);
4768 if (cfun->cfg
4769 && (uid == -1
4770 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4772 error ("incorrect entry in label_to_block_map");
4773 err |= true;
4776 uid = EH_LANDING_PAD_NR (decl);
4777 if (uid)
4779 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4780 if (decl != lp->post_landing_pad)
4782 error ("incorrect setting of landing pad number");
4783 err |= true;
4787 return err;
4790 /* Verify a gimple cond statement STMT.
4791 Returns true if anything is wrong. */
4793 static bool
4794 verify_gimple_cond (gcond *stmt)
4796 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4798 error ("invalid comparison code in gimple cond");
4799 return true;
4801 if (!(!gimple_cond_true_label (stmt)
4802 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4803 || !(!gimple_cond_false_label (stmt)
4804 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4806 error ("invalid labels in gimple cond");
4807 return true;
4810 return verify_gimple_comparison (boolean_type_node,
4811 gimple_cond_lhs (stmt),
4812 gimple_cond_rhs (stmt),
4813 gimple_cond_code (stmt));
4816 /* Verify the GIMPLE statement STMT. Returns true if there is an
4817 error, otherwise false. */
4819 static bool
4820 verify_gimple_stmt (gimple *stmt)
4822 switch (gimple_code (stmt))
4824 case GIMPLE_ASSIGN:
4825 return verify_gimple_assign (as_a <gassign *> (stmt));
4827 case GIMPLE_LABEL:
4828 return verify_gimple_label (as_a <glabel *> (stmt));
4830 case GIMPLE_CALL:
4831 return verify_gimple_call (as_a <gcall *> (stmt));
4833 case GIMPLE_COND:
4834 return verify_gimple_cond (as_a <gcond *> (stmt));
4836 case GIMPLE_GOTO:
4837 return verify_gimple_goto (as_a <ggoto *> (stmt));
4839 case GIMPLE_SWITCH:
4840 return verify_gimple_switch (as_a <gswitch *> (stmt));
4842 case GIMPLE_RETURN:
4843 return verify_gimple_return (as_a <greturn *> (stmt));
4845 case GIMPLE_ASM:
4846 return false;
4848 case GIMPLE_TRANSACTION:
4849 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4851 /* Tuples that do not have tree operands. */
4852 case GIMPLE_NOP:
4853 case GIMPLE_PREDICT:
4854 case GIMPLE_RESX:
4855 case GIMPLE_EH_DISPATCH:
4856 case GIMPLE_EH_MUST_NOT_THROW:
4857 return false;
4859 CASE_GIMPLE_OMP:
4860 /* OpenMP directives are validated by the FE and never operated
4861 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4862 non-gimple expressions when the main index variable has had
4863 its address taken. This does not affect the loop itself
4864 because the header of a GIMPLE_OMP_FOR is merely used to determine
4865 how to set up the parallel iteration. */
4866 return false;
4868 case GIMPLE_DEBUG:
4869 return verify_gimple_debug (stmt);
4871 default:
4872 gcc_unreachable ();
4876 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4877 and false otherwise. */
4879 static bool
4880 verify_gimple_phi (gphi *phi)
4882 bool err = false;
4883 unsigned i;
4884 tree phi_result = gimple_phi_result (phi);
4885 bool virtual_p;
4887 if (!phi_result)
4889 error ("invalid PHI result");
4890 return true;
4893 virtual_p = virtual_operand_p (phi_result);
4894 if (TREE_CODE (phi_result) != SSA_NAME
4895 || (virtual_p
4896 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4898 error ("invalid PHI result");
4899 err = true;
4902 for (i = 0; i < gimple_phi_num_args (phi); i++)
4904 tree t = gimple_phi_arg_def (phi, i);
4906 if (!t)
4908 error ("missing PHI def");
4909 err |= true;
4910 continue;
4912 /* Addressable variables do have SSA_NAMEs but they
4913 are not considered gimple values. */
4914 else if ((TREE_CODE (t) == SSA_NAME
4915 && virtual_p != virtual_operand_p (t))
4916 || (virtual_p
4917 && (TREE_CODE (t) != SSA_NAME
4918 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4919 || (!virtual_p
4920 && !is_gimple_val (t)))
4922 error ("invalid PHI argument");
4923 debug_generic_expr (t);
4924 err |= true;
4926 #ifdef ENABLE_TYPES_CHECKING
4927 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4929 error ("incompatible types in PHI argument %u", i);
4930 debug_generic_stmt (TREE_TYPE (phi_result));
4931 debug_generic_stmt (TREE_TYPE (t));
4932 err |= true;
4934 #endif
4937 return err;
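/* For example (illustrative): a valid PHI merging two predecessors,
     x_3 = PHI <x_1(2), x_2(4)>;
   every argument must be a GIMPLE value (or, for a virtual PHI, an
   SSA name of the single virtual operand .MEM).  */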
4940 /* Verify the GIMPLE statements inside the sequence STMTS. */
4942 static bool
4943 verify_gimple_in_seq_2 (gimple_seq stmts)
4945 gimple_stmt_iterator ittr;
4946 bool err = false;
4948 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4950 gimple *stmt = gsi_stmt (ittr);
4952 switch (gimple_code (stmt))
4954 case GIMPLE_BIND:
4955 err |= verify_gimple_in_seq_2 (
4956 gimple_bind_body (as_a <gbind *> (stmt)));
4957 break;
4959 case GIMPLE_TRY:
4960 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4961 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4962 break;
4964 case GIMPLE_EH_FILTER:
4965 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4966 break;
4968 case GIMPLE_EH_ELSE:
4970 geh_else *eh_else = as_a <geh_else *> (stmt);
4971 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4972 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4974 break;
4976 case GIMPLE_CATCH:
4977 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4978 as_a <gcatch *> (stmt)));
4979 break;
4981 case GIMPLE_TRANSACTION:
4982 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4983 break;
4985 default:
4987 bool err2 = verify_gimple_stmt (stmt);
4988 if (err2)
4989 debug_gimple_stmt (stmt);
4990 err |= err2;
4995 return err;
4998 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4999 is a problem, otherwise false. */
5001 static bool
5002 verify_gimple_transaction (gtransaction *stmt)
5004 tree lab;
5006 lab = gimple_transaction_label_norm (stmt);
5007 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5008 return true;
5009 lab = gimple_transaction_label_uninst (stmt);
5010 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5011 return true;
5012 lab = gimple_transaction_label_over (stmt);
5013 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5014 return true;
5016 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5020 /* Verify the GIMPLE statements inside the statement list STMTS. */
5022 DEBUG_FUNCTION void
5023 verify_gimple_in_seq (gimple_seq stmts)
5025 timevar_push (TV_TREE_STMT_VERIFY);
5026 if (verify_gimple_in_seq_2 (stmts))
5027 internal_error ("verify_gimple failed");
5028 timevar_pop (TV_TREE_STMT_VERIFY);
5031 /* Return true when tree node T can be shared. */
5033 static bool
5034 tree_node_can_be_shared (tree t)
5036 if (IS_TYPE_OR_DECL_P (t)
5037 || TREE_CODE (t) == SSA_NAME
5038 || TREE_CODE (t) == IDENTIFIER_NODE
5039 || TREE_CODE (t) == CASE_LABEL_EXPR
5040 || is_gimple_min_invariant (t))
5041 return true;
5043 if (t == error_mark_node)
5044 return true;
5046 return false;
5049 /* Called via walk_tree. Verify tree sharing. */
5051 static tree
5052 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5054 hash_set<void *> *visited = (hash_set<void *> *) data;
5056 if (tree_node_can_be_shared (*tp))
5058 *walk_subtrees = false;
5059 return NULL;
5062 if (visited->add (*tp))
5063 return *tp;
5065 return NULL;
5068 /* Called via walk_gimple_stmt. Verify tree sharing. */
5070 static tree
5071 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5073 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5074 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5077 static bool eh_error_found;
5078 bool
5079 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5080 hash_set<gimple *> *visited)
5082 if (!visited->contains (stmt))
5084 error ("dead STMT in EH table");
5085 debug_gimple_stmt (stmt);
5086 eh_error_found = true;
5088 return true;
5091 /* Verify that the block of location LOC is in BLOCKS. */
5093 static bool
5094 verify_location (hash_set<tree> *blocks, location_t loc)
5096 tree block = LOCATION_BLOCK (loc);
5097 if (block != NULL_TREE
5098 && !blocks->contains (block))
5100 error ("location references block not in block tree");
5101 return true;
5103 if (block != NULL_TREE)
5104 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5105 return false;
5108 /* Called via walk_tree. Verify that expressions have no blocks. */
5110 static tree
5111 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5113 if (!EXPR_P (*tp))
5115 *walk_subtrees = false;
5116 return NULL;
5119 location_t loc = EXPR_LOCATION (*tp);
5120 if (LOCATION_BLOCK (loc) != NULL)
5121 return *tp;
5123 return NULL;
5126 /* Called via walk_tree. Verify locations of expressions. */
5128 static tree
5129 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5131 hash_set<tree> *blocks = (hash_set<tree> *) data;
5132 tree t = *tp;
5134 /* ??? This doesn't really belong here but there's no good place to
5135 stick this remainder of old verify_expr. */
5136 /* ??? This barfs on debug stmts which contain binds to vars with
5137 different function context. */
5138 #if 0
5139 if (VAR_P (t)
5140 || TREE_CODE (t) == PARM_DECL
5141 || TREE_CODE (t) == RESULT_DECL)
5143 tree context = decl_function_context (t);
5144 if (context != cfun->decl
5145 && !SCOPE_FILE_SCOPE_P (context)
5146 && !TREE_STATIC (t)
5147 && !DECL_EXTERNAL (t))
5149 error ("local declaration from a different function");
5150 return t;
5153 #endif
5155 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5157 tree x = DECL_DEBUG_EXPR (t);
5158 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5159 if (addr)
5160 return addr;
5162 if ((VAR_P (t)
5163 || TREE_CODE (t) == PARM_DECL
5164 || TREE_CODE (t) == RESULT_DECL)
5165 && DECL_HAS_VALUE_EXPR_P (t))
5167 tree x = DECL_VALUE_EXPR (t);
5168 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5169 if (addr)
5170 return addr;
5173 if (!EXPR_P (t))
5175 *walk_subtrees = false;
5176 return NULL;
5179 location_t loc = EXPR_LOCATION (t);
5180 if (verify_location (blocks, loc))
5181 return t;
5183 return NULL;
5186 /* Called via walk_gimple_op. Verify locations of expressions. */
5188 static tree
5189 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5191 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5192 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5195 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5197 static void
5198 collect_subblocks (hash_set<tree> *blocks, tree block)
5200 tree t;
5201 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5203 blocks->add (t);
5204 collect_subblocks (blocks, t);
5208 /* Verify the GIMPLE statements in the CFG of FN. */
5210 DEBUG_FUNCTION void
5211 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5213 basic_block bb;
5214 bool err = false;
5216 timevar_push (TV_TREE_STMT_VERIFY);
5217 hash_set<void *> visited;
5218 hash_set<gimple *> visited_throwing_stmts;
5220 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5221 hash_set<tree> blocks;
5222 if (DECL_INITIAL (fn->decl))
5224 blocks.add (DECL_INITIAL (fn->decl));
5225 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5228 FOR_EACH_BB_FN (bb, fn)
5230 gimple_stmt_iterator gsi;
5232 for (gphi_iterator gpi = gsi_start_phis (bb);
5233 !gsi_end_p (gpi);
5234 gsi_next (&gpi))
5236 gphi *phi = gpi.phi ();
5237 bool err2 = false;
5238 unsigned i;
5240 if (gimple_bb (phi) != bb)
5242 error ("gimple_bb (phi) is set to a wrong basic block");
5243 err2 = true;
5246 err2 |= verify_gimple_phi (phi);
5248 /* Only PHI arguments have locations. */
5249 if (gimple_location (phi) != UNKNOWN_LOCATION)
5251 error ("PHI node with location");
5252 err2 = true;
5255 for (i = 0; i < gimple_phi_num_args (phi); i++)
5257 tree arg = gimple_phi_arg_def (phi, i);
5258 tree addr = walk_tree (&arg, verify_node_sharing_1,
5259 &visited, NULL);
5260 if (addr)
5262 error ("incorrect sharing of tree nodes");
5263 debug_generic_expr (addr);
5264 err2 |= true;
5266 location_t loc = gimple_phi_arg_location (phi, i);
5267 if (virtual_operand_p (gimple_phi_result (phi))
5268 && loc != UNKNOWN_LOCATION)
5270 error ("virtual PHI with argument locations");
5271 err2 = true;
5273 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5274 if (addr)
5276 debug_generic_expr (addr);
5277 err2 = true;
5279 err2 |= verify_location (&blocks, loc);
5282 if (err2)
5283 debug_gimple_stmt (phi);
5284 err |= err2;
5287 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5289 gimple *stmt = gsi_stmt (gsi);
5290 bool err2 = false;
5291 struct walk_stmt_info wi;
5292 tree addr;
5293 int lp_nr;
5295 if (gimple_bb (stmt) != bb)
5297 error ("gimple_bb (stmt) is set to a wrong basic block");
5298 err2 = true;
5301 err2 |= verify_gimple_stmt (stmt);
5302 err2 |= verify_location (&blocks, gimple_location (stmt));
5304 memset (&wi, 0, sizeof (wi));
5305 wi.info = (void *) &visited;
5306 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5307 if (addr)
5309 error ("incorrect sharing of tree nodes");
5310 debug_generic_expr (addr);
5311 err2 |= true;
5314 memset (&wi, 0, sizeof (wi));
5315 wi.info = (void *) &blocks;
5316 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5317 if (addr)
5319 debug_generic_expr (addr);
5320 err2 |= true;
5323 /* If the statement is marked as part of an EH region, then it is
5324 expected that the statement could throw. Verify that when we
5325 have optimizations that simplify statements such that we prove
5326 that they cannot throw, that we update other data structures
5327 to match. */
5328 lp_nr = lookup_stmt_eh_lp (stmt);
5329 if (lp_nr != 0)
5330 visited_throwing_stmts.add (stmt);
5331 if (lp_nr > 0)
5333 if (!stmt_could_throw_p (stmt))
5335 if (verify_nothrow)
5337 error ("statement marked for throw, but doesn%'t");
5338 err2 |= true;
5341 else if (!gsi_one_before_end_p (gsi))
5343 error ("statement marked for throw in middle of block");
5344 err2 |= true;
5348 if (err2)
5349 debug_gimple_stmt (stmt);
5350 err |= err2;
5354 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5355 eh_error_found = false;
5356 if (eh_table)
5357 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5358 (&visited_throwing_stmts);
5360 if (err || eh_error_found)
5361 internal_error ("verify_gimple failed");
5363 verify_histograms ();
5364 timevar_pop (TV_TREE_STMT_VERIFY);
5368 /* Verifies that the flow information is OK. */
5370 static int
5371 gimple_verify_flow_info (void)
5373 int err = 0;
5374 basic_block bb;
5375 gimple_stmt_iterator gsi;
5376 gimple *stmt;
5377 edge e;
5378 edge_iterator ei;
5380 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5381 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5383 error ("ENTRY_BLOCK has IL associated with it");
5384 err = 1;
5387 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5388 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5390 error ("EXIT_BLOCK has IL associated with it");
5391 err = 1;
5394 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5395 if (e->flags & EDGE_FALLTHRU)
5397 error ("fallthru to exit from bb %d", e->src->index);
5398 err = 1;
5401 FOR_EACH_BB_FN (bb, cfun)
5403 bool found_ctrl_stmt = false;
5405 stmt = NULL;
5407 /* Skip labels on the start of basic block. */
5408 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5410 tree label;
5411 gimple *prev_stmt = stmt;
5413 stmt = gsi_stmt (gsi);
5415 if (gimple_code (stmt) != GIMPLE_LABEL)
5416 break;
5418 label = gimple_label_label (as_a <glabel *> (stmt));
5419 if (prev_stmt && DECL_NONLOCAL (label))
5421 error ("nonlocal label ");
5422 print_generic_expr (stderr, label);
5423 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5424 bb->index);
5425 err = 1;
5428 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5430 error ("EH landing pad label ");
5431 print_generic_expr (stderr, label);
5432 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5433 bb->index);
5434 err = 1;
5437 if (label_to_block (label) != bb)
5439 error ("label ");
5440 print_generic_expr (stderr, label);
5441 fprintf (stderr, " to block does not match in bb %d",
5442 bb->index);
5443 err = 1;
5446 if (decl_function_context (label) != current_function_decl)
5448 error ("label ");
5449 print_generic_expr (stderr, label);
5450 fprintf (stderr, " has incorrect context in bb %d",
5451 bb->index);
5452 err = 1;
5456 /* Verify that body of basic block BB is free of control flow. */
5457 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5459 gimple *stmt = gsi_stmt (gsi);
5461 if (found_ctrl_stmt)
5463 error ("control flow in the middle of basic block %d",
5464 bb->index);
5465 err = 1;
5468 if (stmt_ends_bb_p (stmt))
5469 found_ctrl_stmt = true;
5471 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5473 error ("label ");
5474 print_generic_expr (stderr, gimple_label_label (label_stmt));
5475 fprintf (stderr, " in the middle of basic block %d", bb->index);
5476 err = 1;
5480 gsi = gsi_last_nondebug_bb (bb);
5481 if (gsi_end_p (gsi))
5482 continue;
5484 stmt = gsi_stmt (gsi);
5486 if (gimple_code (stmt) == GIMPLE_LABEL)
5487 continue;
5489 err |= verify_eh_edges (stmt);
5491 if (is_ctrl_stmt (stmt))
5493 FOR_EACH_EDGE (e, ei, bb->succs)
5494 if (e->flags & EDGE_FALLTHRU)
5496 error ("fallthru edge after a control statement in bb %d",
5497 bb->index);
5498 err = 1;
5502 if (gimple_code (stmt) != GIMPLE_COND)
5504 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5505 after anything other than a GIMPLE_COND statement. */
5506 FOR_EACH_EDGE (e, ei, bb->succs)
5507 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5509 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5510 bb->index);
5511 err = 1;
5515 switch (gimple_code (stmt))
5517 case GIMPLE_COND:
5519 edge true_edge;
5520 edge false_edge;
5522 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5524 if (!true_edge
5525 || !false_edge
5526 || !(true_edge->flags & EDGE_TRUE_VALUE)
5527 || !(false_edge->flags & EDGE_FALSE_VALUE)
5528 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5529 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5530 || EDGE_COUNT (bb->succs) >= 3)
5532 error ("wrong outgoing edge flags at end of bb %d",
5533 bb->index);
5534 err = 1;
5537 break;
5539 case GIMPLE_GOTO:
5540 if (simple_goto_p (stmt))
5542 error ("explicit goto at end of bb %d", bb->index);
5543 err = 1;
5545 else
5547 /* FIXME. We should double check that the labels in the
5548 destination blocks have their address taken. */
5549 FOR_EACH_EDGE (e, ei, bb->succs)
5550 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5551 | EDGE_FALSE_VALUE))
5552 || !(e->flags & EDGE_ABNORMAL))
5554 error ("wrong outgoing edge flags at end of bb %d",
5555 bb->index);
5556 err = 1;
5559 break;
5561 case GIMPLE_CALL:
5562 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5563 break;
5564 /* fallthru */
5565 case GIMPLE_RETURN:
5566 if (!single_succ_p (bb)
5567 || (single_succ_edge (bb)->flags
5568 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5569 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5571 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5572 err = 1;
5574 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5576 error ("return edge does not point to exit in bb %d",
5577 bb->index);
5578 err = 1;
5580 break;
5582 case GIMPLE_SWITCH:
5584 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5585 tree prev;
5586 edge e;
5587 size_t i, n;
5589 n = gimple_switch_num_labels (switch_stmt);
5591 /* Mark all the destination basic blocks. */
5592 for (i = 0; i < n; ++i)
5594 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5595 basic_block label_bb = label_to_block (lab);
5596 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5597 label_bb->aux = (void *)1;
5600 /* Verify that the case labels are sorted. */
5601 prev = gimple_switch_label (switch_stmt, 0);
5602 for (i = 1; i < n; ++i)
5604 tree c = gimple_switch_label (switch_stmt, i);
5605 if (!CASE_LOW (c))
5607 error ("found default case not at the start of "
5608 "case vector");
5609 err = 1;
5610 continue;
5612 if (CASE_LOW (prev)
5613 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5615 error ("case labels not sorted: ");
5616 print_generic_expr (stderr, prev);
5617 fprintf (stderr," is greater than ");
5618 print_generic_expr (stderr, c);
5619 fprintf (stderr," but comes before it.\n");
5620 err = 1;
5622 prev = c;
5624 /* VRP will remove the default case if it can prove it will
5625 never be executed. So do not verify there always exists
5626 a default case here. */
5628 FOR_EACH_EDGE (e, ei, bb->succs)
5630 if (!e->dest->aux)
5632 error ("extra outgoing edge %d->%d",
5633 bb->index, e->dest->index);
5634 err = 1;
5637 e->dest->aux = (void *)2;
5638 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5639 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5641 error ("wrong outgoing edge flags at end of bb %d",
5642 bb->index);
5643 err = 1;
5647 /* Check that we have all of them. */
5648 for (i = 0; i < n; ++i)
5650 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5651 basic_block label_bb = label_to_block (lab);
5653 if (label_bb->aux != (void *)2)
5655 error ("missing edge %i->%i", bb->index, label_bb->index);
5656 err = 1;
5660 FOR_EACH_EDGE (e, ei, bb->succs)
5661 e->dest->aux = (void *)0;
5663 break;
5665 case GIMPLE_EH_DISPATCH:
5666 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5667 break;
5669 default:
5670 break;
5674 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5675 verify_dominators (CDI_DOMINATORS);
5677 return err;
5681 /* Updates phi nodes after creating a forwarder block joined
5682 by edge FALLTHRU. */
5684 static void
5685 gimple_make_forwarder_block (edge fallthru)
5687 edge e;
5688 edge_iterator ei;
5689 basic_block dummy, bb;
5690 tree var;
5691 gphi_iterator gsi;
5693 dummy = fallthru->src;
5694 bb = fallthru->dest;
5696 if (single_pred_p (bb))
5697 return;
5699 /* If we redirected a branch we must create new PHI nodes at the
5700 start of BB. */
5701 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5703 gphi *phi, *new_phi;
5705 phi = gsi.phi ();
5706 var = gimple_phi_result (phi);
5707 new_phi = create_phi_node (var, bb);
5708 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5709 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5710 UNKNOWN_LOCATION);
5713 /* Add the arguments we have stored on edges. */
5714 FOR_EACH_EDGE (e, ei, bb->preds)
5716 if (e == fallthru)
5717 continue;
5719 flush_pending_stmts (e);
5724 /* Return a non-special label at the head of basic block BB.
5725 Create one if it doesn't exist. */
5727 tree
5728 gimple_block_label (basic_block bb)
5730 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5731 bool first = true;
5732 tree label;
5733 glabel *stmt;
5735 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5737 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5738 if (!stmt)
5739 break;
5740 label = gimple_label_label (stmt);
5741 if (!DECL_NONLOCAL (label))
5743 if (!first)
5744 gsi_move_before (&i, &s);
5745 return label;
5749 label = create_artificial_label (UNKNOWN_LOCATION);
5750 stmt = gimple_build_label (label);
5751 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5752 return label;
5756 /* Attempt to perform edge redirection by replacing a possibly complex
5757 jump instruction by a goto or by removing the jump completely.
5758 This can apply only if all edges now point to the same block. The
5759 parameters and return values are equivalent to
5760 redirect_edge_and_branch. */
5762 static edge
5763 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5765 basic_block src = e->src;
5766 gimple_stmt_iterator i;
5767 gimple *stmt;
5769 /* We can replace or remove a complex jump only when we have exactly
5770 two edges. */
5771 if (EDGE_COUNT (src->succs) != 2
5772 /* Verify that all targets will be TARGET. Specifically, the
5773 edge that is not E must also go to TARGET. */
5774 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5775 return NULL;
5777 i = gsi_last_bb (src);
5778 if (gsi_end_p (i))
5779 return NULL;
5781 stmt = gsi_stmt (i);
5783 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5785 gsi_remove (&i, true);
5786 e = ssa_redirect_edge (e, target);
5787 e->flags = EDGE_FALLTHRU;
5788 return e;
5791 return NULL;
5795 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5796 edge representing the redirected branch. */
5798 static edge
5799 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5801 basic_block bb = e->src;
5802 gimple_stmt_iterator gsi;
5803 edge ret;
5804 gimple *stmt;
5806 if (e->flags & EDGE_ABNORMAL)
5807 return NULL;
5809 if (e->dest == dest)
5810 return NULL;
5812 if (e->flags & EDGE_EH)
5813 return redirect_eh_edge (e, dest);
5815 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5817 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5818 if (ret)
5819 return ret;
5822 gsi = gsi_last_nondebug_bb (bb);
5823 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5825 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5827 case GIMPLE_COND:
5828 /* For COND_EXPR, we only need to redirect the edge. */
5829 break;
5831 case GIMPLE_GOTO:
5832 /* No non-abnormal edges should lead from a non-simple goto, and
5833 simple ones should be represented implicitly. */
5834 gcc_unreachable ();
5836 case GIMPLE_SWITCH:
5838 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5839 tree label = gimple_block_label (dest);
5840 tree cases = get_cases_for_edge (e, switch_stmt);
5842 /* If we have a list of cases associated with E, then use it
5843 as it's a lot faster than walking the entire case vector. */
5844 if (cases)
5846 edge e2 = find_edge (e->src, dest);
5847 tree last, first;
5849 first = cases;
5850 while (cases)
5852 last = cases;
5853 CASE_LABEL (cases) = label;
5854 cases = CASE_CHAIN (cases);
5857 /* If there was already an edge in the CFG, then we need
5858 to move all the cases associated with E to E2. */
5859 if (e2)
5861 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5863 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5864 CASE_CHAIN (cases2) = first;
5866 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5868 else
5870 size_t i, n = gimple_switch_num_labels (switch_stmt);
5872 for (i = 0; i < n; i++)
5874 tree elt = gimple_switch_label (switch_stmt, i);
5875 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5876 CASE_LABEL (elt) = label;
5880 break;
5882 case GIMPLE_ASM:
5884 gasm *asm_stmt = as_a <gasm *> (stmt);
5885 int i, n = gimple_asm_nlabels (asm_stmt);
5886 tree label = NULL;
5888 for (i = 0; i < n; ++i)
5890 tree cons = gimple_asm_label_op (asm_stmt, i);
5891 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5893 if (!label)
5894 label = gimple_block_label (dest);
5895 TREE_VALUE (cons) = label;
5899 /* If we didn't find any label matching the former edge in the
5900 asm labels, we must be redirecting the fallthrough
5901 edge. */
5902 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5904 break;
5906 case GIMPLE_RETURN:
5907 gsi_remove (&gsi, true);
5908 e->flags |= EDGE_FALLTHRU;
5909 break;
5911 case GIMPLE_OMP_RETURN:
5912 case GIMPLE_OMP_CONTINUE:
5913 case GIMPLE_OMP_SECTIONS_SWITCH:
5914 case GIMPLE_OMP_FOR:
5915 /* The edges from OMP constructs can be simply redirected. */
5916 break;
5918 case GIMPLE_EH_DISPATCH:
5919 if (!(e->flags & EDGE_FALLTHRU))
5920 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5921 break;
5923 case GIMPLE_TRANSACTION:
5924 if (e->flags & EDGE_TM_ABORT)
5925 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5926 gimple_block_label (dest));
5927 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5928 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5929 gimple_block_label (dest));
5930 else
5931 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5932 gimple_block_label (dest));
5933 break;
5935 default:
5936 /* Otherwise it must be a fallthru edge, and we don't need to
5937 do anything besides redirecting it. */
5938 gcc_assert (e->flags & EDGE_FALLTHRU);
5939 break;
5942 /* Update/insert PHI nodes as necessary. */
5944 /* Now update the edges in the CFG. */
5945 e = ssa_redirect_edge (e, dest);
5947 return e;
5950 /* Returns true if it is possible to remove edge E by redirecting
5951 it to the destination of the other edge from E->src. */
5953 static bool
5954 gimple_can_remove_branch_p (const_edge e)
5956 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5957 return false;
5959 return true;
5962 /* Simple wrapper, as we can always redirect fallthru edges. */
5964 static basic_block
5965 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5967 e = gimple_redirect_edge_and_branch (e, dest);
5968 gcc_assert (e);
5970 return NULL;
5974 /* Splits basic block BB after statement STMT (but at least after the
5975 labels). If STMT is NULL, BB is split just after the labels. */
5977 static basic_block
5978 gimple_split_block (basic_block bb, void *stmt)
5980 gimple_stmt_iterator gsi;
5981 gimple_stmt_iterator gsi_tgt;
5982 gimple_seq list;
5983 basic_block new_bb;
5984 edge e;
5985 edge_iterator ei;
5987 new_bb = create_empty_bb (bb);
5989 /* Redirect the outgoing edges. */
5990 new_bb->succs = bb->succs;
5991 bb->succs = NULL;
5992 FOR_EACH_EDGE (e, ei, new_bb->succs)
5993 e->src = new_bb;
5995 /* Get a stmt iterator pointing to the first stmt to move. */
5996 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5997 gsi = gsi_after_labels (bb);
5998 else
6000 gsi = gsi_for_stmt ((gimple *) stmt);
6001 gsi_next (&gsi);
6004 /* Move everything from GSI to the new basic block. */
6005 if (gsi_end_p (gsi))
6006 return new_bb;
6008 /* Split the statement list; avoid creating new containers, as this
6009 brings ugly quadratic memory consumption in the inliner.
6010 (We are still quadratic since we need to update stmt BB pointers,
6011 sadly.) */
6012 gsi_split_seq_before (&gsi, &list);
6013 set_bb_seq (new_bb, list);
6014 for (gsi_tgt = gsi_start (list);
6015 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6016 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6018 return new_bb;
6022 /* Moves basic block BB after block AFTER. */
6024 static bool
6025 gimple_move_block_after (basic_block bb, basic_block after)
6027 if (bb->prev_bb == after)
6028 return true;
6030 unlink_block (bb);
6031 link_block (bb, after);
6033 return true;
6037 /* Return TRUE if block BB has no executable statements, otherwise return
6038 FALSE. */
6040 static bool
6041 gimple_empty_block_p (basic_block bb)
6043 /* BB must have no executable statements. */
6044 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6045 if (phi_nodes (bb))
6046 return false;
6047 if (gsi_end_p (gsi))
6048 return true;
6049 if (is_gimple_debug (gsi_stmt (gsi)))
6050 gsi_next_nondebug (&gsi);
6051 return gsi_end_p (gsi);
6055 /* Split a basic block if it ends with a conditional branch and if the
6056 other part of the block is not empty. */
6058 static basic_block
6059 gimple_split_block_before_cond_jump (basic_block bb)
6061 gimple *last, *split_point;
6062 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6063 if (gsi_end_p (gsi))
6064 return NULL;
6065 last = gsi_stmt (gsi);
6066 if (gimple_code (last) != GIMPLE_COND
6067 && gimple_code (last) != GIMPLE_SWITCH)
6068 return NULL;
6069 gsi_prev (&gsi);
6070 split_point = gsi_stmt (gsi);
6071 return split_block (bb, split_point)->dest;
6075 /* Return true if basic block BB can be duplicated. */
6077 static bool
6078 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6080 return true;
6083 /* Create a duplicate of the basic block BB. NOTE: This does not
6084 preserve SSA form. */
6086 static basic_block
6087 gimple_duplicate_bb (basic_block bb)
6089 basic_block new_bb;
6090 gimple_stmt_iterator gsi_tgt;
6092 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6094 /* Copy the PHI nodes. We ignore PHI node arguments here because
6095 the incoming edges have not been set up yet. */
6096 for (gphi_iterator gpi = gsi_start_phis (bb);
6097 !gsi_end_p (gpi);
6098 gsi_next (&gpi))
6100 gphi *phi, *copy;
6101 phi = gpi.phi ();
6102 copy = create_phi_node (NULL_TREE, new_bb);
6103 create_new_def_for (gimple_phi_result (phi), copy,
6104 gimple_phi_result_ptr (copy));
6105 gimple_set_uid (copy, gimple_uid (phi));
6108 gsi_tgt = gsi_start_bb (new_bb);
6109 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6110 !gsi_end_p (gsi);
6111 gsi_next (&gsi))
6113 def_operand_p def_p;
6114 ssa_op_iter op_iter;
6115 tree lhs;
6116 gimple *stmt, *copy;
6118 stmt = gsi_stmt (gsi);
6119 if (gimple_code (stmt) == GIMPLE_LABEL)
6120 continue;
6122 /* Don't duplicate label debug stmts. */
6123 if (gimple_debug_bind_p (stmt)
6124 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6125 == LABEL_DECL)
6126 continue;
6128 /* Create a new copy of STMT and duplicate STMT's virtual
6129 operands. */
6130 copy = gimple_copy (stmt);
6131 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6133 maybe_duplicate_eh_stmt (copy, stmt);
6134 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6136 /* When copying around a stmt writing into a local non-user
6137 aggregate, make sure it won't share stack slot with other
6138 vars. */
6139 lhs = gimple_get_lhs (stmt);
6140 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6142 tree base = get_base_address (lhs);
6143 if (base
6144 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6145 && DECL_IGNORED_P (base)
6146 && !TREE_STATIC (base)
6147 && !DECL_EXTERNAL (base)
6148 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6149 DECL_NONSHAREABLE (base) = 1;
6152 /* Create new names for all the definitions created by COPY and
6153 add replacement mappings for each new name. */
6154 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6155 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6158 return new_bb;
6161 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6163 static void
6164 add_phi_args_after_copy_edge (edge e_copy)
6166 basic_block bb, bb_copy = e_copy->src, dest;
6167 edge e;
6168 edge_iterator ei;
6169 gphi *phi, *phi_copy;
6170 tree def;
6171 gphi_iterator psi, psi_copy;
6173 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6174 return;
6176 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6178 if (e_copy->dest->flags & BB_DUPLICATED)
6179 dest = get_bb_original (e_copy->dest);
6180 else
6181 dest = e_copy->dest;
6183 e = find_edge (bb, dest);
6184 if (!e)
6186 /* During loop unrolling the target of the latch edge is copied.
6187 In this case we are not looking for the edge to DEST, but for
6188 the edge to the duplicated block whose original was DEST. */
6189 FOR_EACH_EDGE (e, ei, bb->succs)
6191 if ((e->dest->flags & BB_DUPLICATED)
6192 && get_bb_original (e->dest) == dest)
6193 break;
6196 gcc_assert (e != NULL);
6199 for (psi = gsi_start_phis (e->dest),
6200 psi_copy = gsi_start_phis (e_copy->dest);
6201 !gsi_end_p (psi);
6202 gsi_next (&psi), gsi_next (&psi_copy))
6204 phi = psi.phi ();
6205 phi_copy = psi_copy.phi ();
6206 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6207 add_phi_arg (phi_copy, def, e_copy,
6208 gimple_phi_arg_location_from_edge (phi, e));
6213 /* Basic block BB_COPY was created by code duplication. Add phi node
6214 arguments for edges going out of BB_COPY. The blocks that were
6215 duplicated have BB_DUPLICATED set. */
6217 void
6218 add_phi_args_after_copy_bb (basic_block bb_copy)
6220 edge e_copy;
6221 edge_iterator ei;
6223 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6225 add_phi_args_after_copy_edge (e_copy);
6229 /* Blocks in REGION_COPY array of length N_REGION were created by
6230 duplication of basic blocks. Add phi node arguments for edges
6231 going from these blocks. If E_COPY is not NULL, also add
6232 phi node arguments for its destination. */
6234 void
6235 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6236 edge e_copy)
6238 unsigned i;
6240 for (i = 0; i < n_region; i++)
6241 region_copy[i]->flags |= BB_DUPLICATED;
6243 for (i = 0; i < n_region; i++)
6244 add_phi_args_after_copy_bb (region_copy[i]);
6245 if (e_copy)
6246 add_phi_args_after_copy_edge (e_copy);
6248 for (i = 0; i < n_region; i++)
6249 region_copy[i]->flags &= ~BB_DUPLICATED;
6252 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6253 important exit edge EXIT. By important we mean that no SSA name defined
6254 inside region is live over the other exit edges of the region. All entry
6255 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6256 to the duplicate of the region. Dominance and loop information is
6257 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6258 UPDATE_DOMINANCE is false then we assume that the caller will update the
6259 dominance information after calling this function. The new basic
6260 blocks are stored to REGION_COPY in the same order as they had in REGION,
6261 provided that REGION_COPY is not NULL.
6262 The function returns false if it is unable to copy the region,
6263 true otherwise. */
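/* For example (an illustrative sketch): in loop header copying the
   region is just the loop header, ENTRY is the preheader edge and
   EXIT the edge from the header into the loop body; after duplication
   the header copy guards the loop from the preheader, and EXIT->src
   and EXIT->dest become the new latch and header as set below.  */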
6265 bool
6266 gimple_duplicate_sese_region (edge entry, edge exit,
6267 basic_block *region, unsigned n_region,
6268 basic_block *region_copy,
6269 bool update_dominance)
6271 unsigned i;
6272 bool free_region_copy = false, copying_header = false;
6273 struct loop *loop = entry->dest->loop_father;
6274 edge exit_copy;
6275 vec<basic_block> doms = vNULL;
6276 edge redirected;
6277 profile_count total_count = profile_count::uninitialized ();
6278 profile_count entry_count = profile_count::uninitialized ();
6280 if (!can_copy_bbs_p (region, n_region))
6281 return false;
6283 /* Some sanity checking. Note that we do not check for all possible
6284 misuses of the function. E.g. if you ask to copy something weird,
6285 it will work, but the state of structures probably will not be
6286 correct. */
6287 for (i = 0; i < n_region; i++)
6289 /* We do not handle subloops, i.e. all the blocks must belong to the
6290 same loop. */
6291 if (region[i]->loop_father != loop)
6292 return false;
6294 if (region[i] != entry->dest
6295 && region[i] == loop->header)
6296 return false;
6299 /* In case the function is used for loop header copying (which is the primary
6300 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6301 if (loop->header == entry->dest)
6303 copying_header = true;
6305 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6306 return false;
6308 for (i = 0; i < n_region; i++)
6309 if (region[i] != exit->src
6310 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6311 return false;
6314 initialize_original_copy_tables ();
6316 if (copying_header)
6317 set_loop_copy (loop, loop_outer (loop));
6318 else
6319 set_loop_copy (loop, loop);
6321 if (!region_copy)
6323 region_copy = XNEWVEC (basic_block, n_region);
6324 free_region_copy = true;
6327 /* Record blocks outside the region that are dominated by something
6328 inside. */
6329 if (update_dominance)
6331 doms.create (0);
6332 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6335 if (entry->dest->count.initialized_p ())
6337 total_count = entry->dest->count;
6338 entry_count = entry->count ();
6339 /* Fix up corner cases, to avoid division by zero or creation of negative
6340 frequencies. */
6341 if (entry_count > total_count)
6342 entry_count = total_count;
6345 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6346 split_edge_bb_loc (entry), update_dominance);
6347 if (total_count.initialized_p () && entry_count.initialized_p ())
6349 scale_bbs_frequencies_profile_count (region, n_region,
6350 total_count - entry_count,
6351 total_count);
6352 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6353 total_count);
6356 if (copying_header)
6358 loop->header = exit->dest;
6359 loop->latch = exit->src;
6362 /* Redirect the entry and add the phi node arguments. */
6363 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6364 gcc_assert (redirected != NULL);
6365 flush_pending_stmts (entry);
6367 /* Concerning updating of dominators: We must recount dominators
6368 for entry block and its copy. Anything that is outside of the
6369 region, but was dominated by something inside needs recounting as
6370 well. */
6371 if (update_dominance)
6373 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6374 doms.safe_push (get_bb_original (entry->dest));
6375 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6376 doms.release ();
6379 /* Add the other PHI node arguments. */
6380 add_phi_args_after_copy (region_copy, n_region, NULL);
6382 if (free_region_copy)
6383 free (region_copy);
6385 free_original_copy_tables ();
6386 return true;
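/* Usage sketch for the primary loop-header-copying case (editorial
   illustration, not a call site in this file; assumes REGION already
   holds the header blocks, and uses a placeholder name for the exit
   edge the caller would have determined):

     edge entry = loop_preheader_edge (loop);
     edge exit = the_single_important_exit_of_the_region;
     if (gimple_duplicate_sese_region (entry, exit, region, n_region,
                                       NULL, true))
       update_ssa (TODO_update_ssa);

   As documented above, updating the SSA web is deliberately left to
   the caller.  */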
6389 /* Checks if BB is part of the region defined by N_REGION BBS. */
6390 static bool
6391 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6393 unsigned int n;
6395 for (n = 0; n < n_region; n++)
6397 if (bb == bbs[n])
6398 return true;
6400 return false;
6403 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6404 are stored to REGION_COPY in the same order in which they appear
6405 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6406 the region, EXIT an exit from it. The condition guarding EXIT
6407 is moved to ENTRY. Returns true if duplication succeeds, false
6408 otherwise.
6410 For example,
6412 some_code;
6413 if (cond)
6414 ...;
6415 else
6416 ...;
6418 is transformed to
6420 if (cond)
6422 some_code;
6423 ...;
6425 else
6427 some_code;
6428 ...;
6430 */
6432 bool
6433 gimple_duplicate_sese_tail (edge entry, edge exit,
6434 basic_block *region, unsigned n_region,
6435 basic_block *region_copy)
6437 unsigned i;
6438 bool free_region_copy = false;
6439 struct loop *loop = exit->dest->loop_father;
6440 struct loop *orig_loop = entry->dest->loop_father;
6441 basic_block switch_bb, entry_bb, nentry_bb;
6442 vec<basic_block> doms;
6443 profile_count total_count = profile_count::uninitialized (),
6444 exit_count = profile_count::uninitialized ();
6445 edge exits[2], nexits[2], e;
6446 gimple_stmt_iterator gsi;
6447 gimple *cond_stmt;
6448 edge sorig, snew;
6449 basic_block exit_bb;
6450 gphi_iterator psi;
6451 gphi *phi;
6452 tree def;
6453 struct loop *target, *aloop, *cloop;
6455 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6456 exits[0] = exit;
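/* The other successor of EXIT->src: the index expression evaluates to 1
   when successor 0 is EXIT itself, and to 0 otherwise.  */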
6457 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6459 if (!can_copy_bbs_p (region, n_region))
6460 return false;
6462 initialize_original_copy_tables ();
6463 set_loop_copy (orig_loop, loop);
6465 target = loop;
6466 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6468 if (bb_part_of_region_p (aloop->header, region, n_region))
6470 cloop = duplicate_loop (aloop, target);
6471 duplicate_subloops (aloop, cloop);
6475 if (!region_copy)
6477 region_copy = XNEWVEC (basic_block, n_region);
6478 free_region_copy = true;
6481 gcc_assert (!need_ssa_update_p (cfun));
6483 /* Record blocks outside the region that are dominated by something
6484 inside. */
6485 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6487 total_count = exit->src->count;
6488 exit_count = exit->count ();
6489 /* Fix up corner cases, to avoid division by zero or creation of negative
6490 frequencies. */
6491 if (exit_count > total_count)
6492 exit_count = total_count;
6494 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6495 split_edge_bb_loc (exit), true);
6496 if (total_count.initialized_p () && exit_count.initialized_p ())
6498 scale_bbs_frequencies_profile_count (region, n_region,
6499 total_count - exit_count,
6500 total_count);
6501 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6502 total_count);
6505 /* Create the switch block, and put the exit condition to it. */
6506 entry_bb = entry->dest;
6507 nentry_bb = get_bb_copy (entry_bb);
6508 if (!last_stmt (entry->src)
6509 || !stmt_ends_bb_p (last_stmt (entry->src)))
6510 switch_bb = entry->src;
6511 else
6512 switch_bb = split_edge (entry);
6513 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6515 gsi = gsi_last_bb (switch_bb);
6516 cond_stmt = last_stmt (exit->src);
6517 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6518 cond_stmt = gimple_copy (cond_stmt);
6520 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6522 sorig = single_succ_edge (switch_bb);
6523 sorig->flags = exits[1]->flags;
6524 sorig->probability = exits[1]->probability;
6525 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6526 snew->probability = exits[0]->probability;
6529 /* Register the new edge from SWITCH_BB in loop exit lists. */
6530 rescan_loop_exit (snew, true, false);
6532 /* Add the PHI node arguments. */
6533 add_phi_args_after_copy (region_copy, n_region, snew);
6535 /* Get rid of now superfluous conditions and associated edges (and phi node
6536 arguments). */
6537 exit_bb = exit->dest;
6539 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6540 PENDING_STMT (e) = NULL;
6542 /* The latch of ORIG_LOOP was copied, and so was the backedge
6543 to the original header. We redirect this backedge to EXIT_BB. */
6544 for (i = 0; i < n_region; i++)
6545 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6547 gcc_assert (single_succ_edge (region_copy[i]));
6548 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6549 PENDING_STMT (e) = NULL;
6550 for (psi = gsi_start_phis (exit_bb);
6551 !gsi_end_p (psi);
6552 gsi_next (&psi))
6554 phi = psi.phi ();
6555 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6556 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6559 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6560 PENDING_STMT (e) = NULL;
6562 /* Anything that is outside of the region, but was dominated by something
6563 inside needs to update dominance info. */
6564 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6565 doms.release ();
6566 /* Update the SSA web. */
6567 update_ssa (TODO_update_ssa);
6569 if (free_region_copy)
6570 free (region_copy);
6572 free_original_copy_tables ();
6573 return true;
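/* Editorial note: the loop parallelization code (tree-parloops.c,
   transform_to_exit_first_loop) is the typical consumer of this
   transformation, using it to move the exit test to the start of the
   loop before the body is split off into threads.  */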
6576 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6577 adding blocks when the dominator traversal reaches EXIT. This
6578 function silently assumes that ENTRY strictly dominates EXIT. */
6580 void
6581 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6582 vec<basic_block> *bbs_p)
6584 basic_block son;
6586 for (son = first_dom_son (CDI_DOMINATORS, entry);
6587 son;
6588 son = next_dom_son (CDI_DOMINATORS, son))
6590 bbs_p->safe_push (son);
6591 if (son != exit)
6592 gather_blocks_in_sese_region (son, exit, bbs_p);
6596 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6597 The duplicates are recorded in VARS_MAP. */
6599 static void
6600 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6601 tree to_context)
6603 tree t = *tp, new_t;
6604 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6606 if (DECL_CONTEXT (t) == to_context)
6607 return;
6609 bool existed;
6610 tree &loc = vars_map->get_or_insert (t, &existed);
6612 if (!existed)
6614 if (SSA_VAR_P (t))
6616 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6617 add_local_decl (f, new_t);
6619 else
6621 gcc_assert (TREE_CODE (t) == CONST_DECL);
6622 new_t = copy_node (t);
6624 DECL_CONTEXT (new_t) = to_context;
6626 loc = new_t;
6628 else
6629 new_t = loc;
6631 *tp = new_t;
6635 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6636 VARS_MAP maps old ssa names and var_decls to the new ones. */
6638 static tree
6639 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6640 tree to_context)
6642 tree new_name;
6644 gcc_assert (!virtual_operand_p (name));
6646 tree *loc = vars_map->get (name);
6648 if (!loc)
6650 tree decl = SSA_NAME_VAR (name);
6651 if (decl)
6653 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6654 replace_by_duplicate_decl (&decl, vars_map, to_context);
6655 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6656 decl, SSA_NAME_DEF_STMT (name));
6658 else
6659 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6660 name, SSA_NAME_DEF_STMT (name));
6662 /* Now that we've used the def stmt to define new_name, make sure it
6663 doesn't define name anymore. */
6664 SSA_NAME_DEF_STMT (name) = NULL;
6666 vars_map->put (name, new_name);
6668 else
6669 new_name = *loc;
6671 return new_name;
6674 struct move_stmt_d
6676 tree orig_block;
6677 tree new_block;
6678 tree from_context;
6679 tree to_context;
6680 hash_map<tree, tree> *vars_map;
6681 htab_t new_label_map;
6682 hash_map<void *, void *> *eh_map;
6683 bool remap_decls_p;
6686 /* Helper for move_block_to_fn. Set TREE_BLOCK of every expression
6687 contained in *TP to NEW_BLOCK if it was ORIG_BLOCK previously, and
6688 change the DECL_CONTEXT of every local variable referenced in *TP. */
6690 static tree
6691 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6693 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6694 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6695 tree t = *tp;
6697 if (EXPR_P (t))
6699 tree block = TREE_BLOCK (t);
6700 if (block == NULL_TREE)
6702 else if (block == p->orig_block
6703 || p->orig_block == NULL_TREE)
6704 TREE_SET_BLOCK (t, p->new_block);
6705 else if (flag_checking)
6707 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6708 block = BLOCK_SUPERCONTEXT (block);
6709 gcc_assert (block == p->orig_block);
6712 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6714 if (TREE_CODE (t) == SSA_NAME)
6715 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6716 else if (TREE_CODE (t) == PARM_DECL
6717 && gimple_in_ssa_p (cfun))
6718 *tp = *(p->vars_map->get (t));
6719 else if (TREE_CODE (t) == LABEL_DECL)
6721 if (p->new_label_map)
6723 struct tree_map in, *out;
6724 in.base.from = t;
6725 out = (struct tree_map *)
6726 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6727 if (out)
6728 *tp = t = out->to;
6731 /* For FORCED_LABELs we can end up with references from other
6732 functions if some SESE regions are outlined. It is UB to
6733 jump in between them, but they could be used just for printing
6734 addresses etc. In that case, DECL_CONTEXT on the label should
6735 be the function containing the glabel stmt with that LABEL_DECL,
6736 rather than whatever function last happened to contain a reference
6737 to the label. */
6738 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6739 DECL_CONTEXT (t) = p->to_context;
6741 else if (p->remap_decls_p)
6743 /* Replace T with its duplicate. T should no longer appear in the
6744 parent function, so this looks wasteful; however, it may appear
6745 in referenced_vars, and more importantly, as virtual operands of
6746 statements, and in alias lists of other variables. It would be
6747 quite difficult to expunge it from all those places. ??? It might
6748 suffice to do this for addressable variables. */
6749 if ((VAR_P (t) && !is_global_var (t))
6750 || TREE_CODE (t) == CONST_DECL)
6751 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6753 *walk_subtrees = 0;
6755 else if (TYPE_P (t))
6756 *walk_subtrees = 0;
6758 return NULL_TREE;
6761 /* Helper for move_stmt_r. Given an EH region number for the source
6762 function, map that to the duplicate EH region number in the dest. */
6764 static int
6765 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6767 eh_region old_r, new_r;
6769 old_r = get_eh_region_from_number (old_nr);
6770 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6772 return new_r->index;
6775 /* Similar, but operate on INTEGER_CSTs. */
6777 static tree
6778 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6780 int old_nr, new_nr;
6782 old_nr = tree_to_shwi (old_t_nr);
6783 new_nr = move_stmt_eh_region_nr (old_nr, p);
6785 return build_int_cst (integer_type_node, new_nr);
6788 /* Like move_stmt_op, but for gimple statements.
6790 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6791 contained in the current statement in *GSI_P and change the
6792 DECL_CONTEXT of every local variable referenced in the current
6793 statement. */
6795 static tree
6796 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6797 struct walk_stmt_info *wi)
6799 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6800 gimple *stmt = gsi_stmt (*gsi_p);
6801 tree block = gimple_block (stmt);
6803 if (block == p->orig_block
6804 || (p->orig_block == NULL_TREE
6805 && block != NULL_TREE))
6806 gimple_set_block (stmt, p->new_block);
6808 switch (gimple_code (stmt))
6810 case GIMPLE_CALL:
6811 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6813 tree r, fndecl = gimple_call_fndecl (stmt);
6814 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6815 switch (DECL_FUNCTION_CODE (fndecl))
6817 case BUILT_IN_EH_COPY_VALUES:
6818 r = gimple_call_arg (stmt, 1);
6819 r = move_stmt_eh_region_tree_nr (r, p);
6820 gimple_call_set_arg (stmt, 1, r);
6821 /* FALLTHRU */
6823 case BUILT_IN_EH_POINTER:
6824 case BUILT_IN_EH_FILTER:
6825 r = gimple_call_arg (stmt, 0);
6826 r = move_stmt_eh_region_tree_nr (r, p);
6827 gimple_call_set_arg (stmt, 0, r);
6828 break;
6830 default:
6831 break;
6834 break;
6836 case GIMPLE_RESX:
6838 gresx *resx_stmt = as_a <gresx *> (stmt);
6839 int r = gimple_resx_region (resx_stmt);
6840 r = move_stmt_eh_region_nr (r, p);
6841 gimple_resx_set_region (resx_stmt, r);
6843 break;
6845 case GIMPLE_EH_DISPATCH:
6847 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6848 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6849 r = move_stmt_eh_region_nr (r, p);
6850 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6852 break;
6854 case GIMPLE_OMP_RETURN:
6855 case GIMPLE_OMP_CONTINUE:
6856 break;
6858 case GIMPLE_LABEL:
6860 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6861 so that such labels can be referenced from other regions.
6862 Make sure to update it when seeing a GIMPLE_LABEL though,
6863 that is the owner of the label. */
6864 walk_gimple_op (stmt, move_stmt_op, wi);
6865 *handled_ops_p = true;
6866 tree label = gimple_label_label (as_a <glabel *> (stmt));
6867 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6868 DECL_CONTEXT (label) = p->to_context;
6870 break;
6872 default:
6873 if (is_gimple_omp (stmt))
6875 /* Do not remap variables inside OMP directives. Variables
6876 referenced in clauses and directive header belong to the
6877 parent function and should not be moved into the child
6878 function. */
6879 bool save_remap_decls_p = p->remap_decls_p;
6880 p->remap_decls_p = false;
6881 *handled_ops_p = true;
6883 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6884 move_stmt_op, wi);
6886 p->remap_decls_p = save_remap_decls_p;
6888 break;
6891 return NULL_TREE;
6894 /* Move basic block BB from function CFUN to function DEST_FN. The
6895 block is moved out of the original linked list and placed after
6896 block AFTER in the new list. Also, the block is removed from the
6897 original array of blocks and placed in DEST_FN's array of blocks.
6898 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6899 updated to reflect the moved edges.
6901 The local variables are remapped to new instances, VARS_MAP is used
6902 to record the mapping. */
6904 static void
6905 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6906 basic_block after, bool update_edge_count_p,
6907 struct move_stmt_d *d)
6909 struct control_flow_graph *cfg;
6910 edge_iterator ei;
6911 edge e;
6912 gimple_stmt_iterator si;
6913 unsigned old_len, new_len;
6915 /* Remove BB from dominance structures. */
6916 delete_from_dominance_info (CDI_DOMINATORS, bb);
6918 /* Move BB from its current loop to the copy in the new function. */
6919 if (current_loops)
6921 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6922 if (new_loop)
6923 bb->loop_father = new_loop;
6926 /* Link BB to the new linked list. */
6927 move_block_after (bb, after);
6929 /* Update the edge count in the corresponding flowgraphs. */
6930 if (update_edge_count_p)
6931 FOR_EACH_EDGE (e, ei, bb->succs)
6933 cfun->cfg->x_n_edges--;
6934 dest_cfun->cfg->x_n_edges++;
6937 /* Remove BB from the original basic block array. */
6938 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6939 cfun->cfg->x_n_basic_blocks--;
6941 /* Grow DEST_CFUN's basic block array if needed. */
6942 cfg = dest_cfun->cfg;
6943 cfg->x_n_basic_blocks++;
6944 if (bb->index >= cfg->x_last_basic_block)
6945 cfg->x_last_basic_block = bb->index + 1;
6947 old_len = vec_safe_length (cfg->x_basic_block_info);
6948 if ((unsigned) cfg->x_last_basic_block >= old_len)
6950 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6951 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6954 (*cfg->x_basic_block_info)[bb->index] = bb;
6956 /* Remap the variables in phi nodes. */
6957 for (gphi_iterator psi = gsi_start_phis (bb);
6958 !gsi_end_p (psi); )
6960 gphi *phi = psi.phi ();
6961 use_operand_p use;
6962 tree op = PHI_RESULT (phi);
6963 ssa_op_iter oi;
6964 unsigned i;
6966 if (virtual_operand_p (op))
6968 /* Remove the phi nodes for virtual operands (alias analysis will be
6969 run for the new function, anyway). */
6970 remove_phi_node (&psi, true);
6971 continue;
6974 SET_PHI_RESULT (phi,
6975 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6976 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6978 op = USE_FROM_PTR (use);
6979 if (TREE_CODE (op) == SSA_NAME)
6980 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6983 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6985 location_t locus = gimple_phi_arg_location (phi, i);
6986 tree block = LOCATION_BLOCK (locus);
6988 if (locus == UNKNOWN_LOCATION)
6989 continue;
6990 if (d->orig_block == NULL_TREE || block == d->orig_block)
6992 locus = set_block (locus, d->new_block);
6993 gimple_phi_arg_set_location (phi, i, locus);
6997 gsi_next (&psi);
7000 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7002 gimple *stmt = gsi_stmt (si);
7003 struct walk_stmt_info wi;
7005 memset (&wi, 0, sizeof (wi));
7006 wi.info = d;
7007 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7009 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7011 tree label = gimple_label_label (label_stmt);
7012 int uid = LABEL_DECL_UID (label);
7014 gcc_assert (uid > -1);
7016 old_len = vec_safe_length (cfg->x_label_to_block_map);
7017 if (old_len <= (unsigned) uid)
7019 new_len = 3 * uid / 2 + 1;
7020 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7023 (*cfg->x_label_to_block_map)[uid] = bb;
7024 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7026 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7028 if (uid >= dest_cfun->cfg->last_label_uid)
7029 dest_cfun->cfg->last_label_uid = uid + 1;
7032 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7033 remove_stmt_from_eh_lp_fn (cfun, stmt);
7035 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7036 gimple_remove_stmt_histograms (cfun, stmt);
7038 /* We cannot leave any operands allocated from the operand caches of
7039 the current function. */
7040 free_stmt_operands (cfun, stmt);
7041 push_cfun (dest_cfun);
7042 update_stmt (stmt);
7043 pop_cfun ();
7046 FOR_EACH_EDGE (e, ei, bb->succs)
7047 if (e->goto_locus != UNKNOWN_LOCATION)
7049 tree block = LOCATION_BLOCK (e->goto_locus);
7050 if (d->orig_block == NULL_TREE
7051 || block == d->orig_block)
7052 e->goto_locus = set_block (e->goto_locus, d->new_block);
7056 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7057 the outermost EH region. Use REGION as the incoming base EH region. */
7059 static eh_region
7060 find_outermost_region_in_block (struct function *src_cfun,
7061 basic_block bb, eh_region region)
7063 gimple_stmt_iterator si;
7065 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7067 gimple *stmt = gsi_stmt (si);
7068 eh_region stmt_region;
7069 int lp_nr;
7071 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7072 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7073 if (stmt_region)
7075 if (region == NULL)
7076 region = stmt_region;
7077 else if (stmt_region != region)
7079 region = eh_region_outermost (src_cfun, stmt_region, region);
7080 gcc_assert (region != NULL);
7085 return region;
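/* Callback for duplicate_eh_regions: create a fresh artificial label
   for label DECL, record the DECL -> new label mapping in the hash
   table passed in DATA, and return the new label.  */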
7088 static tree
7089 new_label_mapper (tree decl, void *data)
7091 htab_t hash = (htab_t) data;
7092 struct tree_map *m;
7093 void **slot;
7095 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7097 m = XNEW (struct tree_map);
7098 m->hash = DECL_UID (decl);
7099 m->base.from = decl;
7100 m->to = create_artificial_label (UNKNOWN_LOCATION);
7101 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7102 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7103 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7105 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7106 gcc_assert (*slot == NULL);
7108 *slot = m;
7110 return m->to;
7113 /* Tree walker to replace the decls used inside value expressions by
7114 duplicates. */
7116 static tree
7117 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7119 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7121 switch (TREE_CODE (*tp))
7123 case VAR_DECL:
7124 case PARM_DECL:
7125 case RESULT_DECL:
7126 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7127 break;
7128 default:
7129 break;
7132 if (IS_TYPE_OR_DECL_P (*tp))
7133 *walk_subtrees = false;
7135 return NULL;
7138 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7139 subblocks. */
7141 static void
7142 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7143 tree to_context)
7145 tree *tp, t;
7147 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7149 t = *tp;
7150 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7151 continue;
7152 replace_by_duplicate_decl (&t, vars_map, to_context);
7153 if (t != *tp)
7155 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7157 tree x = DECL_VALUE_EXPR (*tp);
7158 struct replace_decls_d rd = { vars_map, to_context };
7159 x = unshare_expr (x);
7160 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7161 SET_DECL_VALUE_EXPR (t, x);
7162 DECL_HAS_VALUE_EXPR_P (t) = 1;
7164 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7165 *tp = t;
7169 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7170 replace_block_vars_by_duplicates (block, vars_map, to_context);
7173 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7174 from FN1 to FN2. */
7176 static void
7177 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7178 struct loop *loop)
7180 /* Discard it from the old loop array. */
7181 (*get_loops (fn1))[loop->num] = NULL;
7183 /* Place it in the new loop array, assigning it a new number. */
7184 loop->num = number_of_loops (fn2);
7185 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7187 /* Recurse to children. */
7188 for (loop = loop->inner; loop; loop = loop->next)
7189 fixup_loop_arrays_after_move (fn1, fn2, loop);
7192 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7193 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7195 DEBUG_FUNCTION void
7196 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7198 basic_block bb;
7199 edge_iterator ei;
7200 edge e;
7201 bitmap bbs = BITMAP_ALLOC (NULL);
7202 int i;
7204 gcc_assert (entry != NULL);
7205 gcc_assert (entry != exit);
7206 gcc_assert (bbs_p != NULL);
7208 gcc_assert (bbs_p->length () > 0);
7210 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7211 bitmap_set_bit (bbs, bb->index);
7213 gcc_assert (bitmap_bit_p (bbs, entry->index));
7214 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7216 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7218 if (bb == entry)
7220 gcc_assert (single_pred_p (entry));
7221 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7223 else
7224 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7226 e = ei_edge (ei);
7227 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7230 if (bb == exit)
7232 gcc_assert (single_succ_p (exit));
7233 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7235 else
7236 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7238 e = ei_edge (ei);
7239 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7243 BITMAP_FREE (bbs);
7246 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7248 bool
7249 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7251 bitmap release_names = (bitmap)data;
7253 if (TREE_CODE (from) != SSA_NAME)
7254 return true;
7256 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7257 return true;
7260 /* Return LOOP_DIST_ALIAS call if present in BB. */
7262 static gimple *
7263 find_loop_dist_alias (basic_block bb)
7265 gimple *g = last_stmt (bb);
7266 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7267 return NULL;
7269 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7270 gsi_prev (&gsi);
7271 if (gsi_end_p (gsi))
7272 return NULL;
7274 g = gsi_stmt (gsi);
7275 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7276 return g;
7277 return NULL;
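/* I.e. this matches the shape left behind by loop distribution
   (editorial sketch of the expected GIMPLE; names are illustrative):

     x_1 = .LOOP_DIST_ALIAS (orig_loop_num, default_value);
     if (x_1 != 0)
       ...

   where the internal call is the statement immediately preceding the
   block-ending GIMPLE_COND.  */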
7280 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7281 to VALUE and update any immediate uses of its LHS. */
7283 void
7284 fold_loop_internal_call (gimple *g, tree value)
7286 tree lhs = gimple_call_lhs (g);
7287 use_operand_p use_p;
7288 imm_use_iterator iter;
7289 gimple *use_stmt;
7290 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7292 update_call_from_tree (&gsi, value);
7293 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7295 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7296 SET_USE (use_p, value);
7297 update_stmt (use_stmt);
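/* For example, once the versioning decision has been made, a pass can
   discharge the guard (editorial sketch):

     fold_loop_internal_call (g, boolean_true_node);

   which replaces the internal call by the constant and propagates that
   constant into all immediate uses of the call's LHS, typically the
   guarding GIMPLE_COND.  */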
7301 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7302 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7303 single basic block in the original CFG and the new basic block is
7304 returned. DEST_CFUN must not have a CFG yet.
7306 Note that the region need not be a pure SESE region. Blocks inside
7307 the region may contain calls to abort/exit. The only restriction
7308 is that ENTRY_BB should be the only entry point and it must
7309 dominate EXIT_BB.
7311 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7312 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7313 to the new function.
7315 All local variables referenced in the region are assumed to be in
7316 the corresponding BLOCK_VARS and unexpanded variable lists
7317 associated with DEST_CFUN.
7319 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7320 reimplement move_sese_region_to_fn by duplicating the region rather than
7321 moving it. */
7323 basic_block
7324 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7325 basic_block exit_bb, tree orig_block)
7327 vec<basic_block> bbs, dom_bbs;
7328 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7329 basic_block after, bb, *entry_pred, *exit_succ, abb;
7330 struct function *saved_cfun = cfun;
7331 int *entry_flag, *exit_flag;
7332 profile_probability *entry_prob, *exit_prob;
7333 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7334 edge e;
7335 edge_iterator ei;
7336 htab_t new_label_map;
7337 hash_map<void *, void *> *eh_map;
7338 struct loop *loop = entry_bb->loop_father;
7339 struct loop *loop0 = get_loop (saved_cfun, 0);
7340 struct move_stmt_d d;
7342 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7343 region. */
7344 gcc_assert (entry_bb != exit_bb
7345 && (!exit_bb
7346 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7348 /* Collect all the blocks in the region. Manually add ENTRY_BB
7349 because it won't be added by dfs_enumerate_from. */
7350 bbs.create (0);
7351 bbs.safe_push (entry_bb);
7352 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7354 if (flag_checking)
7355 verify_sese (entry_bb, exit_bb, &bbs);
7357 /* The blocks that used to be dominated by something in BBS will now be
7358 dominated by the new block. */
7359 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7360 bbs.address (),
7361 bbs.length ());
7363 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7364 the predecessor edges to ENTRY_BB and the successor edges to
7365 EXIT_BB so that we can re-attach them to the new basic block that
7366 will replace the region. */
7367 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7368 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7369 entry_flag = XNEWVEC (int, num_entry_edges);
7370 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7371 i = 0;
7372 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7374 entry_prob[i] = e->probability;
7375 entry_flag[i] = e->flags;
7376 entry_pred[i++] = e->src;
7377 remove_edge (e);
7380 if (exit_bb)
7382 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7383 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7384 exit_flag = XNEWVEC (int, num_exit_edges);
7385 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7386 i = 0;
7387 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7389 exit_prob[i] = e->probability;
7390 exit_flag[i] = e->flags;
7391 exit_succ[i++] = e->dest;
7392 remove_edge (e);
7395 else
7397 num_exit_edges = 0;
7398 exit_succ = NULL;
7399 exit_flag = NULL;
7400 exit_prob = NULL;
7403 /* Switch context to the child function to initialize DEST_FN's CFG. */
7404 gcc_assert (dest_cfun->cfg == NULL);
7405 push_cfun (dest_cfun);
7407 init_empty_tree_cfg ();
7409 /* Initialize EH information for the new function. */
7410 eh_map = NULL;
7411 new_label_map = NULL;
7412 if (saved_cfun->eh)
7414 eh_region region = NULL;
7416 FOR_EACH_VEC_ELT (bbs, i, bb)
7417 region = find_outermost_region_in_block (saved_cfun, bb, region);
7419 init_eh_for_function ();
7420 if (region != NULL)
7422 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7423 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7424 new_label_mapper, new_label_map);
7428 /* Initialize an empty loop tree. */
7429 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7430 init_loops_structure (dest_cfun, loops, 1);
7431 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7432 set_loops_for_fn (dest_cfun, loops);
7434 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7436 /* Move the outlined loop tree part. */
7437 num_nodes = bbs.length ();
7438 FOR_EACH_VEC_ELT (bbs, i, bb)
7440 if (bb->loop_father->header == bb)
7442 struct loop *this_loop = bb->loop_father;
7443 struct loop *outer = loop_outer (this_loop);
7444 if (outer == loop
7445 /* If the SESE region contains some bbs ending with
7446 a noreturn call, those are considered to belong
7447 to the outermost loop in saved_cfun, rather than
7448 the entry_bb's loop_father. */
7449 || outer == loop0)
7451 if (outer != loop)
7452 num_nodes -= this_loop->num_nodes;
7453 flow_loop_tree_node_remove (bb->loop_father);
7454 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7455 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7458 else if (bb->loop_father == loop0 && loop0 != loop)
7459 num_nodes--;
7461 /* Remove loop exits from the outlined region. */
7462 if (loops_for_fn (saved_cfun)->exits)
7463 FOR_EACH_EDGE (e, ei, bb->succs)
7465 struct loops *l = loops_for_fn (saved_cfun);
7466 loop_exit **slot
7467 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7468 NO_INSERT);
7469 if (slot)
7470 l->exits->clear_slot (slot);
7474 /* Adjust the number of blocks in the tree root of the outlined part. */
7475 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7477 /* Setup a mapping to be used by move_block_to_fn. */
7478 loop->aux = current_loops->tree_root;
7479 loop0->aux = current_loops->tree_root;
7481 /* Fix up orig_loop_num. If the loop referenced by it has been moved
7482 to dest_cfun, update the orig_loop_num field, otherwise clear it. */
7483 struct loop *dloop;
7484 signed char *moved_orig_loop_num = NULL;
7485 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7486 if (dloop->orig_loop_num)
7488 if (moved_orig_loop_num == NULL)
7489 moved_orig_loop_num
7490 = XCNEWVEC (signed char, vec_safe_length (larray));
7491 if ((*larray)[dloop->orig_loop_num] != NULL
7492 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7494 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7495 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7496 moved_orig_loop_num[dloop->orig_loop_num]++;
7497 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7499 else
7501 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7502 dloop->orig_loop_num = 0;
7505 pop_cfun ();
7507 if (moved_orig_loop_num)
7509 FOR_EACH_VEC_ELT (bbs, i, bb)
7511 gimple *g = find_loop_dist_alias (bb);
7512 if (g == NULL)
7513 continue;
7515 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7516 gcc_assert (orig_loop_num
7517 && (unsigned) orig_loop_num < vec_safe_length (larray));
7518 if (moved_orig_loop_num[orig_loop_num] == 2)
7520 /* If we have moved both loops with this orig_loop_num into
7521 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7522 too, update the first argument. */
7523 gcc_assert ((*larray)[orig_loop_num] != NULL
7524 && (get_loop (saved_cfun, orig_loop_num)
7525 == NULL));
7526 tree t = build_int_cst (integer_type_node,
7527 (*larray)[orig_loop_num]->num);
7528 gimple_call_set_arg (g, 0, t);
7529 update_stmt (g);
7530 /* Make sure the following loop will not update it. */
7531 moved_orig_loop_num[orig_loop_num] = 0;
7533 else
7534 /* Otherwise at least one of the loops stayed in saved_cfun.
7535 Remove the LOOP_DIST_ALIAS call. */
7536 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7538 FOR_EACH_BB_FN (bb, saved_cfun)
7540 gimple *g = find_loop_dist_alias (bb);
7541 if (g == NULL)
7542 continue;
7543 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7544 gcc_assert (orig_loop_num
7545 && (unsigned) orig_loop_num < vec_safe_length (larray));
7546 if (moved_orig_loop_num[orig_loop_num])
7547 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7548 of the corresponding loops was moved, remove it. */
7549 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7551 XDELETEVEC (moved_orig_loop_num);
7553 ggc_free (larray);
7555 /* Move blocks from BBS into DEST_CFUN. */
7556 gcc_assert (bbs.length () >= 2);
7557 after = dest_cfun->cfg->x_entry_block_ptr;
7558 hash_map<tree, tree> vars_map;
7560 memset (&d, 0, sizeof (d));
7561 d.orig_block = orig_block;
7562 d.new_block = DECL_INITIAL (dest_cfun->decl);
7563 d.from_context = cfun->decl;
7564 d.to_context = dest_cfun->decl;
7565 d.vars_map = &vars_map;
7566 d.new_label_map = new_label_map;
7567 d.eh_map = eh_map;
7568 d.remap_decls_p = true;
7570 if (gimple_in_ssa_p (cfun))
7571 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7573 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7574 set_ssa_default_def (dest_cfun, arg, narg);
7575 vars_map.put (arg, narg);
7578 FOR_EACH_VEC_ELT (bbs, i, bb)
7580 /* No need to update edge counts on the last block. It has
7581 already been updated earlier when we detached the region from
7582 the original CFG. */
7583 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7584 after = bb;
7587 loop->aux = NULL;
7588 loop0->aux = NULL;
7589 /* Loop sizes are no longer correct, fix them up. */
7590 loop->num_nodes -= num_nodes;
7591 for (struct loop *outer = loop_outer (loop);
7592 outer; outer = loop_outer (outer))
7593 outer->num_nodes -= num_nodes;
7594 loop0->num_nodes -= bbs.length () - num_nodes;
7596 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7598 struct loop *aloop;
7599 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7600 if (aloop != NULL)
7602 if (aloop->simduid)
7604 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7605 d.to_context);
7606 dest_cfun->has_simduid_loops = true;
7608 if (aloop->force_vectorize)
7609 dest_cfun->has_force_vectorize_loops = true;
7613 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7614 if (orig_block)
7616 tree block;
7617 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7618 == NULL_TREE);
7619 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7620 = BLOCK_SUBBLOCKS (orig_block);
7621 for (block = BLOCK_SUBBLOCKS (orig_block);
7622 block; block = BLOCK_CHAIN (block))
7623 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7624 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7627 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7628 &vars_map, dest_cfun->decl);
7630 if (new_label_map)
7631 htab_delete (new_label_map);
7632 if (eh_map)
7633 delete eh_map;
7635 if (gimple_in_ssa_p (cfun))
7637 /* We need to release ssa-names in a defined order, so first find them,
7638 and then iterate in ascending version order. */
7639 bitmap release_names = BITMAP_ALLOC (NULL);
7640 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7641 bitmap_iterator bi;
7642 unsigned i;
7643 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7644 release_ssa_name (ssa_name (i));
7645 BITMAP_FREE (release_names);
7648 /* Rewire the entry and exit blocks. The successor to the entry
7649 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7650 the child function. Similarly, the predecessor of DEST_FN's
7651 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7652 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7653 various CFG manipulation function get to the right CFG.
7655 FIXME, this is silly. The CFG ought to become a parameter to
7656 these helpers. */
7657 push_cfun (dest_cfun);
7658 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7659 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7660 if (exit_bb)
7662 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7663 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7665 else
7666 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7667 pop_cfun ();
7669 /* Back in the original function, the SESE region has disappeared,
7670 create a new basic block in its place. */
7671 bb = create_empty_bb (entry_pred[0]);
7672 if (current_loops)
7673 add_bb_to_loop (bb, loop);
7674 for (i = 0; i < num_entry_edges; i++)
7676 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7677 e->probability = entry_prob[i];
7680 for (i = 0; i < num_exit_edges; i++)
7682 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7683 e->probability = exit_prob[i];
7686 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7687 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7688 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7689 dom_bbs.release ();
7691 if (exit_bb)
7693 free (exit_prob);
7694 free (exit_flag);
7695 free (exit_succ);
7697 free (entry_prob);
7698 free (entry_flag);
7699 free (entry_pred);
7700 bbs.release ();
7702 return bb;
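/* Editorial note: this function is the workhorse of outlining; e.g.
   the OMP expansion code (expand_omp_taskreg in omp-expand.c) moves
   the body of a parallel or task construct into the child function
   this way.  DEST_CFUN must be freshly created and CFG-less, as
   asserted above.  */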
7705 /* Dump default def DEF to file FILE using FLAGS and indentation
7706 SPC. */
7708 static void
7709 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7711 for (int i = 0; i < spc; ++i)
7712 fprintf (file, " ");
7713 dump_ssaname_info_to_file (file, def, spc);
7715 print_generic_expr (file, TREE_TYPE (def), flags);
7716 fprintf (file, " ");
7717 print_generic_expr (file, def, flags);
7718 fprintf (file, " = ");
7719 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7720 fprintf (file, ";\n");
7723 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7725 static void
7726 print_no_sanitize_attr_value (FILE *file, tree value)
7728 unsigned int flags = tree_to_uhwi (value);
7729 bool first = true;
7730 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7732 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7734 if (!first)
7735 fprintf (file, " | ");
7736 fprintf (file, "%s", sanitizer_opts[i].name);
7737 first = false;
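/* E.g. a no_sanitize attribute value with only the SANITIZE_THREAD bit
   set prints as "thread"; multiple recognized bits are joined with
   " | " (editorial example).  */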
7742 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7745 void
7746 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7748 tree arg, var, old_current_fndecl = current_function_decl;
7749 struct function *dsf;
7750 bool ignore_topmost_bind = false, any_var = false;
7751 basic_block bb;
7752 tree chain;
7753 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7754 && decl_is_tm_clone (fndecl));
7755 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7757 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7759 fprintf (file, "__attribute__((");
7761 bool first = true;
7762 tree chain;
7763 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7764 first = false, chain = TREE_CHAIN (chain))
7766 if (!first)
7767 fprintf (file, ", ");
7769 tree name = get_attribute_name (chain);
7770 print_generic_expr (file, name, dump_flags);
7771 if (TREE_VALUE (chain) != NULL_TREE)
7773 fprintf (file, " (");
7775 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7776 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7777 else
7778 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7779 fprintf (file, ")");
7783 fprintf (file, "))\n");
7786 current_function_decl = fndecl;
7787 if (flags & TDF_GIMPLE)
7789 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7790 dump_flags | TDF_SLIM);
7791 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7793 else
7794 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7796 arg = DECL_ARGUMENTS (fndecl);
7797 while (arg)
7799 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7800 fprintf (file, " ");
7801 print_generic_expr (file, arg, dump_flags);
7802 if (DECL_CHAIN (arg))
7803 fprintf (file, ", ");
7804 arg = DECL_CHAIN (arg);
7806 fprintf (file, ")\n");
7808 dsf = DECL_STRUCT_FUNCTION (fndecl);
7809 if (dsf && (flags & TDF_EH))
7810 dump_eh_tree (file, dsf);
7812 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7814 dump_node (fndecl, TDF_SLIM | flags, file);
7815 current_function_decl = old_current_fndecl;
7816 return;
7819 /* When GIMPLE is lowered, the variables are no longer available in
7820 BIND_EXPRs, so display them separately. */
7821 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7823 unsigned ix;
7824 ignore_topmost_bind = true;
7826 fprintf (file, "{\n");
7827 if (gimple_in_ssa_p (fun)
7828 && (flags & TDF_ALIAS))
7830 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7831 arg = DECL_CHAIN (arg))
7833 tree def = ssa_default_def (fun, arg);
7834 if (def)
7835 dump_default_def (file, def, 2, flags);
7838 tree res = DECL_RESULT (fun->decl);
7839 if (res != NULL_TREE
7840 && DECL_BY_REFERENCE (res))
7842 tree def = ssa_default_def (fun, res);
7843 if (def)
7844 dump_default_def (file, def, 2, flags);
7847 tree static_chain = fun->static_chain_decl;
7848 if (static_chain != NULL_TREE)
7850 tree def = ssa_default_def (fun, static_chain);
7851 if (def)
7852 dump_default_def (file, def, 2, flags);
7856 if (!vec_safe_is_empty (fun->local_decls))
7857 FOR_EACH_LOCAL_DECL (fun, ix, var)
7859 print_generic_decl (file, var, flags);
7860 fprintf (file, "\n");
7862 any_var = true;
7865 tree name;
7867 if (gimple_in_ssa_p (cfun))
7868 FOR_EACH_SSA_NAME (ix, name, cfun)
7870 if (!SSA_NAME_VAR (name))
7872 fprintf (file, " ");
7873 print_generic_expr (file, TREE_TYPE (name), flags);
7874 fprintf (file, " ");
7875 print_generic_expr (file, name, flags);
7876 fprintf (file, ";\n");
7878 any_var = true;
7883 if (fun && fun->decl == fndecl
7884 && fun->cfg
7885 && basic_block_info_for_fn (fun))
7887 /* If the CFG has been built, emit a CFG-based dump. */
7888 if (!ignore_topmost_bind)
7889 fprintf (file, "{\n");
7891 if (any_var && n_basic_blocks_for_fn (fun))
7892 fprintf (file, "\n");
7894 FOR_EACH_BB_FN (bb, fun)
7895 dump_bb (file, bb, 2, flags);
7897 fprintf (file, "}\n");
7899 else if (fun->curr_properties & PROP_gimple_any)
7901 /* The function is now in GIMPLE form but the CFG has not been
7902 built yet. Emit the single sequence of GIMPLE statements
7903 that make up its body. */
7904 gimple_seq body = gimple_body (fndecl);
7906 if (gimple_seq_first_stmt (body)
7907 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7908 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7909 print_gimple_seq (file, body, 0, flags);
7910 else
7912 if (!ignore_topmost_bind)
7913 fprintf (file, "{\n");
7915 if (any_var)
7916 fprintf (file, "\n");
7918 print_gimple_seq (file, body, 2, flags);
7919 fprintf (file, "}\n");
7922 else
7924 int indent;
7926 /* Make a tree based dump. */
7927 chain = DECL_SAVED_TREE (fndecl);
7928 if (chain && TREE_CODE (chain) == BIND_EXPR)
7930 if (ignore_topmost_bind)
7932 chain = BIND_EXPR_BODY (chain);
7933 indent = 2;
7935 else
7936 indent = 0;
7938 else
7940 if (!ignore_topmost_bind)
7942 fprintf (file, "{\n");
7943 /* No topmost bind, pretend it's ignored for later. */
7944 ignore_topmost_bind = true;
7946 indent = 2;
7949 if (any_var)
7950 fprintf (file, "\n");
7952 print_generic_stmt_indented (file, chain, flags, indent);
7953 if (ignore_topmost_bind)
7954 fprintf (file, "}\n");
7957 if (flags & TDF_ENUMERATE_LOCALS)
7958 dump_enumerated_decls (file, flags);
7959 fprintf (file, "\n\n");
7961 current_function_decl = old_current_fndecl;
7964 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7966 DEBUG_FUNCTION void
7967 debug_function (tree fn, dump_flags_t flags)
7969 dump_function_to_file (fn, stderr, flags);
7973 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7975 static void
7976 print_pred_bbs (FILE *file, basic_block bb)
7978 edge e;
7979 edge_iterator ei;
7981 FOR_EACH_EDGE (e, ei, bb->preds)
7982 fprintf (file, "bb_%d ", e->src->index);
7986 /* Print on FILE the indexes for the successors of basic_block BB. */
7988 static void
7989 print_succ_bbs (FILE *file, basic_block bb)
7991 edge e;
7992 edge_iterator ei;
7994 FOR_EACH_EDGE (e, ei, bb->succs)
7995 fprintf (file, "bb_%d ", e->dest->index);
7998 /* Print to FILE the basic block BB according to the VERBOSITY level. */
8000 void
8001 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8003 char *s_indent = (char *) alloca ((size_t) indent + 1);
8004 memset ((void *) s_indent, ' ', (size_t) indent);
8005 s_indent[indent] = '\0';
8007 /* Print basic_block's header. */
8008 if (verbosity >= 2)
8010 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8011 print_pred_bbs (file, bb);
8012 fprintf (file, "}, succs = {");
8013 print_succ_bbs (file, bb);
8014 fprintf (file, "})\n");
8017 /* Print basic_block's body. */
8018 if (verbosity >= 3)
8020 fprintf (file, "%s {\n", s_indent);
8021 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8022 fprintf (file, "%s }\n", s_indent);
8026 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8028 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
8029 the VERBOSITY level, this outputs the contents of the loop, or just
8030 its structure. */
8032 static void
8033 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8035 char *s_indent;
8036 basic_block bb;
8038 if (loop == NULL)
8039 return;
8041 s_indent = (char *) alloca ((size_t) indent + 1);
8042 memset ((void *) s_indent, ' ', (size_t) indent);
8043 s_indent[indent] = '\0';
8045 /* Print loop's header. */
8046 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8047 if (loop->header)
8048 fprintf (file, "header = %d", loop->header->index);
8049 else
8051 fprintf (file, "deleted)\n");
8052 return;
8054 if (loop->latch)
8055 fprintf (file, ", latch = %d", loop->latch->index);
8056 else
8057 fprintf (file, ", multiple latches");
8058 fprintf (file, ", niter = ");
8059 print_generic_expr (file, loop->nb_iterations);
8061 if (loop->any_upper_bound)
8063 fprintf (file, ", upper_bound = ");
8064 print_decu (loop->nb_iterations_upper_bound, file);
8066 if (loop->any_likely_upper_bound)
8068 fprintf (file, ", likely_upper_bound = ");
8069 print_decu (loop->nb_iterations_likely_upper_bound, file);
8072 if (loop->any_estimate)
8074 fprintf (file, ", estimate = ");
8075 print_decu (loop->nb_iterations_estimate, file);
8077 if (loop->unroll)
8078 fprintf (file, ", unroll = %d", loop->unroll);
8079 fprintf (file, ")\n");
8081 /* Print loop's body. */
8082 if (verbosity >= 1)
8084 fprintf (file, "%s{\n", s_indent);
8085 FOR_EACH_BB_FN (bb, cfun)
8086 if (bb->loop_father == loop)
8087 print_loops_bb (file, bb, indent, verbosity);
8089 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8090 fprintf (file, "%s}\n", s_indent);
8094 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8095 spaces. Depending on the VERBOSITY level, this outputs the contents
8096 of the loop, or just its structure. */
8098 static void
8099 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8100 int verbosity)
8102 if (loop == NULL)
8103 return;
8105 print_loop (file, loop, indent, verbosity);
8106 print_loop_and_siblings (file, loop->next, indent, verbosity);
8109 /* Follow a CFG edge from the entry point of the function, and on entry
8110 of a loop, pretty print the loop structure on FILE. */
8112 void
8113 print_loops (FILE *file, int verbosity)
8115 basic_block bb;
8117 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8118 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8119 if (bb && bb->loop_father)
8120 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8123 /* Dump a loop. */
8125 DEBUG_FUNCTION void
8126 debug (struct loop &ref)
8128 print_loop (stderr, &ref, 0, /*verbosity*/0);
8131 DEBUG_FUNCTION void
8132 debug (struct loop *ptr)
8134 if (ptr)
8135 debug (*ptr);
8136 else
8137 fprintf (stderr, "<nil>\n");
8140 /* Dump a loop verbosely. */
8142 DEBUG_FUNCTION void
8143 debug_verbose (struct loop &ref)
8145 print_loop (stderr, &ref, 0, /*verbosity*/3);
8148 DEBUG_FUNCTION void
8149 debug_verbose (struct loop *ptr)
8151 if (ptr)
8152 debug_verbose (*ptr);
8153 else
8154 fprintf (stderr, "<nil>\n");
8158 /* Debug the loop structure at tree level, at some VERBOSITY level. */
8160 DEBUG_FUNCTION void
8161 debug_loops (int verbosity)
8163 print_loops (stderr, verbosity);
8166 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8168 DEBUG_FUNCTION void
8169 debug_loop (struct loop *loop, int verbosity)
8171 print_loop (stderr, loop, 0, verbosity);
8174 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8175 level. */
8177 DEBUG_FUNCTION void
8178 debug_loop_num (unsigned num, int verbosity)
8180 debug_loop (get_loop (cfun, num), verbosity);
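/* Editorial usage sketch: the DEBUG_FUNCTION entry points above are
   meant to be called from the debugger, e.g.

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (2, 1)

   to dump the whole loop tree verbosely, or just loop number 2.  */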
8183 /* Return true if BB ends with a call, possibly followed by some
8184 instructions that must stay with the call. Return false
8185 otherwise. */
8187 static bool
8188 gimple_block_ends_with_call_p (basic_block bb)
8190 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8191 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8195 /* Return true if BB ends with a conditional branch. Return false
8196 otherwise. */
8198 static bool
8199 gimple_block_ends_with_condjump_p (const_basic_block bb)
8201 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8202 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8206 /* Return true if statement T may terminate execution of BB in ways not
8207 explicitly represented in the CFG. */
8209 bool
8210 stmt_can_terminate_bb_p (gimple *t)
8212 tree fndecl = NULL_TREE;
8213 int call_flags = 0;
8215 /* An EH exception not handled internally terminates execution of the whole
8216 function. */
8217 if (stmt_can_throw_external (t))
8218 return true;
8220 /* NORETURN and LONGJMP calls already have an edge to exit.
8221 CONST and PURE calls do not need one.
8222 We don't currently check for CONST and PURE here, although
8223 it would be a good idea, because those attributes are
8224 figured out from the RTL in mark_constant_function, and
8225 the counter incrementation code from -fprofile-arcs
8226 leads to different results from -fbranch-probabilities. */
8227 if (is_gimple_call (t))
8229 fndecl = gimple_call_fndecl (t);
8230 call_flags = gimple_call_flags (t);
8233 if (is_gimple_call (t)
8234 && fndecl
8235 && DECL_BUILT_IN (fndecl)
8236 && (call_flags & ECF_NOTHROW)
8237 && !(call_flags & ECF_RETURNS_TWICE)
8238 /* fork() doesn't really return twice, but the effect of
8239 wrapping it in __gcov_fork() which calls __gcov_flush()
8240 and clears the counters before forking has the same
8241 effect as returning twice. Force a fake edge. */
8242 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8243 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8244 return false;
8246 if (is_gimple_call (t))
8248 edge_iterator ei;
8249 edge e;
8250 basic_block bb;
8252 if (call_flags & (ECF_PURE | ECF_CONST)
8253 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8254 return false;
8256 /* A function call may do longjmp, terminate the program or do other
8257 things. Special case noreturn calls, for which the fact that they do
8258 not fall through is already represented by the lack of edges out of T. */
8259 if (!(call_flags & ECF_NORETURN))
8260 return true;
8262 bb = gimple_bb (t);
8263 FOR_EACH_EDGE (e, ei, bb->succs)
8264 if ((e->flags & EDGE_FAKE) == 0)
8265 return true;
8268 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8269 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8270 return true;
8272 return false;
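/* Editorial examples of the above: a call to a const nothrow builtin
   such as __builtin_sqrt yields false (it cannot end the block in a
   way the CFG does not show), whereas a plain external call with
   unknown side effects yields true, since it may longjmp or exit.  */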
8276 /* Add fake edges to the function exit for any non constant and non
8277 noreturn calls (or noreturn calls with EH/abnormal edges),
8278 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8279 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8280 that were split.
8282 The goal is to expose cases in which entering a basic block does
8283 not imply that all subsequent instructions must be executed. */
8285 static int
8286 gimple_flow_call_edges_add (sbitmap blocks)
8288 int i;
8289 int blocks_split = 0;
8290 int last_bb = last_basic_block_for_fn (cfun);
8291 bool check_last_block = false;
8293 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8294 return 0;
8296 if (! blocks)
8297 check_last_block = true;
8298 else
8299 check_last_block = bitmap_bit_p (blocks,
8300 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8302 /* In the last basic block, before epilogue generation, there will be
8303 a fallthru edge to EXIT. Special care is required if the last insn
8304 of the last basic block is a call because make_edge folds duplicate
8305 edges, which would result in the fallthru edge also being marked
8306 fake, which would result in the fallthru edge being removed by
8307 remove_fake_edges, which would result in an invalid CFG.
8309 Moreover, we can't elide the outgoing fake edge, since the block
8310 profiler needs to take this into account in order to solve the minimal
8311 spanning tree in the case that the call doesn't return.
8313 Handle this by adding a dummy instruction in a new last basic block. */
8314 if (check_last_block)
8316 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8317 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8318 gimple *t = NULL;
8320 if (!gsi_end_p (gsi))
8321 t = gsi_stmt (gsi);
8323 if (t && stmt_can_terminate_bb_p (t))
8325 edge e;
8327 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8328 if (e)
8330 gsi_insert_on_edge (e, gimple_build_nop ());
8331 gsi_commit_edge_inserts ();
8336 /* Now add fake edges to the function exit for any non-constant
8337 calls since there is no way that we can determine if they will
8338 return or not... */
8339 for (i = 0; i < last_bb; i++)
8341 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8342 gimple_stmt_iterator gsi;
8343 gimple *stmt, *last_stmt;
8345 if (!bb)
8346 continue;
8348 if (blocks && !bitmap_bit_p (blocks, i))
8349 continue;
8351 gsi = gsi_last_nondebug_bb (bb);
8352 if (!gsi_end_p (gsi))
8354 last_stmt = gsi_stmt (gsi);
8357 stmt = gsi_stmt (gsi);
8358 if (stmt_can_terminate_bb_p (stmt))
8360 edge e;
8362 /* The handling above of the final block before the
8363 epilogue should be enough to verify that there is
8364 no edge to the exit block in CFG already.
8365 Calling make_edge in such case would cause us to
8366 mark that edge as fake and remove it later. */
8367 if (flag_checking && stmt == last_stmt)
8369 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8370 gcc_assert (e == NULL);
8373 /* Note that the following may create a new basic block
8374 and renumber the existing basic blocks. */
8375 if (stmt != last_stmt)
8377 e = split_block (bb, stmt);
8378 if (e)
8379 blocks_split++;
8381 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8382 e->probability = profile_probability::guessed_never ();
8384 gsi_prev (&gsi);
8386 while (!gsi_end_p (gsi));
8390 if (blocks_split)
8391 checking_verify_flow_info ();
8393 return blocks_split;
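/* Illustration (editor's sketch): given a block

     BB2:  a_1 = foo ();     <- may not return
           b_2 = a_1 + 1;

   the loop above splits BB2 after the call and adds a fake edge:

     BB2:  a_1 = foo ();  --EDGE_FAKE--> EXIT
     BB4:  b_2 = a_1 + 1;

   so the block profiler never assumes that reaching the call implies
   reaching the statements after it.  */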
8396 /* Removes edge E and all the blocks dominated by it, and updates dominance
8397 information. The IL in E->src needs to be updated separately.
8398 If dominance info is not available, only the edge E is removed. */
8400 void
8401 remove_edge_and_dominated_blocks (edge e)
8403 vec<basic_block> bbs_to_remove = vNULL;
8404 vec<basic_block> bbs_to_fix_dom = vNULL;
8405 edge f;
8406 edge_iterator ei;
8407 bool none_removed = false;
8408 unsigned i;
8409 basic_block bb, dbb;
8410 bitmap_iterator bi;
8412 /* If we are removing a path inside a non-root loop, this may change
8413 loop ownership of blocks or remove loops. Mark loops for fixup. */
8414 if (current_loops
8415 && loop_outer (e->src->loop_father) != NULL
8416 && e->src->loop_father == e->dest->loop_father)
8417 loops_state_set (LOOPS_NEED_FIXUP);
8419 if (!dom_info_available_p (CDI_DOMINATORS))
8421 remove_edge (e);
8422 return;
8425 /* No updating is needed for edges to exit. */
8426 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8428 if (cfgcleanup_altered_bbs)
8429 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8430 remove_edge (e);
8431 return;
8434 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8435 that is not dominated by E->dest, then this set is empty. Otherwise,
8436 all the basic blocks dominated by E->dest are removed.
8438 Also, in DF_IDOM we store the immediate dominators of the blocks in
8439 the dominance frontier of E (i.e., of the successors of the
8440 removed blocks, if there are any, and of E->dest otherwise). */
8441 FOR_EACH_EDGE (f, ei, e->dest->preds)
8443 if (f == e)
8444 continue;
8446 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8448 none_removed = true;
8449 break;
8453 auto_bitmap df, df_idom;
8454 if (none_removed)
8455 bitmap_set_bit (df_idom,
8456 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8457 else
8459 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8460 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8462 FOR_EACH_EDGE (f, ei, bb->succs)
8464 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8465 bitmap_set_bit (df, f->dest->index);
8468 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8469 bitmap_clear_bit (df, bb->index);
8471 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8473 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8474 bitmap_set_bit (df_idom,
8475 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8479 if (cfgcleanup_altered_bbs)
8481 /* Record the set of the altered basic blocks. */
8482 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8483 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8486 /* Remove E and the cancelled blocks. */
8487 if (none_removed)
8488 remove_edge (e);
8489 else
8491 /* Walk backwards so as to get a chance to substitute all
8492 released DEFs into debug stmts. See
8493 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8494 details. */
8495 for (i = bbs_to_remove.length (); i-- > 0; )
8496 delete_basic_block (bbs_to_remove[i]);
8499 /* Update the dominance information. The immediate dominator may change only
8500 for blocks whose immediate dominator belongs to DF_IDOM:
8502 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8503 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8504 Z dominates X after the removal. Before removal, there exists a path P
8505 from Y to X that avoids Z. Let F be the last edge on P that is
8506 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8507 dominates W, and because of P, Z does not dominate W), and W belongs to
8508 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8509 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8511 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8512 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8513 dbb;
8514 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8515 bbs_to_fix_dom.safe_push (dbb);
8518 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8520 bbs_to_remove.release ();
8521 bbs_to_fix_dom.release ();
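/* Worked example (editor's illustration): with edges A->B, A->C, B->D
   and C->D, removing edge A->B deletes B, since B has no predecessor
   other than the removed edge and so every block dominated by B (just
   B itself) goes away.  D, the successor of the removed block, forms
   the dominance frontier, so DF_IDOM = { idom (D) } = { A };
   iterate_fix_dominators then walks A's dominator children and
   corrects idom (D) from A to C, because the only remaining path to D
   goes through C.  */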
8524 /* Purge dead EH edges from basic block BB. */
8526 bool
8527 gimple_purge_dead_eh_edges (basic_block bb)
8529 bool changed = false;
8530 edge e;
8531 edge_iterator ei;
8532 gimple *stmt = last_stmt (bb);
8534 if (stmt && stmt_can_throw_internal (stmt))
8535 return false;
8537 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8539 if (e->flags & EDGE_EH)
8541 remove_edge_and_dominated_blocks (e);
8542 changed = true;
8544 else
8545 ei_next (&ei);
8548 return changed;
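/* Typical use (editor's sketch), mirroring execute_fixup_cfg below:
   after folding turns the last statement of a block into one that can
   no longer throw, the stale EH edge is purged:

     if (maybe_clean_eh_stmt (stmt)
         && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       todo |= TODO_cleanup_cfg;  */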
8551 /* Purge dead EH edges from basic blocks listed in BLOCKS. */
8553 bool
8554 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8556 bool changed = false;
8557 unsigned i;
8558 bitmap_iterator bi;
8560 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8562 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8564 /* Earlier gimple_purge_dead_eh_edges could have removed
8565 this basic block already. */
8566 gcc_assert (bb || changed);
8567 if (bb != NULL)
8568 changed |= gimple_purge_dead_eh_edges (bb);
8571 return changed;
8574 /* Purge dead abnormal call edges from basic block BB. */
8576 bool
8577 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8579 bool changed = false;
8580 edge e;
8581 edge_iterator ei;
8582 gimple *stmt = last_stmt (bb);
8584 if (!cfun->has_nonlocal_label
8585 && !cfun->calls_setjmp)
8586 return false;
8588 if (stmt && stmt_can_make_abnormal_goto (stmt))
8589 return false;
8591 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8593 if (e->flags & EDGE_ABNORMAL)
8595 if (e->flags & EDGE_FALLTHRU)
8596 e->flags &= ~EDGE_ABNORMAL;
8597 else
8598 remove_edge_and_dominated_blocks (e);
8599 changed = true;
8601 else
8602 ei_next (&ei);
8605 return changed;
8608 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS. */
8610 bool
8611 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8613 bool changed = false;
8614 unsigned i;
8615 bitmap_iterator bi;
8617 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8619 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8621 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8622 this basic block already. */
8623 gcc_assert (bb || changed);
8624 if (bb != NULL)
8625 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8628 return changed;
8631 /* This function is called whenever a new edge is created or
8632 redirected. */
8634 static void
8635 gimple_execute_on_growing_pred (edge e)
8637 basic_block bb = e->dest;
8639 if (!gimple_seq_empty_p (phi_nodes (bb)))
8640 reserve_phi_args_for_new_edge (bb);
8643 /* This function is called immediately before edge E is removed from
8644 the edge vector E->dest->preds. */
8646 static void
8647 gimple_execute_on_shrinking_pred (edge e)
8649 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8650 remove_phi_args (e);
8653 /*---------------------------------------------------------------------------
8654 Helper functions for Loop versioning
8655 ---------------------------------------------------------------------------*/
8657 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block
8658 is a copy of 'first'. Both of them are dominated by the 'new_head' basic
8659 block. When 'new_head' was created by splitting 'second's incoming edge,
8660 it received phi arguments on that edge from split_edge (). Later, an
8661 additional edge 'e' was created to connect 'new_head' and 'first'. This
8662 routine now adds, on the additional edge 'e', the phi args that the
8663 new_head-to-second edge received as part of the edge splitting. */
8665 static void
8666 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8667 basic_block new_head, edge e)
8669 gphi *phi1, *phi2;
8670 gphi_iterator psi1, psi2;
8671 tree def;
8672 edge e2 = find_edge (new_head, second);
8674 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8675 edge, we should always have an edge from NEW_HEAD to SECOND. */
8676 gcc_assert (e2 != NULL);
8678 /* Browse all 'second' basic block phi nodes and add phi args to
8679 edge 'e' for 'first' head. PHI args are always in correct order. */
8681 for (psi2 = gsi_start_phis (second),
8682 psi1 = gsi_start_phis (first);
8683 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8684 gsi_next (&psi2), gsi_next (&psi1))
8686 phi1 = psi1.phi ();
8687 phi2 = psi2.phi ();
8688 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8689 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
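/* Sketch of the loop-versioning CFG this adjusts (editor's
   illustration):

        NEW_HEAD:  if (cond)
                  /e        \e2
             FIRST           SECOND   <- SECOND is the copy of FIRST

   split_edge () already gave edge e2 its PHI arguments in SECOND; the
   loop above copies each of them onto the new edge E, so the PHI
   nodes in FIRST see the same values when entered from NEW_HEAD.  */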
8694 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8695 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8696 the destination of the ELSE part. */
8698 static void
8699 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8700 basic_block second_head ATTRIBUTE_UNUSED,
8701 basic_block cond_bb, void *cond_e)
8703 gimple_stmt_iterator gsi;
8704 gimple *new_cond_expr;
8705 tree cond_expr = (tree) cond_e;
8706 edge e0;
8708 /* Build new conditional expr */
8709 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8710 NULL_TREE, NULL_TREE);
8712 /* Add new cond in cond_bb. */
8713 gsi = gsi_last_bb (cond_bb);
8714 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8716 /* Adjust edges appropriately to connect new head with first head
8717 as well as second head. */
8718 e0 = single_succ_edge (cond_bb);
8719 e0->flags &= ~EDGE_FALLTHRU;
8720 e0->flags |= EDGE_FALSE_VALUE;
8724 /* Do book-keeping of basic block BB for the profile consistency checker.
8725 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8726 then do post-pass accounting. Store the counts in RECORD. */
8727 static void
8728 gimple_account_profile_record (basic_block bb, int after_pass,
8729 struct profile_record *record)
8731 gimple_stmt_iterator i;
8732 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8734 record->size[after_pass]
8735 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8736 if (bb->count.initialized_p ())
8737 record->time[after_pass]
8738 += estimate_num_insns (gsi_stmt (i),
8739 &eni_time_weights) * bb->count.to_gcov_type ();
8740 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8741 record->time[after_pass]
8742 += estimate_num_insns (gsi_stmt (i),
8743 &eni_time_weights) * bb->count.to_frequency (cfun);
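/* For instance (editor's note): a statement whose eni_time_weights
   estimate is 4, in a block executed with count 100, contributes 4 to
   record->size[] and 400 to record->time[]; comparing the [0]
   (pre-pass) and [1] (post-pass) totals lets the consistency checker
   flag passes that worsen the profile-weighted size or time.  */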
8747 struct cfg_hooks gimple_cfg_hooks = {
8748 "gimple",
8749 gimple_verify_flow_info,
8750 gimple_dump_bb, /* dump_bb */
8751 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8752 create_bb, /* create_basic_block */
8753 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8754 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8755 gimple_can_remove_branch_p, /* can_remove_branch_p */
8756 remove_bb, /* delete_basic_block */
8757 gimple_split_block, /* split_block */
8758 gimple_move_block_after, /* move_block_after */
8759 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8760 gimple_merge_blocks, /* merge_blocks */
8761 gimple_predict_edge, /* predict_edge */
8762 gimple_predicted_by_p, /* predicted_by_p */
8763 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8764 gimple_duplicate_bb, /* duplicate_block */
8765 gimple_split_edge, /* split_edge */
8766 gimple_make_forwarder_block, /* make_forwarder_block */
8767 NULL, /* tidy_fallthru_edge */
8768 NULL, /* force_nonfallthru */
8769 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8770 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8771 gimple_flow_call_edges_add, /* flow_call_edges_add */
8772 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8773 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8774 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8775 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8776 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8777 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8778 flush_pending_stmts, /* flush_pending_stmts */
8779 gimple_empty_block_p, /* block_empty_p */
8780 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8781 gimple_account_profile_record,
8785 /* Split all critical edges. */
8787 unsigned int
8788 split_critical_edges (void)
8790 basic_block bb;
8791 edge e;
8792 edge_iterator ei;
8794 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8795 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8796 mappings around the calls to split_edge. */
8797 start_recording_case_labels ();
8798 FOR_ALL_BB_FN (bb, cfun)
8800 FOR_EACH_EDGE (e, ei, bb->succs)
8802 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8803 split_edge (e);
8804 /* PRE inserts statements on edges and expects that, since
8805 split_critical_edges was run beforehand, committing edge
8806 insertions will not split more edges. In addition to critical
8807 edges we must split edges whose destination has multiple
8808 predecessors or PHI nodes while the source block ends in a
8809 control flow statement, such as RESX. Go ahead and split them
8810 too. This matches the logic in gimple_find_edge_insert_loc. */
8811 else if ((!single_pred_p (e->dest)
8812 || !gimple_seq_empty_p (phi_nodes (e->dest))
8813 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8814 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8815 && !(e->flags & EDGE_ABNORMAL))
8817 gimple_stmt_iterator gsi;
8819 gsi = gsi_last_bb (e->src);
8820 if (!gsi_end_p (gsi)
8821 && stmt_ends_bb_p (gsi_stmt (gsi))
8822 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8823 && !gimple_call_builtin_p (gsi_stmt (gsi),
8824 BUILT_IN_RETURN)))
8825 split_edge (e);
8829 end_recording_case_labels ();
8830 return 0;
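/* Example (editor's illustration): with edges A->B, A->C and D->C,
   edge A->C is critical: its source has two successors and its
   destination has two predecessors (this is what EDGE_CRITICAL_P
   tests).  split_edge inserts a new block N, turning A->C into A->N
   plus N->C, so statements committed on the edge cannot disturb the
   A->B or D->C paths.  */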
8833 namespace {
8835 const pass_data pass_data_split_crit_edges =
8837 GIMPLE_PASS, /* type */
8838 "crited", /* name */
8839 OPTGROUP_NONE, /* optinfo_flags */
8840 TV_TREE_SPLIT_EDGES, /* tv_id */
8841 PROP_cfg, /* properties_required */
8842 PROP_no_crit_edges, /* properties_provided */
8843 0, /* properties_destroyed */
8844 0, /* todo_flags_start */
8845 0, /* todo_flags_finish */
8848 class pass_split_crit_edges : public gimple_opt_pass
8850 public:
8851 pass_split_crit_edges (gcc::context *ctxt)
8852 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8855 /* opt_pass methods: */
8856 virtual unsigned int execute (function *) { return split_critical_edges (); }
8858 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8859 }; // class pass_split_crit_edges
8861 } // anon namespace
8863 gimple_opt_pass *
8864 make_pass_split_crit_edges (gcc::context *ctxt)
8866 return new pass_split_crit_edges (ctxt);
8870 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8871 in basic block BB, splitting the block as appropriate and
8872 creating a new conditionally executed basic block.
8873 Update the profile so the new bb is visited with probability PROB.
8874 Return the created basic block. */
8875 basic_block
8876 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8877 profile_probability prob)
8879 edge fall = split_block (bb, stmt);
8880 gimple_stmt_iterator iter = gsi_last_bb (bb);
8881 basic_block new_bb;
8883 /* Insert cond statement. */
8884 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8885 if (gsi_end_p (iter))
8886 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8887 else
8888 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8890 /* Create conditionally executed block. */
8891 new_bb = create_empty_bb (bb);
8892 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8893 e->probability = prob;
8894 new_bb->count = e->count ();
8895 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8897 /* Fix edge for split bb. */
8898 fall->flags = EDGE_FALSE_VALUE;
8899 fall->probability -= e->probability;
8901 /* Update dominance info. */
8902 if (dom_info_available_p (CDI_DOMINATORS))
8904 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8905 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8908 /* Update loop info. */
8909 if (current_loops)
8910 add_bb_to_loop (new_bb, bb->loop_father);
8912 return new_bb;
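/* Usage sketch (editor's illustration; FLAG and STMT are hypothetical
   names): conditionally execute a new block after STMT, predicted to
   be taken almost never:

     gcond *cond = gimple_build_cond (NE_EXPR, flag, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (gimple_bb (stmt), stmt, cond,
                         profile_probability::very_unlikely ());

   Statements inserted into THEN_BB then run only when FLAG != 0.  */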
8915 /* Build a ternary operation and gimplify it. Emit code before GSI.
8916 Return the gimple_val holding the result. */
8918 tree
8919 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8920 tree type, tree a, tree b, tree c)
8922 tree ret;
8923 location_t loc = gimple_location (gsi_stmt (*gsi));
8925 ret = fold_build3_loc (loc, code, type, a, b, c);
8926 STRIP_NOPS (ret);
8928 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8929 GSI_SAME_STMT);
8932 /* Build a binary operation and gimplify it. Emit code before GSI.
8933 Return the gimple_val holding the result. */
8935 tree
8936 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8937 tree type, tree a, tree b)
8939 tree ret;
8941 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8942 STRIP_NOPS (ret);
8944 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8945 GSI_SAME_STMT);
8948 /* Build a unary operation and gimplify it. Emit code before GSI.
8949 Return the gimple_val holding the result. */
8951 tree
8952 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8953 tree a)
8955 tree ret;
8957 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8958 STRIP_NOPS (ret);
8960 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8961 GSI_SAME_STMT);
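/* Usage sketch (editor's illustration), in the style of the complex
   lowering code: build "r = ar + br" as a gimple value before GSI:

     tree r = gimplify_build2 (gsi, PLUS_EXPR, inner_type, ar, br);

   The folded result is re-gimplified, so R is always a valid operand
   (an SSA name or constant) for the statements built next.  */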
8966 /* Given a basic block B which ends with a conditional and has
8967 precisely two successors, determine which of the edges is taken if
8968 the conditional is true and which is taken if the conditional is
8969 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8971 void
8972 extract_true_false_edges_from_block (basic_block b,
8973 edge *true_edge,
8974 edge *false_edge)
8976 edge e = EDGE_SUCC (b, 0);
8978 if (e->flags & EDGE_TRUE_VALUE)
8980 *true_edge = e;
8981 *false_edge = EDGE_SUCC (b, 1);
8983 else
8985 *false_edge = e;
8986 *true_edge = EDGE_SUCC (b, 1);
8991 /* From a controlling predicate in the immediate dominator DOM of
8992 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8993 predicate evaluates to true and false and store them to
8994 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8995 they are non-NULL. Returns true if the edges can be determined,
8996 else return false. */
8998 bool
8999 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9000 edge *true_controlled_edge,
9001 edge *false_controlled_edge)
9003 basic_block bb = phiblock;
9004 edge true_edge, false_edge, tem;
9005 edge e0 = NULL, e1 = NULL;
9007 /* We have to verify that one edge into the PHI node is dominated
9008 by the true edge of the predicate block and the other edge
9009 dominated by the false edge. This ensures that the PHI argument
9010 we are going to take is completely determined by the path we
9011 take from the predicate block.
9012 We can only use BB dominance checks below if the destinations of
9013 the true/false edges are dominated by their edge, thus only
9014 have a single predecessor. */
9015 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9016 tem = EDGE_PRED (bb, 0);
9017 if (tem == true_edge
9018 || (single_pred_p (true_edge->dest)
9019 && (tem->src == true_edge->dest
9020 || dominated_by_p (CDI_DOMINATORS,
9021 tem->src, true_edge->dest))))
9022 e0 = tem;
9023 else if (tem == false_edge
9024 || (single_pred_p (false_edge->dest)
9025 && (tem->src == false_edge->dest
9026 || dominated_by_p (CDI_DOMINATORS,
9027 tem->src, false_edge->dest))))
9028 e1 = tem;
9029 else
9030 return false;
9031 tem = EDGE_PRED (bb, 1);
9032 if (tem == true_edge
9033 || (single_pred_p (true_edge->dest)
9034 && (tem->src == true_edge->dest
9035 || dominated_by_p (CDI_DOMINATORS,
9036 tem->src, true_edge->dest))))
9037 e0 = tem;
9038 else if (tem == false_edge
9039 || (single_pred_p (false_edge->dest)
9040 && (tem->src == false_edge->dest
9041 || dominated_by_p (CDI_DOMINATORS,
9042 tem->src, false_edge->dest))))
9043 e1 = tem;
9044 else
9045 return false;
9046 if (!e0 || !e1)
9047 return false;
9049 if (true_controlled_edge)
9050 *true_controlled_edge = e0;
9051 if (false_controlled_edge)
9052 *false_controlled_edge = e1;
9054 return true;
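/* Example (editor's illustration) of the shape this recognizes:

       DOM:       if (x_1 > 0) goto bb_t; else goto bb_f;
                    /t              \f
       bb_t: ...              bb_f: ...
                    \               /
       PHIBLOCK:  y_2 = PHI <a_3 (bb_t), b_4 (bb_f)>

   Here *TRUE_CONTROLLED_EDGE is the bb_t->PHIBLOCK edge and
   *FALSE_CONTROLLED_EDGE is the bb_f->PHIBLOCK edge, so a_3 is the
   PHI value selected when the predicate is true.  */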
9057 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9058 range [LOW, HIGH]. Place the associated stmts before the last stmt of BB. */
9060 void
9061 generate_range_test (basic_block bb, tree index, tree low, tree high,
9062 tree *lhs, tree *rhs)
9064 tree type = TREE_TYPE (index);
9065 tree utype = unsigned_type_for (type);
9067 low = fold_convert (type, low);
9068 high = fold_convert (type, high);
9070 tree tmp = make_ssa_name (type);
9071 gassign *sub1
9072 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
9074 *lhs = make_ssa_name (utype);
9075 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
9077 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
9078 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9079 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
9080 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
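/* The statements built above implement the classic unsigned-compare
   trick: LOW <= INDEX && INDEX <= HIGH is equivalent to the single
   test (unsigned) (INDEX - LOW) <= (unsigned) (HIGH - LOW), because
   an INDEX below LOW wraps around to a huge unsigned value.  A
   self-contained C rendering (editor's sketch):

     static int
     in_range (int index, int low, int high)
     {
       return (unsigned int) (index - low) <= (unsigned int) (high - low);
     }

   E.g. for LOW == 3, HIGH == 7: INDEX == 2 gives 0xffffffff > 4 (out
   of range), while INDEX == 5 gives 2 <= 4 (in range).  */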
9083 /* Emit return warnings. */
9085 namespace {
9087 const pass_data pass_data_warn_function_return =
9089 GIMPLE_PASS, /* type */
9090 "*warn_function_return", /* name */
9091 OPTGROUP_NONE, /* optinfo_flags */
9092 TV_NONE, /* tv_id */
9093 PROP_cfg, /* properties_required */
9094 0, /* properties_provided */
9095 0, /* properties_destroyed */
9096 0, /* todo_flags_start */
9097 0, /* todo_flags_finish */
9100 class pass_warn_function_return : public gimple_opt_pass
9102 public:
9103 pass_warn_function_return (gcc::context *ctxt)
9104 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9107 /* opt_pass methods: */
9108 virtual unsigned int execute (function *);
9110 }; // class pass_warn_function_return
9112 unsigned int
9113 pass_warn_function_return::execute (function *fun)
9115 source_location location;
9116 gimple *last;
9117 edge e;
9118 edge_iterator ei;
9120 if (!targetm.warn_func_return (fun->decl))
9121 return 0;
9123 /* If we have a path to EXIT, then we do return. */
9124 if (TREE_THIS_VOLATILE (fun->decl)
9125 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9127 location = UNKNOWN_LOCATION;
9128 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9129 (e = ei_safe_edge (ei)); )
9131 last = last_stmt (e->src);
9132 if ((gimple_code (last) == GIMPLE_RETURN
9133 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9134 && location == UNKNOWN_LOCATION
9135 && ((location = LOCATION_LOCUS (gimple_location (last)))
9136 != UNKNOWN_LOCATION)
9137 && !optimize)
9138 break;
9139 /* When optimizing, replace return stmts in noreturn functions
9140 with __builtin_unreachable () call. */
9141 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9143 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9144 gimple *new_stmt = gimple_build_call (fndecl, 0);
9145 gimple_set_location (new_stmt, gimple_location (last));
9146 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9147 gsi_replace (&gsi, new_stmt, true);
9148 remove_edge (e);
9150 else
9151 ei_next (&ei);
9153 if (location == UNKNOWN_LOCATION)
9154 location = cfun->function_end_locus;
9155 warning_at (location, 0, "%<noreturn%> function does return");
9158 /* If we see "return;" in some basic block, then we do reach the end
9159 without returning a value. */
9160 else if (warn_return_type > 0
9161 && !TREE_NO_WARNING (fun->decl)
9162 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9164 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9166 gimple *last = last_stmt (e->src);
9167 greturn *return_stmt = dyn_cast <greturn *> (last);
9168 if (return_stmt
9169 && gimple_return_retval (return_stmt) == NULL
9170 && !gimple_no_warning_p (last))
9172 location = gimple_location (last);
9173 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9174 location = fun->function_end_locus;
9175 warning_at (location, OPT_Wreturn_type,
9176 "control reaches end of non-void function");
9177 TREE_NO_WARNING (fun->decl) = 1;
9178 break;
9181 /* The C++ FE turns fallthrough from the end of a non-void function
9182 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9183 Recognize those too. */
9184 basic_block bb;
9185 if (!TREE_NO_WARNING (fun->decl))
9186 FOR_EACH_BB_FN (bb, fun)
9187 if (EDGE_COUNT (bb->succs) == 0)
9189 gimple *last = last_stmt (bb);
9190 const enum built_in_function ubsan_missing_ret
9191 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9192 if (last
9193 && ((LOCATION_LOCUS (gimple_location (last))
9194 == BUILTINS_LOCATION
9195 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9196 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9198 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9199 gsi_prev_nondebug (&gsi);
9200 gimple *prev = gsi_stmt (gsi);
9201 if (prev == NULL)
9202 location = UNKNOWN_LOCATION;
9203 else
9204 location = gimple_location (prev);
9205 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9206 location = fun->function_end_locus;
9207 warning_at (location, OPT_Wreturn_type,
9208 "control reaches end of non-void function");
9209 TREE_NO_WARNING (fun->decl) = 1;
9210 break;
9214 return 0;
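/* Examples (editor's illustration) of what this pass diagnoses:

     int f (int x) { if (x) return 1; }   <- -Wreturn-type: "control
                                             reaches end of non-void
                                             function"
     __attribute__ ((noreturn)) void g (void);
     void g (void) { }                    <- "'noreturn' function does
                                             return"  */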
9217 } // anon namespace
9219 gimple_opt_pass *
9220 make_pass_warn_function_return (gcc::context *ctxt)
9222 return new pass_warn_function_return (ctxt);
9225 /* Walk a gimplified function and warn about calls whose return value is
9226 ignored when the function type has attribute ((warn_unused_result)) set.
9227 This is done before inlining, so we need not worry about inlined calls. */
9229 static void
9230 do_warn_unused_result (gimple_seq seq)
9232 tree fdecl, ftype;
9233 gimple_stmt_iterator i;
9235 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9237 gimple *g = gsi_stmt (i);
9239 switch (gimple_code (g))
9241 case GIMPLE_BIND:
9242 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9243 break;
9244 case GIMPLE_TRY:
9245 do_warn_unused_result (gimple_try_eval (g));
9246 do_warn_unused_result (gimple_try_cleanup (g));
9247 break;
9248 case GIMPLE_CATCH:
9249 do_warn_unused_result (gimple_catch_handler (
9250 as_a <gcatch *> (g)));
9251 break;
9252 case GIMPLE_EH_FILTER:
9253 do_warn_unused_result (gimple_eh_filter_failure (g));
9254 break;
9256 case GIMPLE_CALL:
9257 if (gimple_call_lhs (g))
9258 break;
9259 if (gimple_call_internal_p (g))
9260 break;
9262 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9263 LHS. All calls whose value is ignored should be
9264 represented like this. Look for the attribute. */
9265 fdecl = gimple_call_fndecl (g);
9266 ftype = gimple_call_fntype (g);
9268 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9270 location_t loc = gimple_location (g);
9272 if (fdecl)
9273 warning_at (loc, OPT_Wunused_result,
9274 "ignoring return value of %qD, "
9275 "declared with attribute warn_unused_result",
9276 fdecl);
9277 else
9278 warning_at (loc, OPT_Wunused_result,
9279 "ignoring return value of function "
9280 "declared with attribute warn_unused_result");
9282 break;
9284 default:
9285 /* Not a container, not a call, or a call whose value is used. */
9286 break;
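/* Example (editor's illustration) of what this warns about:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void) { must_check (); }         <- -Wunused-result fires
     void g (void) { int r = must_check (); } <- call has an LHS: no
                                                 warning from this pass  */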
9291 namespace {
9293 const pass_data pass_data_warn_unused_result =
9295 GIMPLE_PASS, /* type */
9296 "*warn_unused_result", /* name */
9297 OPTGROUP_NONE, /* optinfo_flags */
9298 TV_NONE, /* tv_id */
9299 PROP_gimple_any, /* properties_required */
9300 0, /* properties_provided */
9301 0, /* properties_destroyed */
9302 0, /* todo_flags_start */
9303 0, /* todo_flags_finish */
9306 class pass_warn_unused_result : public gimple_opt_pass
9308 public:
9309 pass_warn_unused_result (gcc::context *ctxt)
9310 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9313 /* opt_pass methods: */
9314 virtual bool gate (function *) { return flag_warn_unused_result; }
9315 virtual unsigned int execute (function *)
9317 do_warn_unused_result (gimple_body (current_function_decl));
9318 return 0;
9321 }; // class pass_warn_unused_result
9323 } // anon namespace
9325 gimple_opt_pass *
9326 make_pass_warn_unused_result (gcc::context *ctxt)
9328 return new pass_warn_unused_result (ctxt);
9331 /* IPA passes, compilation of earlier functions or inlining
9332 might have changed some properties, such as marking functions
9333 nothrow, pure, const or noreturn.
9334 Remove redundant edges and basic blocks, and create new ones if necessary.
9336 This pass can't be executed as a standalone pass from the pass manager,
9337 because in between inlining and this fixup verify_flow_info would fail. */
9339 unsigned int
9340 execute_fixup_cfg (void)
9342 basic_block bb;
9343 gimple_stmt_iterator gsi;
9344 int todo = 0;
9345 cgraph_node *node = cgraph_node::get (current_function_decl);
9346 profile_count num = node->count;
9347 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9348 bool scale = num.initialized_p () && !(num == den);
9350 if (scale)
9352 profile_count::adjust_for_ipa_scaling (&num, &den);
9353 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9354 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9355 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9358 FOR_EACH_BB_FN (bb, cfun)
9360 if (scale)
9361 bb->count = bb->count.apply_scale (num, den);
9362 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9364 gimple *stmt = gsi_stmt (gsi);
9365 tree decl = is_gimple_call (stmt)
9366 ? gimple_call_fndecl (stmt)
9367 : NULL;
9368 if (decl)
9370 int flags = gimple_call_flags (stmt);
9371 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9373 if (gimple_purge_dead_abnormal_call_edges (bb))
9374 todo |= TODO_cleanup_cfg;
9376 if (gimple_in_ssa_p (cfun))
9378 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9379 update_stmt (stmt);
9383 if (flags & ECF_NORETURN
9384 && fixup_noreturn_call (stmt))
9385 todo |= TODO_cleanup_cfg;
9388 /* Remove stores to variables we marked write-only.
9389 Keep the access when the store has side effects, i.e. when the
9390 source is volatile. */
9391 if (gimple_store_p (stmt)
9392 && !gimple_has_side_effects (stmt))
9394 tree lhs = get_base_address (gimple_get_lhs (stmt));
9396 if (VAR_P (lhs)
9397 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9398 && varpool_node::get (lhs)->writeonly)
9400 unlink_stmt_vdef (stmt);
9401 gsi_remove (&gsi, true);
9402 release_defs (stmt);
9403 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9404 continue;
9407 /* For calls we can simply remove LHS when it is known
9408 to be write-only. */
9409 if (is_gimple_call (stmt)
9410 && gimple_get_lhs (stmt))
9412 tree lhs = get_base_address (gimple_get_lhs (stmt));
9414 if (VAR_P (lhs)
9415 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9416 && varpool_node::get (lhs)->writeonly)
9418 gimple_call_set_lhs (stmt, NULL);
9419 update_stmt (stmt);
9420 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9424 if (maybe_clean_eh_stmt (stmt)
9425 && gimple_purge_dead_eh_edges (bb))
9426 todo |= TODO_cleanup_cfg;
9427 gsi_next (&gsi);
9430 /* If we have a basic block with no successors that does not
9431 end with a control statement or a noreturn call, end it with
9432 a call to __builtin_unreachable. This situation can occur
9433 when inlining a noreturn call that does in fact return. */
9434 if (EDGE_COUNT (bb->succs) == 0)
9436 gimple *stmt = last_stmt (bb);
9437 if (!stmt
9438 || (!is_ctrl_stmt (stmt)
9439 && (!is_gimple_call (stmt)
9440 || !gimple_call_noreturn_p (stmt))))
9442 if (stmt && is_gimple_call (stmt))
9443 gimple_call_set_ctrl_altering (stmt, false);
9444 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9445 stmt = gimple_build_call (fndecl, 0);
9446 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9447 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9448 if (!cfun->after_inlining)
9450 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9451 node->create_edge (cgraph_node::get_create (fndecl),
9452 call_stmt, bb->count);
9457 if (scale)
9458 compute_function_frequency ();
9460 if (current_loops
9461 && (todo & TODO_cleanup_cfg))
9462 loops_state_set (LOOPS_NEED_FIXUP);
9464 return todo;
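/* Example (editor's illustration): if IPA analysis marked global G
   write-only and the function called below noreturn after this body
   was built, then in

     G = compute ();     <- store to write-only G: removed above
     noret ();           <- now known noreturn
     return 0;           <- unreachable

   the store is deleted, fixup_noreturn_call cleans up after the call,
   and the resulting successor-less block is terminated with a
   __builtin_unreachable () call.  */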
9467 namespace {
9469 const pass_data pass_data_fixup_cfg =
9471 GIMPLE_PASS, /* type */
9472 "fixup_cfg", /* name */
9473 OPTGROUP_NONE, /* optinfo_flags */
9474 TV_NONE, /* tv_id */
9475 PROP_cfg, /* properties_required */
9476 0, /* properties_provided */
9477 0, /* properties_destroyed */
9478 0, /* todo_flags_start */
9479 0, /* todo_flags_finish */
9482 class pass_fixup_cfg : public gimple_opt_pass
9484 public:
9485 pass_fixup_cfg (gcc::context *ctxt)
9486 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9489 /* opt_pass methods: */
9490 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9491 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9493 }; // class pass_fixup_cfg
9495 } // anon namespace
9497 gimple_opt_pass *
9498 make_pass_fixup_cfg (gcc::context *ctxt)
9500 return new pass_fixup_cfg (ctxt);
9503 /* Garbage collection support for edge_def. */
9505 extern void gt_ggc_mx (tree&);
9506 extern void gt_ggc_mx (gimple *&);
9507 extern void gt_ggc_mx (rtx&);
9508 extern void gt_ggc_mx (basic_block&);
9510 static void
9511 gt_ggc_mx (rtx_insn *& x)
9513 if (x)
9514 gt_ggc_mx_rtx_def ((void *) x);
9517 void
9518 gt_ggc_mx (edge_def *e)
9520 tree block = LOCATION_BLOCK (e->goto_locus);
9521 gt_ggc_mx (e->src);
9522 gt_ggc_mx (e->dest);
9523 if (current_ir_type () == IR_GIMPLE)
9524 gt_ggc_mx (e->insns.g);
9525 else
9526 gt_ggc_mx (e->insns.r);
9527 gt_ggc_mx (block);
9530 /* PCH support for edge_def. */
9532 extern void gt_pch_nx (tree&);
9533 extern void gt_pch_nx (gimple *&);
9534 extern void gt_pch_nx (rtx&);
9535 extern void gt_pch_nx (basic_block&);
9537 static void
9538 gt_pch_nx (rtx_insn *& x)
9540 if (x)
9541 gt_pch_nx_rtx_def ((void *) x);
9544 void
9545 gt_pch_nx (edge_def *e)
9547 tree block = LOCATION_BLOCK (e->goto_locus);
9548 gt_pch_nx (e->src);
9549 gt_pch_nx (e->dest);
9550 if (current_ir_type () == IR_GIMPLE)
9551 gt_pch_nx (e->insns.g);
9552 else
9553 gt_pch_nx (e->insns.r);
9554 gt_pch_nx (block);
9557 void
9558 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9560 tree block = LOCATION_BLOCK (e->goto_locus);
9561 op (&(e->src), cookie);
9562 op (&(e->dest), cookie);
9563 if (current_ir_type () == IR_GIMPLE)
9564 op (&(e->insns.g), cookie);
9565 else
9566 op (&(e->insns.r), cookie);
9567 op (&(block), cookie);
9570 #if CHECKING_P
9572 namespace selftest {
9574 /* Helper function for CFG selftests: create a dummy function decl
9575 and push it as cfun. */
9577 static tree
9578 push_fndecl (const char *name)
9580 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9581 /* FIXME: this uses input_location: */
9582 tree fndecl = build_fn_decl (name, fn_type);
9583 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9584 NULL_TREE, integer_type_node);
9585 DECL_RESULT (fndecl) = retval;
9586 push_struct_function (fndecl);
9587 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9588 ASSERT_TRUE (fun != NULL);
9589 init_empty_tree_cfg_for_function (fun);
9590 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9591 ASSERT_EQ (0, n_edges_for_fn (fun));
9592 return fndecl;
9595 /* These tests directly create CFGs.
9596 Compare with the static fns within tree-cfg.c:
9597 - build_gimple_cfg
9598 - make_blocks: calls create_basic_block (seq, bb);
9599 - make_edges. */
9601 /* Verify a simple cfg of the form:
9602 ENTRY -> A -> B -> C -> EXIT. */
9604 static void
9605 test_linear_chain ()
9607 gimple_register_cfg_hooks ();
9609 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9610 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9612 /* Create some empty blocks. */
9613 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9614 basic_block bb_b = create_empty_bb (bb_a);
9615 basic_block bb_c = create_empty_bb (bb_b);
9617 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9618 ASSERT_EQ (0, n_edges_for_fn (fun));
9620 /* Create some edges: a simple linear chain of BBs. */
9621 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9622 make_edge (bb_a, bb_b, 0);
9623 make_edge (bb_b, bb_c, 0);
9624 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9626 /* Verify the edges. */
9627 ASSERT_EQ (4, n_edges_for_fn (fun));
9628 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9629 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9630 ASSERT_EQ (1, bb_a->preds->length ());
9631 ASSERT_EQ (1, bb_a->succs->length ());
9632 ASSERT_EQ (1, bb_b->preds->length ());
9633 ASSERT_EQ (1, bb_b->succs->length ());
9634 ASSERT_EQ (1, bb_c->preds->length ());
9635 ASSERT_EQ (1, bb_c->succs->length ());
9636 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9637 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9639 /* Verify the dominance information
9640 Each BB in our simple chain should be dominated by the one before
9641 it. */
9642 calculate_dominance_info (CDI_DOMINATORS);
9643 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9644 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9645 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9646 ASSERT_EQ (1, dom_by_b.length ());
9647 ASSERT_EQ (bb_c, dom_by_b[0]);
9648 free_dominance_info (CDI_DOMINATORS);
9649 dom_by_b.release ();
9651 /* Similarly for post-dominance: each BB in our chain is post-dominated
9652 by the one after it. */
9653 calculate_dominance_info (CDI_POST_DOMINATORS);
9654 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9655 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9656 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9657 ASSERT_EQ (1, postdom_by_b.length ());
9658 ASSERT_EQ (bb_a, postdom_by_b[0]);
9659 free_dominance_info (CDI_POST_DOMINATORS);
9660 postdom_by_b.release ();
9662 pop_cfun ();
9665 /* Verify a simple CFG of the form:
9666      ENTRY
9667        |
9668        A
9669       / \
9670      /t  \f
9671     B     C
9672      \   /
9673       \ /
9674        D
9675        |
9676       EXIT. */
9678 static void
9679 test_diamond ()
9681 gimple_register_cfg_hooks ();
9683 tree fndecl = push_fndecl ("cfg_test_diamond");
9684 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9686 /* Create some empty blocks. */
9687 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9688 basic_block bb_b = create_empty_bb (bb_a);
9689 basic_block bb_c = create_empty_bb (bb_a);
9690 basic_block bb_d = create_empty_bb (bb_b);
9692 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9693 ASSERT_EQ (0, n_edges_for_fn (fun));
9695 /* Create the edges. */
9696 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9697 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9698 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9699 make_edge (bb_b, bb_d, 0);
9700 make_edge (bb_c, bb_d, 0);
9701 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9703 /* Verify the edges. */
9704 ASSERT_EQ (6, n_edges_for_fn (fun));
9705 ASSERT_EQ (1, bb_a->preds->length ());
9706 ASSERT_EQ (2, bb_a->succs->length ());
9707 ASSERT_EQ (1, bb_b->preds->length ());
9708 ASSERT_EQ (1, bb_b->succs->length ());
9709 ASSERT_EQ (1, bb_c->preds->length ());
9710 ASSERT_EQ (1, bb_c->succs->length ());
9711 ASSERT_EQ (2, bb_d->preds->length ());
9712 ASSERT_EQ (1, bb_d->succs->length ());
9714 /* Verify the dominance information. */
9715 calculate_dominance_info (CDI_DOMINATORS);
9716 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9717 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9718 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9719 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9720 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9721 dom_by_a.release ();
9722 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9723 ASSERT_EQ (0, dom_by_b.length ());
9724 dom_by_b.release ();
9725 free_dominance_info (CDI_DOMINATORS);
9727 /* Similarly for post-dominance. */
9728 calculate_dominance_info (CDI_POST_DOMINATORS);
9729 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9730 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9731 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9732 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9733 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9734 postdom_by_d.release ();
9735 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9736 ASSERT_EQ (0, postdom_by_b.length ());
9737 postdom_by_b.release ();
9738 free_dominance_info (CDI_POST_DOMINATORS);
9740 pop_cfun ();
9743 /* Verify that we can handle a CFG containing a "complete" aka
9744 fully-connected subgraph (where A B C D below all have edges
9745 pointing to every other node, and also to themselves).
9746 e.g.:
9747      ENTRY  EXIT
9748        |    ^
9749        |   /
9750        |  /
9751        | /
9752        |/
9753        A<--->B
9754        ^^   ^^
9755        | \ / |
9756        |  X  |
9757        | / \ |
9758        VV   VV
9759        C<--->D
9762 static void
9763 test_fully_connected ()
9765 gimple_register_cfg_hooks ();
9767 tree fndecl = push_fndecl ("cfg_fully_connected");
9768 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9770 const int n = 4;
9772 /* Create some empty blocks. */
9773 auto_vec <basic_block> subgraph_nodes;
9774 for (int i = 0; i < n; i++)
9775 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9777 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9778 ASSERT_EQ (0, n_edges_for_fn (fun));
9780 /* Create the edges. */
9781 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9782 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9783 for (int i = 0; i < n; i++)
9784 for (int j = 0; j < n; j++)
9785 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9787 /* Verify the edges. */
9788 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9789 /* The first one is linked to ENTRY/EXIT as well as itself and
9790 everything else. */
9791 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9792 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9793 /* The other ones in the subgraph are linked to everything in
9794 the subgraph (including themselves). */
9795 for (int i = 1; i < n; i++)
9797 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9798 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9801 /* Verify the dominance information. */
9802 calculate_dominance_info (CDI_DOMINATORS);
9803 /* The initial block in the subgraph should be dominated by ENTRY. */
9804 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9805 get_immediate_dominator (CDI_DOMINATORS,
9806 subgraph_nodes[0]));
9807 /* Every other block in the subgraph should be dominated by the
9808 initial block. */
9809 for (int i = 1; i < n; i++)
9810 ASSERT_EQ (subgraph_nodes[0],
9811 get_immediate_dominator (CDI_DOMINATORS,
9812 subgraph_nodes[i]));
9813 free_dominance_info (CDI_DOMINATORS);
9815 /* Similarly for post-dominance. */
9816 calculate_dominance_info (CDI_POST_DOMINATORS);
9817 /* The initial block in the subgraph should be postdominated by EXIT. */
9818 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9819 get_immediate_dominator (CDI_POST_DOMINATORS,
9820 subgraph_nodes[0]));
9821 /* Every other block in the subgraph should be postdominated by the
9822 initial block, since that leads to EXIT. */
9823 for (int i = 1; i < n; i++)
9824 ASSERT_EQ (subgraph_nodes[0],
9825 get_immediate_dominator (CDI_POST_DOMINATORS,
9826 subgraph_nodes[i]));
9827 free_dominance_info (CDI_POST_DOMINATORS);
9829 pop_cfun ();
9832 /* Run all of the selftests within this file. */
9834 void
9835 tree_cfg_c_tests ()
9837 test_linear_chain ();
9838 test_diamond ();
9839 test_fully_connected ();
9842 } // namespace selftest
9844 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9845 - loop
9846 - nested loops
9847 - switch statement (a block with many out-edges)
9848 - something that jumps to itself
9849 - etc */
9851 #endif /* CHECKING_P */