New files
[official-gcc.git] / gcc / tree-cfg.c
blobb21ef0eee371f84e33f26be514c692ba78703503
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
64 #include "asan.h"
65 #include "profile.h"
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
70 /* Local declarations. */
72 /* Initial capacity for the basic block array. */
73 static const int initial_cfg_capacity = 20;
75 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
76 which use a particular edge. The CASE_LABEL_EXPRs are chained together
77 via their CASE_CHAIN field, which we clear after we're done with the
78 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
80 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
81 update the case vector in response to edge redirections.
83 Right now this table is set up and torn down at key points in the
84 compilation process. It would be nice if we could make the table
85 more persistent. The key is getting notification of changes to
86 the CFG (particularly edge removal, creation and redirection). */
88 static hash_map<edge, tree> *edge_to_cases;
90 /* If we record edge_to_cases, this bitmap will hold indexes
91 of basic blocks that end in a GIMPLE_SWITCH which we touched
92 due to edge manipulations. */
94 static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their predecessor's block during
     block merging (see gimple_merge_blocks).  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
104 /* Data to pass to replace_block_vars_by_duplicates_1. */
105 struct replace_decls_d
107 hash_map<tree, tree> *vars_map;
108 tree to_context;
/* Hash table entry: the last discriminator assigned for a source line.  */
struct locus_discrim_map
{
  /* Source line this entry applies to (hash key).  */
  int location_line;
  /* Last discriminator handed out for that line.  */
  int discriminator;
};
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
122 static inline hashval_t hash (const locus_discrim_map *);
123 static inline bool equal (const locus_discrim_map *,
124 const locus_discrim_map *);
127 /* Trivial hash function for a location_t. ITEM is a pointer to
128 a hash table entry that maps a location_t to a discriminator. */
130 inline hashval_t
131 locus_discrim_hasher::hash (const locus_discrim_map *item)
133 return item->location_line;
136 /* Equality function for the locus-to-discriminator map. A and B
137 point to the two hash table entries to compare. */
139 inline bool
140 locus_discrim_hasher::equal (const locus_discrim_map *a,
141 const locus_discrim_map *b)
143 return a->location_line == b->location_line;
146 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq);
151 /* Edges. */
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block);
155 static void make_gimple_switch_edges (gswitch *, basic_block);
156 static bool make_goto_expr_edges (basic_block);
157 static void make_gimple_asm_edges (basic_block);
158 static edge gimple_redirect_edge_and_branch (edge, basic_block);
159 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple *, gimple *);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge);
165 static gimple *first_non_label_stmt (basic_block);
166 static bool verify_gimple_transaction (gtransaction *);
167 static bool call_can_make_abnormal_goto (gimple *);
169 /* Flowgraph optimization and cleanup. */
170 static void gimple_merge_blocks (basic_block, basic_block);
171 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
172 static void remove_bb (basic_block);
173 static edge find_taken_edge_computed_goto (basic_block, tree);
174 static edge find_taken_edge_cond_expr (const gcond *, tree);
176 void
177 init_empty_tree_cfg_for_function (struct function *fn)
179 /* Initialize the basic block array. */
180 init_flow (fn);
181 profile_status_for_fn (fn) = PROFILE_ABSENT;
182 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
183 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
184 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
185 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
186 initial_cfg_capacity);
188 /* Build a mapping of labels to their associated blocks. */
189 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
190 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
191 initial_cfg_capacity);
193 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
194 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
196 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
197 = EXIT_BLOCK_PTR_FOR_FN (fn);
198 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
199 = ENTRY_BLOCK_PTR_FOR_FN (fn);
202 void
203 init_empty_tree_cfg (void)
205 init_empty_tree_cfg_for_function (cfun);
208 /*---------------------------------------------------------------------------
209 Create basic blocks
210 ---------------------------------------------------------------------------*/
212 /* Entry point to the CFG builder for trees. SEQ is the sequence of
213 statements to be added to the flowgraph. */
215 static void
216 build_gimple_cfg (gimple_seq seq)
218 /* Register specific gimple functions. */
219 gimple_register_cfg_hooks ();
221 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
223 init_empty_tree_cfg ();
225 make_blocks (seq);
227 /* Make sure there is always at least one block, even if it's empty. */
228 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
229 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
231 /* Adjust the size of the array. */
232 if (basic_block_info_for_fn (cfun)->length ()
233 < (size_t) n_basic_blocks_for_fn (cfun))
234 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
235 n_basic_blocks_for_fn (cfun));
237 /* To speed up statement iterator walks, we first purge dead labels. */
238 cleanup_dead_labels ();
240 /* Group case nodes to reduce the number of edges.
241 We do this after cleaning up dead labels because otherwise we miss
242 a lot of obvious case merging opportunities. */
243 group_case_labels ();
245 /* Create the edges of the flowgraph. */
246 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
247 make_edges ();
248 assign_discriminators ();
249 cleanup_dead_labels ();
250 delete discriminator_per_locus;
251 discriminator_per_locus = NULL;
254 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
255 them and propagate the information to LOOP. We assume that the annotations
256 come immediately before the condition in BB, if any. */
258 static void
259 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
261 gimple_stmt_iterator gsi = gsi_last_bb (bb);
262 gimple *stmt = gsi_stmt (gsi);
264 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
265 return;
267 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
269 stmt = gsi_stmt (gsi);
270 if (gimple_code (stmt) != GIMPLE_CALL)
271 break;
272 if (!gimple_call_internal_p (stmt)
273 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
274 break;
276 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
278 case annot_expr_ivdep_kind:
279 loop->safelen = INT_MAX;
280 break;
281 case annot_expr_unroll_kind:
282 loop->unroll
283 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
284 cfun->has_unroll = true;
285 break;
286 case annot_expr_no_vector_kind:
287 loop->dont_vectorize = true;
288 break;
289 case annot_expr_vector_kind:
290 loop->force_vectorize = true;
291 cfun->has_force_vectorize_loops = true;
292 break;
293 case annot_expr_parallel_kind:
294 loop->can_be_parallel = true;
295 loop->safelen = INT_MAX;
296 break;
297 default:
298 gcc_unreachable ();
301 stmt = gimple_build_assign (gimple_call_lhs (stmt),
302 gimple_call_arg (stmt, 0));
303 gsi_replace (&gsi, stmt, true);
307 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
308 them and propagate the information to the loop. We assume that the
309 annotations come immediately before the condition of the loop. */
311 static void
312 replace_loop_annotate (void)
314 class loop *loop;
315 basic_block bb;
316 gimple_stmt_iterator gsi;
317 gimple *stmt;
319 FOR_EACH_LOOP (loop, 0)
321 /* First look into the header. */
322 replace_loop_annotate_in_block (loop->header, loop);
324 /* Then look into the latch, if any. */
325 if (loop->latch)
326 replace_loop_annotate_in_block (loop->latch, loop);
328 /* Push the global flag_finite_loops state down to individual loops. */
329 loop->finite_p = flag_finite_loops;
332 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
333 FOR_EACH_BB_FN (bb, cfun)
335 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
337 stmt = gsi_stmt (gsi);
338 if (gimple_code (stmt) != GIMPLE_CALL)
339 continue;
340 if (!gimple_call_internal_p (stmt)
341 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
342 continue;
344 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
346 case annot_expr_ivdep_kind:
347 case annot_expr_unroll_kind:
348 case annot_expr_no_vector_kind:
349 case annot_expr_vector_kind:
350 case annot_expr_parallel_kind:
351 break;
352 default:
353 gcc_unreachable ();
356 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
357 stmt = gimple_build_assign (gimple_call_lhs (stmt),
358 gimple_call_arg (stmt, 0));
359 gsi_replace (&gsi, stmt, true);
364 static unsigned int
365 execute_build_cfg (void)
367 gimple_seq body = gimple_body (current_function_decl);
369 build_gimple_cfg (body);
370 gimple_set_body (current_function_decl, NULL);
371 if (dump_file && (dump_flags & TDF_DETAILS))
373 fprintf (dump_file, "Scope blocks:\n");
374 dump_scope_blocks (dump_file, dump_flags);
376 cleanup_tree_cfg ();
377 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
378 replace_loop_annotate ();
379 return 0;
382 namespace {
384 const pass_data pass_data_build_cfg =
386 GIMPLE_PASS, /* type */
387 "cfg", /* name */
388 OPTGROUP_NONE, /* optinfo_flags */
389 TV_TREE_CFG, /* tv_id */
390 PROP_gimple_leh, /* properties_required */
391 ( PROP_cfg | PROP_loops ), /* properties_provided */
392 0, /* properties_destroyed */
393 0, /* todo_flags_start */
394 0, /* todo_flags_finish */
397 class pass_build_cfg : public gimple_opt_pass
399 public:
400 pass_build_cfg (gcc::context *ctxt)
401 : gimple_opt_pass (pass_data_build_cfg, ctxt)
404 /* opt_pass methods: */
405 virtual unsigned int execute (function *) { return execute_build_cfg (); }
407 }; // class pass_build_cfg
409 } // anon namespace
411 gimple_opt_pass *
412 make_pass_build_cfg (gcc::context *ctxt)
414 return new pass_build_cfg (ctxt);
418 /* Return true if T is a computed goto. */
420 bool
421 computed_goto_p (gimple *t)
423 return (gimple_code (t) == GIMPLE_GOTO
424 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
427 /* Returns true if the sequence of statements STMTS only contains
428 a call to __builtin_unreachable (). */
430 bool
431 gimple_seq_unreachable_p (gimple_seq stmts)
433 if (stmts == NULL
434 /* Return false if -fsanitize=unreachable, we don't want to
435 optimize away those calls, but rather turn them into
436 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
437 later. */
438 || sanitize_flags_p (SANITIZE_UNREACHABLE))
439 return false;
441 gimple_stmt_iterator gsi = gsi_last (stmts);
443 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
444 return false;
446 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
448 gimple *stmt = gsi_stmt (gsi);
449 if (gimple_code (stmt) != GIMPLE_LABEL
450 && !is_gimple_debug (stmt)
451 && !gimple_clobber_p (stmt))
452 return false;
454 return true;
457 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
458 the other edge points to a bb with just __builtin_unreachable ().
459 I.e. return true for C->M edge in:
460 <bb C>:
462 if (something)
463 goto <bb N>;
464 else
465 goto <bb M>;
466 <bb N>:
467 __builtin_unreachable ();
468 <bb M>: */
470 bool
471 assert_unreachable_fallthru_edge_p (edge e)
473 basic_block pred_bb = e->src;
474 gimple *last = last_stmt (pred_bb);
475 if (last && gimple_code (last) == GIMPLE_COND)
477 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
478 if (other_bb == e->dest)
479 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
480 if (EDGE_COUNT (other_bb->succs) == 0)
481 return gimple_seq_unreachable_p (bb_seq (other_bb));
483 return false;
487 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
488 could alter control flow except via eh. We initialize the flag at
489 CFG build time and only ever clear it later. */
491 static void
492 gimple_call_initialize_ctrl_altering (gimple *stmt)
494 int flags = gimple_call_flags (stmt);
496 /* A call alters control flow if it can make an abnormal goto. */
497 if (call_can_make_abnormal_goto (stmt)
498 /* A call also alters control flow if it does not return. */
499 || flags & ECF_NORETURN
500 /* TM ending statements have backedges out of the transaction.
501 Return true so we split the basic block containing them.
502 Note that the TM_BUILTIN test is merely an optimization. */
503 || ((flags & ECF_TM_BUILTIN)
504 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
505 /* BUILT_IN_RETURN call is same as return statement. */
506 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
507 /* IFN_UNIQUE should be the last insn, to make checking for it
508 as cheap as possible. */
509 || (gimple_call_internal_p (stmt)
510 && gimple_call_internal_unique_p (stmt)))
511 gimple_call_set_ctrl_altering (stmt, true);
512 else
513 gimple_call_set_ctrl_altering (stmt, false);
517 /* Insert SEQ after BB and build a flowgraph. */
519 static basic_block
520 make_blocks_1 (gimple_seq seq, basic_block bb)
522 gimple_stmt_iterator i = gsi_start (seq);
523 gimple *stmt = NULL;
524 gimple *prev_stmt = NULL;
525 bool start_new_block = true;
526 bool first_stmt_of_seq = true;
528 while (!gsi_end_p (i))
530 /* PREV_STMT should only be set to a debug stmt if the debug
531 stmt is before nondebug stmts. Once stmt reaches a nondebug
532 nonlabel, prev_stmt will be set to it, so that
533 stmt_starts_bb_p will know to start a new block if a label is
534 found. However, if stmt was a label after debug stmts only,
535 keep the label in prev_stmt even if we find further debug
536 stmts, for there may be other labels after them, and they
537 should land in the same block. */
538 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
539 prev_stmt = stmt;
540 stmt = gsi_stmt (i);
542 if (stmt && is_gimple_call (stmt))
543 gimple_call_initialize_ctrl_altering (stmt);
545 /* If the statement starts a new basic block or if we have determined
546 in a previous pass that we need to create a new block for STMT, do
547 so now. */
548 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
550 if (!first_stmt_of_seq)
551 gsi_split_seq_before (&i, &seq);
552 bb = create_basic_block (seq, bb);
553 start_new_block = false;
554 prev_stmt = NULL;
557 /* Now add STMT to BB and create the subgraphs for special statement
558 codes. */
559 gimple_set_bb (stmt, bb);
561 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
562 next iteration. */
563 if (stmt_ends_bb_p (stmt))
565 /* If the stmt can make abnormal goto use a new temporary
566 for the assignment to the LHS. This makes sure the old value
567 of the LHS is available on the abnormal edge. Otherwise
568 we will end up with overlapping life-ranges for abnormal
569 SSA names. */
570 if (gimple_has_lhs (stmt)
571 && stmt_can_make_abnormal_goto (stmt)
572 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
574 tree lhs = gimple_get_lhs (stmt);
575 tree tmp = create_tmp_var (TREE_TYPE (lhs));
576 gimple *s = gimple_build_assign (lhs, tmp);
577 gimple_set_location (s, gimple_location (stmt));
578 gimple_set_block (s, gimple_block (stmt));
579 gimple_set_lhs (stmt, tmp);
580 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
581 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
582 DECL_GIMPLE_REG_P (tmp) = 1;
583 gsi_insert_after (&i, s, GSI_SAME_STMT);
585 start_new_block = true;
588 gsi_next (&i);
589 first_stmt_of_seq = false;
591 return bb;
594 /* Build a flowgraph for the sequence of stmts SEQ. */
596 static void
597 make_blocks (gimple_seq seq)
599 /* Look for debug markers right before labels, and move the debug
600 stmts after the labels. Accepting labels among debug markers
601 adds no value, just complexity; if we wanted to annotate labels
602 with view numbers (so sequencing among markers would matter) or
603 somesuch, we're probably better off still moving the labels, but
604 adding other debug annotations in their original positions or
605 emitting nonbind or bind markers associated with the labels in
606 the original position of the labels.
608 Moving labels would probably be simpler, but we can't do that:
609 moving labels assigns label ids to them, and doing so because of
610 debug markers makes for -fcompare-debug and possibly even codegen
611 differences. So, we have to move the debug stmts instead. To
612 that end, we scan SEQ backwards, marking the position of the
613 latest (earliest we find) label, and moving debug stmts that are
614 not separated from it by nondebug nonlabel stmts after the
615 label. */
616 if (MAY_HAVE_DEBUG_MARKER_STMTS)
618 gimple_stmt_iterator label = gsi_none ();
620 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
622 gimple *stmt = gsi_stmt (i);
624 /* If this is the first label we encounter (latest in SEQ)
625 before nondebug stmts, record its position. */
626 if (is_a <glabel *> (stmt))
628 if (gsi_end_p (label))
629 label = i;
630 continue;
633 /* Without a recorded label position to move debug stmts to,
634 there's nothing to do. */
635 if (gsi_end_p (label))
636 continue;
638 /* Move the debug stmt at I after LABEL. */
639 if (is_gimple_debug (stmt))
641 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
642 /* As STMT is removed, I advances to the stmt after
643 STMT, so the gsi_prev in the for "increment"
644 expression gets us to the stmt we're to visit after
645 STMT. LABEL, however, would advance to the moved
646 stmt if we passed it to gsi_move_after, so pass it a
647 copy instead, so as to keep LABEL pointing to the
648 LABEL. */
649 gimple_stmt_iterator copy = label;
650 gsi_move_after (&i, &copy);
651 continue;
654 /* There aren't any (more?) debug stmts before label, so
655 there isn't anything else to move after it. */
656 label = gsi_none ();
660 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
663 /* Create and return a new empty basic block after bb AFTER. */
665 static basic_block
666 create_bb (void *h, void *e, basic_block after)
668 basic_block bb;
670 gcc_assert (!e);
672 /* Create and initialize a new basic block. Since alloc_block uses
673 GC allocation that clears memory to allocate a basic block, we do
674 not have to clear the newly allocated basic block here. */
675 bb = alloc_block ();
677 bb->index = last_basic_block_for_fn (cfun);
678 bb->flags = BB_NEW;
679 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
681 /* Add the new block to the linked list of blocks. */
682 link_block (bb, after);
684 /* Grow the basic block array if needed. */
685 if ((size_t) last_basic_block_for_fn (cfun)
686 == basic_block_info_for_fn (cfun)->length ())
688 size_t new_size =
689 (last_basic_block_for_fn (cfun)
690 + (last_basic_block_for_fn (cfun) + 3) / 4);
691 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
694 /* Add the newly created block to the array. */
695 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
697 n_basic_blocks_for_fn (cfun)++;
698 last_basic_block_for_fn (cfun)++;
700 return bb;
704 /*---------------------------------------------------------------------------
705 Edge creation
706 ---------------------------------------------------------------------------*/
708 /* If basic block BB has an abnormal edge to a basic block
709 containing IFN_ABNORMAL_DISPATCHER internal call, return
710 that the dispatcher's basic block, otherwise return NULL. */
712 basic_block
713 get_abnormal_succ_dispatcher (basic_block bb)
715 edge e;
716 edge_iterator ei;
718 FOR_EACH_EDGE (e, ei, bb->succs)
719 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
721 gimple_stmt_iterator gsi
722 = gsi_start_nondebug_after_labels_bb (e->dest);
723 gimple *g = gsi_stmt (gsi);
724 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
725 return e->dest;
727 return NULL;
730 /* Helper function for make_edges. Create a basic block with
731 with ABNORMAL_DISPATCHER internal call in it if needed, and
732 create abnormal edges from BBS to it and from it to FOR_BB
733 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
735 static void
736 handle_abnormal_edges (basic_block *dispatcher_bbs,
737 basic_block for_bb, int *bb_to_omp_idx,
738 auto_vec<basic_block> *bbs, bool computed_goto)
740 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
741 unsigned int idx = 0;
742 basic_block bb;
743 bool inner = false;
745 if (bb_to_omp_idx)
747 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
748 if (bb_to_omp_idx[for_bb->index] != 0)
749 inner = true;
752 /* If the dispatcher has been created already, then there are basic
753 blocks with abnormal edges to it, so just make a new edge to
754 for_bb. */
755 if (*dispatcher == NULL)
757 /* Check if there are any basic blocks that need to have
758 abnormal edges to this dispatcher. If there are none, return
759 early. */
760 if (bb_to_omp_idx == NULL)
762 if (bbs->is_empty ())
763 return;
765 else
767 FOR_EACH_VEC_ELT (*bbs, idx, bb)
768 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
769 break;
770 if (bb == NULL)
771 return;
774 /* Create the dispatcher bb. */
775 *dispatcher = create_basic_block (NULL, for_bb);
776 if (computed_goto)
778 /* Factor computed gotos into a common computed goto site. Also
779 record the location of that site so that we can un-factor the
780 gotos after we have converted back to normal form. */
781 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
783 /* Create the destination of the factored goto. Each original
784 computed goto will put its desired destination into this
785 variable and jump to the label we create immediately below. */
786 tree var = create_tmp_var (ptr_type_node, "gotovar");
788 /* Build a label for the new block which will contain the
789 factored computed goto. */
790 tree factored_label_decl
791 = create_artificial_label (UNKNOWN_LOCATION);
792 gimple *factored_computed_goto_label
793 = gimple_build_label (factored_label_decl);
794 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
796 /* Build our new computed goto. */
797 gimple *factored_computed_goto = gimple_build_goto (var);
798 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
800 FOR_EACH_VEC_ELT (*bbs, idx, bb)
802 if (bb_to_omp_idx
803 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
804 continue;
806 gsi = gsi_last_bb (bb);
807 gimple *last = gsi_stmt (gsi);
809 gcc_assert (computed_goto_p (last));
811 /* Copy the original computed goto's destination into VAR. */
812 gimple *assignment
813 = gimple_build_assign (var, gimple_goto_dest (last));
814 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
816 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
817 e->goto_locus = gimple_location (last);
818 gsi_remove (&gsi, true);
821 else
823 tree arg = inner ? boolean_true_node : boolean_false_node;
824 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
825 1, arg);
826 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
827 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
829 /* Create predecessor edges of the dispatcher. */
830 FOR_EACH_VEC_ELT (*bbs, idx, bb)
832 if (bb_to_omp_idx
833 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
834 continue;
835 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
840 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
843 /* Creates outgoing edges for BB. Returns 1 when it ends with an
844 computed goto, returns 2 when it ends with a statement that
845 might return to this function via an nonlocal goto, otherwise
846 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
848 static int
849 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
851 gimple *last = last_stmt (bb);
852 bool fallthru = false;
853 int ret = 0;
855 if (!last)
856 return ret;
858 switch (gimple_code (last))
860 case GIMPLE_GOTO:
861 if (make_goto_expr_edges (bb))
862 ret = 1;
863 fallthru = false;
864 break;
865 case GIMPLE_RETURN:
867 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
868 e->goto_locus = gimple_location (last);
869 fallthru = false;
871 break;
872 case GIMPLE_COND:
873 make_cond_expr_edges (bb);
874 fallthru = false;
875 break;
876 case GIMPLE_SWITCH:
877 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
878 fallthru = false;
879 break;
880 case GIMPLE_RESX:
881 make_eh_edges (last);
882 fallthru = false;
883 break;
884 case GIMPLE_EH_DISPATCH:
885 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
886 break;
888 case GIMPLE_CALL:
889 /* If this function receives a nonlocal goto, then we need to
890 make edges from this call site to all the nonlocal goto
891 handlers. */
892 if (stmt_can_make_abnormal_goto (last))
893 ret = 2;
895 /* If this statement has reachable exception handlers, then
896 create abnormal edges to them. */
897 make_eh_edges (last);
899 /* BUILTIN_RETURN is really a return statement. */
900 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
902 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
903 fallthru = false;
905 /* Some calls are known not to return. */
906 else
907 fallthru = !gimple_call_noreturn_p (last);
908 break;
910 case GIMPLE_ASSIGN:
911 /* A GIMPLE_ASSIGN may throw internally and thus be considered
912 control-altering. */
913 if (is_ctrl_altering_stmt (last))
914 make_eh_edges (last);
915 fallthru = true;
916 break;
918 case GIMPLE_ASM:
919 make_gimple_asm_edges (bb);
920 fallthru = true;
921 break;
923 CASE_GIMPLE_OMP:
924 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
925 break;
927 case GIMPLE_TRANSACTION:
929 gtransaction *txn = as_a <gtransaction *> (last);
930 tree label1 = gimple_transaction_label_norm (txn);
931 tree label2 = gimple_transaction_label_uninst (txn);
933 if (label1)
934 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
935 if (label2)
936 make_edge (bb, label_to_block (cfun, label2),
937 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
939 tree label3 = gimple_transaction_label_over (txn);
940 if (gimple_transaction_subcode (txn)
941 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
942 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
944 fallthru = false;
946 break;
948 default:
949 gcc_assert (!stmt_ends_bb_p (last));
950 fallthru = true;
951 break;
954 if (fallthru)
955 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
957 return ret;
960 /* Join all the blocks in the flowgraph. */
962 static void
963 make_edges (void)
965 basic_block bb;
966 struct omp_region *cur_region = NULL;
967 auto_vec<basic_block> ab_edge_goto;
968 auto_vec<basic_block> ab_edge_call;
969 int *bb_to_omp_idx = NULL;
970 int cur_omp_region_idx = 0;
972 /* Create an edge from entry to the first block with executable
973 statements in it. */
974 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
975 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
976 EDGE_FALLTHRU);
978 /* Traverse the basic block array placing edges. */
979 FOR_EACH_BB_FN (bb, cfun)
981 int mer;
983 if (bb_to_omp_idx)
984 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
986 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
987 if (mer == 1)
988 ab_edge_goto.safe_push (bb);
989 else if (mer == 2)
990 ab_edge_call.safe_push (bb);
992 if (cur_region && bb_to_omp_idx == NULL)
993 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
996 /* Computed gotos are hell to deal with, especially if there are
997 lots of them with a large number of destinations. So we factor
998 them to a common computed goto location before we build the
999 edge list. After we convert back to normal form, we will un-factor
1000 the computed gotos since factoring introduces an unwanted jump.
1001 For non-local gotos and abnormal edges from calls to calls that return
1002 twice or forced labels, factor the abnormal edges too, by having all
1003 abnormal edges from the calls go to a common artificial basic block
1004 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1005 basic block to all forced labels and calls returning twice.
1006 We do this per-OpenMP structured block, because those regions
1007 are guaranteed to be single entry single exit by the standard,
1008 so it is not allowed to enter or exit such regions abnormally this way,
1009 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1010 must not transfer control across SESE region boundaries. */
1011 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1013 gimple_stmt_iterator gsi;
1014 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1015 basic_block *dispatcher_bbs = dispatcher_bb_array;
1016 int count = n_basic_blocks_for_fn (cfun);
1018 if (bb_to_omp_idx)
1019 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1021 FOR_EACH_BB_FN (bb, cfun)
1023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1025 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1026 tree target;
1028 if (!label_stmt)
1029 break;
1031 target = gimple_label_label (label_stmt);
1033 /* Make an edge to every label block that has been marked as a
1034 potential target for a computed goto or a non-local goto. */
1035 if (FORCED_LABEL (target))
1036 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1037 &ab_edge_goto, true);
1038 if (DECL_NONLOCAL (target))
1040 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1041 &ab_edge_call, false);
1042 break;
1046 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1047 gsi_next_nondebug (&gsi);
1048 if (!gsi_end_p (gsi))
1050 /* Make an edge to every setjmp-like call. */
1051 gimple *call_stmt = gsi_stmt (gsi);
1052 if (is_gimple_call (call_stmt)
1053 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1054 || gimple_call_builtin_p (call_stmt,
1055 BUILT_IN_SETJMP_RECEIVER)))
1056 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1057 &ab_edge_call, false);
1061 if (bb_to_omp_idx)
1062 XDELETE (dispatcher_bbs);
1065 XDELETE (bb_to_omp_idx);
1067 omp_free_regions ();
/* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  /* If the block count is unchanged, SEQ was absorbed into BB without
     creating any new blocks; nothing further to wire up.  */
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  /* For each freshly created block: build its outgoing edges, attach it
     to AFTERBB's loop, and derive a profile count from its incoming
     edges.  */
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      /* The inserted sequence must not open an OMP region or require a
	 fallthru to the next block.  */
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      /* Use the summed predecessor count only when every incoming edge
	 had an initialized count (or a real profile was read).  */
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Probe the per-line map, inserting an empty slot on first use.  */
  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First request for this line: allocate its counter record.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  /* Pre-increment, so the first discriminator handed out is 1.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1150 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1152 static bool
1153 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1155 expanded_location to;
1157 if (locus1 == locus2)
1158 return true;
1160 to = expand_location (locus2);
1162 if (from->line != to.line)
1163 return false;
1164 if (from->file == to.file)
1165 return true;
1166 return (from->file != NULL
1167 && to.file != NULL
1168 && filename_cmp (from->file, to.file) == 0);
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      /* Blocks without a located last statement need no discriminator.  */
      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  /* A successor sharing BB's source line needs disambiguation:
	     one of the two blocks gets a fresh discriminator.  */
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      /* Prefer to mark BB itself if the successor already has a
		 discriminator and BB does not; otherwise mark the
		 successor.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the false edge's result is NULL-checked; the true
     edge is dereferenced unconditionally.  Presumably make_edge cannot
     fail for the first edge out of BB but may return NULL for a
     duplicate (then_bb == else_bb) — confirm against make_edge.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1244 /* Called for each element in the hash table (P) as we delete the
1245 edge to cases hash table.
1247 Clear all the CASE_CHAINs to prevent problems with copying of
1248 SWITCH_EXPRs and structure sharing rules, then free the hash table
1249 element. */
1251 bool
1252 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1254 tree t, next;
1256 for (t = value; t; t = next)
1258 next = CASE_CHAIN (t);
1259 CASE_CHAIN (t) = NULL;
1262 return true;
1265 /* Start recording information mapping edges to case labels. */
1267 void
1268 start_recording_case_labels (void)
1270 gcc_assert (edge_to_cases == NULL);
1271 edge_to_cases = new hash_map<edge, tree>;
1272 touched_switch_bbs = BITMAP_ALLOC (NULL);
1275 /* Return nonzero if we are recording information for case labels. */
1277 static bool
1278 recording_case_labels_p (void)
1280 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Detach every CASE_CHAIN we threaded through the labels, then drop
     the map itself.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  /* Re-group case labels of every switch whose block was touched while
     recording was active.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      /* The block may have been removed in the meantime.  */
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Each label is pushed at the head, so the chain
	 lists cases in reverse order of the switch's label vector.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* The loop above populated the entry for E, so this lookup succeeds.  */
  return *edge_to_cases->get (e);
}
1348 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1350 static void
1351 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1353 size_t i, n;
1355 n = gimple_switch_num_labels (entry);
1357 for (i = 0; i < n; ++i)
1359 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1360 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      /* NOTE(review): this error-recovery path inserts into CFUN rather
	 than IFUN — presumably callers only reach it with ifun == cfun;
	 confirm before relying on it for cross-function queries.  */
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  /* Labels not (yet) present in the map have no block.  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1390 /* Create edges for a goto statement at block BB. Returns true
1391 if abnormal edges should be created. */
1393 static bool
1394 make_goto_expr_edges (basic_block bb)
1396 gimple_stmt_iterator last = gsi_last_bb (bb);
1397 gimple *goto_t = gsi_stmt (last);
1399 /* A simple GOTO creates normal edges. */
1400 if (simple_goto_p (goto_t))
1402 tree dest = gimple_goto_dest (goto_t);
1403 basic_block label_bb = label_to_block (cfun, dest);
1404 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1405 e->goto_locus = gimple_location (goto_t);
1406 gsi_remove (&last, true);
1407 return false;
1410 /* A computed GOTO creates abnormal edges. */
1411 return true;
1414 /* Create edges for an asm statement with labels at block BB. */
1416 static void
1417 make_gimple_asm_edges (basic_block bb)
1419 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1420 int i, n = gimple_asm_nlabels (stmt);
1422 for (i = 0; i < n; ++i)
1424 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1425 basic_block label_bb = label_to_block (cfun, label);
1426 make_edge (bb, label_bb, 0);
1430 /*---------------------------------------------------------------------------
1431 Flowgraph analysis
1432 ---------------------------------------------------------------------------*/
1434 /* Cleanup useless labels in basic blocks. This is something we wish
1435 to do early because it allows us to group case labels before creating
1436 the edges for the CFG, and it speeds up block statement iterators in
1437 all passes later on.
1438 We rerun this pass after CFG is created, to get rid of the labels that
 1439 are no longer referenced.  After that we do not run it anymore, since
1440 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Filled in and consumed by cleanup_dead_labels and its helpers.  */
struct label_record
{
  /* The label chosen to represent the block (NULL if none seen yet).  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};
1452 /* Given LABEL return the first label in the same basic block. */
1454 static tree
1455 main_block_label (tree label, label_record *label_for_bb)
1457 basic_block bb = label_to_block (cfun, label);
1458 tree main_label = label_for_bb[bb->index].label;
1460 /* label_to_block possibly inserted undefined label into the chain. */
1461 if (!main_label)
1463 label_for_bb[bb->index].label = label;
1464 main_label = label;
1467 label_for_bb[bb->index].used = true;
1468 return main_label;
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do for functions without EH data.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label to its block's
     leading label, moving the landing-pad number along with it.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels held in each EH region to block leaders.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no labels.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* Keep the leader, user labels, and labels with non-local or
	     address-taken semantics; delete the rest.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:
   Returns true if the label vector shrank.  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.
     Surviving cases are compacted in place: NEW_SIZE trails I, and the
     kept case is copied down into slot NEW_SIZE when any case before it
     was dropped.  Index 0 (the default case) is never touched.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  /* Skip past everything merged into the dropped case.  */
	  i = next_index;
	  continue;
	}

      /* Keep this (possibly widened) case, compacting it downward if
	 earlier cases were discarded.  */
      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
1818 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1819 and scan the sorted vector of cases. Combine the ones jumping to the
1820 same label. */
1822 bool
1823 group_case_labels (void)
1825 basic_block bb;
1826 bool changed = false;
1828 FOR_EACH_BB_FN (bb, cfun)
1830 gimple *stmt = last_stmt (bb);
1831 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1832 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1835 return changed;
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  /* A must flow unconditionally and exclusively into B.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  /* The artificial ENTRY and EXIT blocks are never merged.  */
  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if some statement adjacent to the
	 removed edge already carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  /* A use in a PHI arg arriving over an abnormal edge forces
	     VAL to be flagged as occurring in an abnormal PHI.  */
	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* Non-PHI statements may now fold; PHIs need no further work.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have made the statement unable to throw.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      class loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  A must satisfy gimple_can_merge_blocks_p
   (single successor A -> B, single predecessor of B).  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* The deleted label can no longer serve as a landing pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = a->count.merge (b->count);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2150 /* Return the one of two successors of BB that is not reachable by a
2151 complex edge, if there is one. Else, return BB. We use
2152 this in optimizations that use post-dominators for their heuristics,
2153 to catch the cases in C++ where function calls are involved. */
2155 basic_block
2156 single_noncomplex_succ (basic_block bb)
2158 edge e0, e1;
2159 if (EDGE_COUNT (bb->succs) != 2)
2160 return bb;
2162 e0 = EDGE_SUCC (bb, 0);
2163 e1 = EDGE_SUCC (bb, 1);
2164 if (e0->flags & EDGE_COMPLEX)
2165 return e1->dest;
2166 if (e1->flags & EDGE_COMPLEX)
2167 return e0->dest;
2169 return bb;
2172 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2174 void
2175 notice_special_calls (gcall *call)
2177 int flags = gimple_call_flags (call);
2179 if (flags & ECF_MAY_BE_ALLOCA)
2180 cfun->calls_alloca = true;
2181 if (flags & ECF_RETURNS_TWICE)
2182 cfun->calls_setjmp = true;
2186 /* Clear flags set by notice_special_calls. Used by dead code removal
2187 to update the flags. */
2189 void
2190 clear_special_calls (void)
2192 cfun->calls_alloca = false;
2193 cfun->calls_setjmp = false;
2196 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2198 static void
2199 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2201 /* Since this block is no longer reachable, we can just delete all
2202 of its PHI nodes. */
2203 remove_phi_nodes (bb);
2205 /* Remove edges to BB's successors. */
2206 while (EDGE_COUNT (bb->succs) > 0)
2207 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      class loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  /* Forced and non-local labels must survive the block's
	     deletion, so they are moved rather than discarded.  */
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      /* Don't move any labels into ENTRY block.  */
	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		{
		  new_bb = single_succ (new_bb);
		  gcc_assert (new_bb != bb);
		}
	      new_gsi = gsi_after_labels (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* Removing the current statement invalidated I; restart from
	     the (new) last statement, else step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  /* Finally detach BB from the CFG and clear its statement lists.  */
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2297 /* Given a basic block BB and a value VAL for use in the final statement
2298 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2299 the edge that will be taken out of the block.
2300 If VAL is NULL_TREE, then the current value of the final statement's
2301 predicate or index is used.
2302 If the value does not match a unique edge, NULL is returned. */
2304 edge
2305 find_taken_edge (basic_block bb, tree val)
2307 gimple *stmt;
2309 stmt = last_stmt (bb);
2311 /* Handle ENTRY and EXIT. */
2312 if (!stmt)
2313 return NULL;
2315 if (gimple_code (stmt) == GIMPLE_COND)
2316 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2318 if (gimple_code (stmt) == GIMPLE_SWITCH)
2319 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2321 if (computed_goto_p (stmt))
2323 /* Only optimize if the argument is a label, if the argument is
2324 not a label then we cannot construct a proper CFG.
2326 It may be the case that we only need to allow the LABEL_REF to
2327 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2328 appear inside a LABEL_EXPR just to be safe. */
2329 if (val
2330 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2331 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2332 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2335 /* Otherwise we only know the taken successor edge if it's unique. */
2336 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2339 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2340 statement, determine which of the outgoing edges will be taken out of the
2341 block. Return NULL if either edge may be taken. */
2343 static edge
2344 find_taken_edge_computed_goto (basic_block bb, tree val)
2346 basic_block dest;
2347 edge e = NULL;
2349 dest = label_to_block (cfun, val);
2350 if (dest)
2351 e = find_edge (bb, dest);
2353 /* It's possible for find_edge to return NULL here on invalid code
2354 that abuses the labels-as-values extension (e.g. code that attempts to
2355 jump *between* functions via stored labels-as-values; PR 84136).
2356 If so, then we simply return that NULL for the edge.
2357 We don't currently have a way of detecting such invalid code, so we
2358 can't assert that it was the case when a NULL edge occurs here. */
2360 return e;
2363 /* Given COND_STMT and a constant value VAL for use as the predicate,
2364 determine which of the two edges will be taken out of
2365 the statement's block. Return NULL if either edge may be taken.
2366 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2367 is used. */
2369 static edge
2370 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2372 edge true_edge, false_edge;
2374 if (val == NULL_TREE)
2376 /* Use the current value of the predicate. */
2377 if (gimple_cond_true_p (cond_stmt))
2378 val = integer_one_node;
2379 else if (gimple_cond_false_p (cond_stmt))
2380 val = integer_zero_node;
2381 else
2382 return NULL;
2384 else if (TREE_CODE (val) != INTEGER_CST)
2385 return NULL;
2387 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2388 &true_edge, &false_edge);
2390 return (integer_zerop (val) ? false_edge : true_edge);
2393 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2394 which edge will be taken out of the statement's block. Return NULL if any
2395 edge may be taken.
2396 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2397 is used. */
2399 edge
2400 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2402 basic_block dest_bb;
2403 edge e;
2404 tree taken_case;
2406 if (gimple_switch_num_labels (switch_stmt) == 1)
2407 taken_case = gimple_switch_default_label (switch_stmt);
2408 else
2410 if (val == NULL_TREE)
2411 val = gimple_switch_index (switch_stmt);
2412 if (TREE_CODE (val) != INTEGER_CST)
2413 return NULL;
2414 else
2415 taken_case = find_case_label_for_value (switch_stmt, val);
2417 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2419 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2420 gcc_assert (e);
2421 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Label 0 is the default label and is never examined by the search;
   it is returned when no case matches.  */

tree
find_case_label_for_value (const gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      /* Note LOW moves to I (not I + 1) so a range whose CASE_LOW is
	 below VAL stays available for the CASE_HIGH test below.  */
      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
/* Dump a basic block on stderr.  Includes virtual operands, memory
   symbols and block boundaries in the output.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2476 /* Dump basic block with index N on stderr. */
2478 basic_block
2479 gimple_debug_bb_n (int n)
2481 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2482 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (dump_flags_t flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  With TDF_DETAILS a header and a brief CFG summary are
   printed first; with TDF_STATS the CFG memory statistics follow.  */

void
gimple_dump_cfg (FILE *file, dump_flags_t flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE: memory consumed by basic blocks and
   edges, plus the running count of coalesced label blocks.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, persists across calls.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
  const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SIZE_AMOUNT (size));

  /* Edges are counted by walking each block's successor vector.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (class edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2583 /*---------------------------------------------------------------------------
2584 Miscellaneous helpers
2585 ---------------------------------------------------------------------------*/
2587 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2588 flow. Transfers of control flow associated with EH are excluded. */
2590 static bool
2591 call_can_make_abnormal_goto (gimple *t)
2593 /* If the function has no non-local labels, then a call cannot make an
2594 abnormal transfer of control. */
2595 if (!cfun->has_nonlocal_label
2596 && !cfun->calls_setjmp)
2597 return false;
2599 /* Likewise if the call has no side effects. */
2600 if (!gimple_has_side_effects (t))
2601 return false;
2603 /* Likewise if the called function is leaf. */
2604 if (gimple_call_flags (t) & ECF_LEAF)
2605 return false;
2607 return true;
2611 /* Return true if T can make an abnormal transfer of control flow.
2612 Transfers of control flow associated with EH are excluded. */
2614 bool
2615 stmt_can_make_abnormal_goto (gimple *t)
2617 if (computed_goto_p (t))
2618 return true;
2619 if (is_gimple_call (t))
2620 return call_can_make_abnormal_goto (t);
2621 return false;
2625 /* Return true if T represents a stmt that always transfers control. */
2627 bool
2628 is_ctrl_stmt (gimple *t)
2630 switch (gimple_code (t))
2632 case GIMPLE_COND:
2633 case GIMPLE_SWITCH:
2634 case GIMPLE_GOTO:
2635 case GIMPLE_RETURN:
2636 case GIMPLE_RESX:
2637 return true;
2638 default:
2639 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto (one with target labels) alters control flow.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (cfun, t);
}
2689 /* Return true if T is a simple local goto. */
2691 bool
2692 simple_goto_p (gimple *t)
2694 return (gimple_code (t) == GIMPLE_GOTO
2695 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* PREV_STMT is only set to a debug stmt if the debug stmt is before
     any nondebug stmts in the block.  We don't want to start another
     block in this case: the debug stmt will already have started the
     one STMT would start if we weren't outputting debug stmts.  */
  if (prev_stmt && is_gimple_debug (prev_stmt))
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
	{
	  /* A user label after a nonlocal or user label still needs
	     its own block; only runs of artificial labels merge.  */
	  if (DECL_NONLOCAL (gimple_label_label (plabel))
	      || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similar to a nonlocal GOTO target and thus should
	   start a new block.  */
	return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
	  && prev_stmt
	  && gimple_code (prev_stmt) != GIMPLE_LABEL
	  && (gimple_code (prev_stmt) != GIMPLE_CALL
	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
	return true;
    }

  return false;
}
2761 /* Return true if T should end a basic block. */
2763 bool
2764 stmt_ends_bb_p (gimple *t)
2766 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  Currently this
   only frees FN's label-to-block map.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
2777 /* Return the virtual phi in BB. */
2779 gphi *
2780 get_virtual_phi (basic_block bb)
2782 for (gphi_iterator gsi = gsi_start_phis (bb);
2783 !gsi_end_p (gsi);
2784 gsi_next (&gsi))
2786 gphi *phi = gsi.phi ();
2788 if (virtual_operand_p (PHI_RESULT (phi)))
2789 return phi;
2792 return NULL;
/* Return the first non-debug statement in basic block BB, or NULL if
   the block contains only debug statements (or nothing at all).  */

gimple *
first_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  gimple *stmt = NULL;

  /* The loop condition both fetches the candidate into STMT and tests
     whether it is a debug stmt to be skipped.  */
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_next (&i);
      stmt = NULL;
    }
  return stmt;
}
2811 /* Return the first non-label statement in basic block BB. */
2813 static gimple *
2814 first_non_label_stmt (basic_block bb)
2816 gimple_stmt_iterator i = gsi_start_bb (bb);
2817 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2818 gsi_next (&i);
2819 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
/* Return the last non-debug statement in basic block BB, or NULL if
   the block contains only debug statements (or nothing at all).  */

gimple *
last_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_bb (bb);
  gimple *stmt = NULL;

  /* Walk backwards, skipping debug statements; the loop condition both
     fetches the candidate into STMT and tests it.  */
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_prev (&i);
      stmt = NULL;
    }
  return stmt;
}
2838 /* Return the last statement of an otherwise empty block. Return NULL
2839 if the block is totally empty, or if it contains more than one
2840 statement. */
2842 gimple *
2843 last_and_only_stmt (basic_block bb)
2845 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2846 gimple *last, *prev;
2848 if (gsi_end_p (i))
2849 return NULL;
2851 last = gsi_stmt (i);
2852 gsi_prev_nondebug (&i);
2853 if (gsi_end_p (i))
2854 return last;
2856 /* Empty statements should no longer appear in the instruction stream.
2857 Everything that might have appeared before should be deleted by
2858 remove_useless_stmts, and the optimizers should just gsi_remove
2859 instead of smashing with build_empty_stmt.
2861 Thus the only thing that should appear here in a block containing
2862 one executable statement is a label. */
2863 prev = gsi_stmt (i);
2864 if (gimple_code (prev) == GIMPLE_LABEL)
2865 return last;
2866 else
2867 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  The
   queued (result, def) pairs were recorded by edge redirection; walk
   them in parallel with NEW_EDGE->dest's PHIs and add each def back as
   the PHI argument for NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The map entries must line up one-to-one with the PHI nodes.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued data has been consumed; drop it.  */
  redirect_edge_var_map_clear (old_edge);
}
2899 /* Returns the basic block after which the new basic block created
2900 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2901 near its "logical" location. This is of most help to humans looking
2902 at debugging dumps. */
2904 basic_block
2905 split_edge_bb_loc (edge edge_in)
2907 basic_block dest = edge_in->dest;
2908 basic_block dest_prev = dest->prev_bb;
2910 if (dest_prev)
2912 edge e = find_edge (dest_prev, dest);
2913 if (e && !(e->flags & EDGE_COMPLEX))
2914 return edge_in->src;
2916 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  The new block inherits EDGE_IN's count and
   falls through to EDGE_IN's original destination.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  new_bb->count = edge_in->count ();

  /* Redirect the incoming edge into the new block ...  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);

  /* ... and fall through from the new block to the old destination,
     restoring any PHI arguments queued by the redirection.  */
  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T whose base should be
   TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true.  Returns true if an
   error was found, false otherwise.  */

static bool
verify_address (tree t, bool verify_addressable)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Recompute the invariant/side-effect bits and check that the cached
     values on T were up to date.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when %<ADDR_EXPR%> changed");
      return true;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when %<ADDR_EXPR%> changed");
      return true;
    }

  /* Strip component references down to the base object.  */
  tree base = TREE_OPERAND (t, 0);
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* Only decls get the checks below; other bases are fine as-is.  */
  if (!(VAR_P (base)
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return false;

  if (DECL_GIMPLE_REG_P (base))
    {
      error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
      return true;
    }

  if (verify_addressable && !TREE_ADDRESSABLE (base))
    {
      error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
      return true;
    }

  return false;
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  /* Anything else must be some form of memory reference.  */
  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  const char *code_name = get_tree_code_name (TREE_CODE (expr));

  /* REALPART/IMAGPART/BIT_FIELD_REF are only allowed at the top level
     of a reference tree; check them first and then strip them.  */
  if (TREE_CODE (expr) == REALPART_EXPR
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!is_gimple_reg_type (TREE_TYPE (expr)))
	{
	  error ("non-scalar %qs", code_name);
	  return true;
	}

      if (TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  tree t1 = TREE_OPERAND (expr, 1);
	  tree t2 = TREE_OPERAND (expr, 2);
	  poly_uint64 size, bitpos;
	  /* Size and position must be poly-int constants of bitsizetype.  */
	  if (!poly_int_tree_p (t1, &size)
	      || !poly_int_tree_p (t2, &bitpos)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to %qs", code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
	    {
	      error ("integral result type precision does not match "
		     "field size of %qs", code_name);
	      return true;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
				size))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of %qs",
		     code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
	      && !type_has_mode_precision_p (TREE_TYPE (op)))
	    {
	      error ("%qs of non-mode-precision operand", code_name);
	      return true;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
	      && maybe_gt (size + bitpos,
			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "%qs", code_name);
	      return true;
	    }
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs reference", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      expr = op;
    }

  /* Walk down the handled-component chain, checking each level.  */
  while (handled_component_p (expr))
    {
      code_name = get_tree_code_name (TREE_CODE (expr));

      if (TREE_CODE (expr) == REALPART_EXPR
	  || TREE_CODE (expr) == IMAGPART_EXPR
	  || TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  error ("non-top-level %qs", code_name);
	  return true;
	}

      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index and the optional lower-bound/element-size operands
	     must be gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to %qs", code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  if (TREE_OPERAND (expr, 2)
	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
	    {
	      error ("invalid %qs offset operator", code_name);
	      return true;
	    }
	  if (!useless_type_conversion_p (TREE_TYPE (expr),
					  TREE_TYPE (TREE_OPERAND (expr, 1))))
	    {
	      error ("type mismatch in %qs", code_name);
	      debug_generic_stmt (TREE_TYPE (expr));
	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	      return true;
	    }
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of %qs on the left hand side of %qs",
		     get_tree_code_name (TREE_CODE (op)), code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size in %qs",
		     code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference.  */
  code_name = get_tree_code_name (TREE_CODE (expr));

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	      && verify_address (TREE_OPERAND (expr, 0), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
	      && verify_address (TMR_BASE (expr), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || !poly_int_tree_p (TMR_OFFSET (expr))
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == INDIRECT_REF)
    {
      error ("%qs in gimple IL", code_name);
      debug_generic_stmt (expr);
      return true;
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3277 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3278 list of pointer-to types that is trivially convertible to DEST. */
3280 static bool
3281 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3283 tree src;
3285 if (!TYPE_POINTER_TO (src_obj))
3286 return true;
3288 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3289 if (useless_type_conversion_p (dest, src))
3290 return true;
3292 return false;
3295 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3296 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3298 static bool
3299 valid_fixed_convert_types_p (tree type1, tree type2)
3301 return (FIXED_POINT_TYPE_P (type1)
3302 && (INTEGRAL_TYPE_P (type2)
3303 || SCALAR_FLOAT_TYPE_P (type2)
3304 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no target; ordinary calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The target must be a pointer to a FUNCTION_TYPE or METHOD_TYPE.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  fndecl = gimple_call_fndecl (stmt);
  /* A looping-const-or-pure function must also be pure or const.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in %<noreturn%> call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ???  At least C++ misses conversions at assignments from
	 void * call results.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_TRAP:
	  if (gimple_call_num_args (stmt) > 0)
	    {
	      /* Built-in unreachable with parameters might not be caught by
		 undefined behavior sanitizer.  Front-ends do check users do not
		 call them that way but we also produce calls to
		 __builtin_unreachable internally, for example when IPA figures
		 out a call cannot happen in a legal program.  In such cases,
		 we must make sure arguments are stripped off.  */
	      error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
		     "with arguments");
	      return true;
	    }
	  break;
	default:
	  break;
	}
    }

  /* ???  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3459 /* Verifies the gimple comparison with the result type TYPE and
3460 the operands OP0 and OP1, comparison code is CODE. */
     /* Returns true (after emitting a diagnostic via error ()) if the
        comparison is malformed, false if it verifies cleanly.  */
3462 static bool
3463 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3465 tree op0_type = TREE_TYPE (op0);
3466 tree op1_type = TREE_TYPE (op1);
     /* Both comparison operands must be gimple values.  */
3468 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3470 error ("invalid operands in gimple comparison");
3471 return true;
3474 /* For comparisons we do not have the operations type as the
3475 effective type the comparison is carried out in. Instead
3476 we require that either the first operand is trivially
3477 convertible into the second, or the other way around.
3478 Because we special-case pointers to void we allow
3479 comparisons of pointers with the same mode as well. */
3480 if (!useless_type_conversion_p (op0_type, op1_type)
3481 && !useless_type_conversion_p (op1_type, op0_type)
3482 && (!POINTER_TYPE_P (op0_type)
3483 || !POINTER_TYPE_P (op1_type)
3484 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3486 error ("mismatching comparison operand types");
3487 debug_generic_expr (op0_type);
3488 debug_generic_expr (op1_type);
3489 return true;
3492 /* The resulting type of a comparison may be an effective boolean type. */
3493 if (INTEGRAL_TYPE_P (type)
3494 && (TREE_CODE (type) == BOOLEAN_TYPE
3495 || TYPE_PRECISION (type) == 1))
     /* A scalar boolean result from vector operands is only accepted for
        EQ/NE, or when the first operand is a boolean or integer vector.  */
3497 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3498 || TREE_CODE (op1_type) == VECTOR_TYPE)
3499 && code != EQ_EXPR && code != NE_EXPR
3500 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3501 && !VECTOR_INTEGER_TYPE_P (op0_type))
3503 error ("unsupported operation or type for vector comparison"
3504 " returning a boolean");
3505 debug_generic_expr (op0_type);
3506 debug_generic_expr (op1_type);
3507 return true;
3510 /* Or a boolean vector type with the same element count
3511 as the comparison operand types. */
3512 else if (TREE_CODE (type) == VECTOR_TYPE
3513 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
     /* Boolean-vector result: both operands must be vectors ...  */
3515 if (TREE_CODE (op0_type) != VECTOR_TYPE
3516 || TREE_CODE (op1_type) != VECTOR_TYPE)
3518 error ("non-vector operands in vector comparison");
3519 debug_generic_expr (op0_type);
3520 debug_generic_expr (op1_type);
3521 return true;
     /* ... and the result must have the same number of elements.  */
3524 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3525 TYPE_VECTOR_SUBPARTS (op0_type)))
3527 error ("invalid vector comparison resulting type");
3528 debug_generic_expr (type);
3529 return true;
     /* Any other result type is invalid for a comparison.  */
3532 else
3534 error ("bogus comparison result type");
3535 debug_generic_expr (type);
3536 return true;
3539 return false;
3542 /* Verify a gimple assignment statement STMT with an unary rhs.
3543 Returns true if anything is wrong. */
     /* On failure an error () diagnostic is emitted and the offending
        types are dumped with debug_generic_expr.  */
3545 static bool
3546 verify_gimple_assign_unary (gassign *stmt)
3548 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3549 tree lhs = gimple_assign_lhs (stmt);
3550 tree lhs_type = TREE_TYPE (lhs);
3551 tree rhs1 = gimple_assign_rhs1 (stmt);
3552 tree rhs1_type = TREE_TYPE (rhs1);
     /* The LHS of a unary operation must be a register ...  */
3554 if (!is_gimple_reg (lhs))
3556 error ("non-register as LHS of unary operation");
3557 return true;
     /* ... and the single operand must be a gimple value.  */
3560 if (!is_gimple_val (rhs1))
3562 error ("invalid operand in unary operation");
3563 return true;
3566 const char* const code_name = get_tree_code_name (rhs_code);
3568 /* First handle conversions. */
3569 switch (rhs_code)
3571 CASE_CONVERT:
3573 /* Allow conversions between vectors with the same number of elements,
3574 provided that the conversion is OK for the element types too. */
3575 if (VECTOR_TYPE_P (lhs_type)
3576 && VECTOR_TYPE_P (rhs1_type)
3577 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3578 TYPE_VECTOR_SUBPARTS (rhs1_type)))
     /* Strip the vector layer; the element types are checked below.  */
3580 lhs_type = TREE_TYPE (lhs_type);
3581 rhs1_type = TREE_TYPE (rhs1_type);
3583 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3585 error ("invalid vector types in nop conversion");
3586 debug_generic_expr (lhs_type);
3587 debug_generic_expr (rhs1_type);
3588 return true;
3591 /* Allow conversions from pointer type to integral type only if
3592 there is no sign or zero extension involved.
3593 For targets were the precision of ptrofftype doesn't match that
3594 of pointers we need to allow arbitrary conversions to ptrofftype. */
3595 if ((POINTER_TYPE_P (lhs_type)
3596 && INTEGRAL_TYPE_P (rhs1_type))
3597 || (POINTER_TYPE_P (rhs1_type)
3598 && INTEGRAL_TYPE_P (lhs_type)
3599 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3600 || ptrofftype_p (lhs_type))))
3601 return false;
3603 /* Allow conversion from integral to offset type and vice versa. */
3604 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3605 && INTEGRAL_TYPE_P (rhs1_type))
3606 || (INTEGRAL_TYPE_P (lhs_type)
3607 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3608 return false;
3610 /* Otherwise assert we are converting between types of the
3611 same kind. */
3612 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3614 error ("invalid types in nop conversion");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3620 return false;
     /* Address-space conversion must be pointer-to-pointer and actually
        change the address space.  */
3623 case ADDR_SPACE_CONVERT_EXPR:
3625 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3626 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3627 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3629 error ("invalid types in address space conversion");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 return true;
3635 return false;
3638 case FIXED_CONVERT_EXPR:
3640 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3641 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3643 error ("invalid types in fixed-point conversion");
3644 debug_generic_expr (lhs_type);
3645 debug_generic_expr (rhs1_type);
3646 return true;
3649 return false;
     /* Integer -> float conversion, scalar or element-wise on vectors.  */
3652 case FLOAT_EXPR:
3654 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3655 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3656 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3658 error ("invalid types in conversion to floating-point");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 return true;
3664 return false;
     /* Float -> integer truncation, scalar or element-wise on vectors.  */
3667 case FIX_TRUNC_EXPR:
3669 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3670 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3671 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3673 error ("invalid types in conversion to integer");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 return true;
3679 return false;
     /* Vector unpack: result elements are (normally) twice the size and
        half the count of the operand elements; the FLOAT/FIX variants
        additionally constrain which side is integral.  */
3682 case VEC_UNPACK_HI_EXPR:
3683 case VEC_UNPACK_LO_EXPR:
3684 case VEC_UNPACK_FLOAT_HI_EXPR:
3685 case VEC_UNPACK_FLOAT_LO_EXPR:
3686 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3687 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3688 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3689 || TREE_CODE (lhs_type) != VECTOR_TYPE
3690 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3691 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3694 || ((rhs_code == VEC_UNPACK_HI_EXPR
3695 || rhs_code == VEC_UNPACK_LO_EXPR)
3696 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3697 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3698 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3699 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3700 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3701 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3702 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3703 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3704 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3705 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3706 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3707 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3708 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3709 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3710 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3711 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3713 error ("type mismatch in %qs expression", code_name);
3714 debug_generic_expr (lhs_type);
3715 debug_generic_expr (rhs1_type);
3716 return true;
3719 return false;
     /* These codes involve no type change; checked generically below.  */
3721 case NEGATE_EXPR:
3722 case ABS_EXPR:
3723 case BIT_NOT_EXPR:
3724 case PAREN_EXPR:
3725 case CONJ_EXPR:
3726 break;
     /* ABSU: signed (integral) operand, unsigned result of equal
        element precision.  */
3728 case ABSU_EXPR:
3729 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3730 || !TYPE_UNSIGNED (lhs_type)
3731 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3732 || TYPE_UNSIGNED (rhs1_type)
3733 || element_precision (lhs_type) != element_precision (rhs1_type))
3735 error ("invalid types for %qs", code_name);
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 return true;
3740 return false;
3742 case VEC_DUPLICATE_EXPR:
3743 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3744 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3746 error ("%qs should be from a scalar to a like vector", code_name);
3747 debug_generic_expr (lhs_type);
3748 debug_generic_expr (rhs1_type);
3749 return true;
3751 return false;
3753 default:
3754 gcc_unreachable ();
3757 /* For the remaining codes assert there is no conversion involved. */
3758 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3760 error ("non-trivial conversion in unary operation");
3761 debug_generic_expr (lhs_type);
3762 debug_generic_expr (rhs1_type);
3763 return true;
3766 return false;
3769 /* Verify a gimple assignment statement STMT with a binary rhs.
3770 Returns true if anything is wrong. */
     /* On failure an error () diagnostic is emitted and the offending
        types are dumped.  */
3772 static bool
3773 verify_gimple_assign_binary (gassign *stmt)
3775 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3776 tree lhs = gimple_assign_lhs (stmt);
3777 tree lhs_type = TREE_TYPE (lhs);
3778 tree rhs1 = gimple_assign_rhs1 (stmt);
3779 tree rhs1_type = TREE_TYPE (rhs1);
3780 tree rhs2 = gimple_assign_rhs2 (stmt);
3781 tree rhs2_type = TREE_TYPE (rhs2);
     /* The LHS must be a register ...  */
3783 if (!is_gimple_reg (lhs))
3785 error ("non-register as LHS of binary operation");
3786 return true;
     /* ... and both operands must be gimple values.  */
3789 if (!is_gimple_val (rhs1)
3790 || !is_gimple_val (rhs2))
3792 error ("invalid operands in binary operation");
3793 return true;
3796 const char* const code_name = get_tree_code_name (rhs_code);
3798 /* First handle operations that involve different types. */
3799 switch (rhs_code)
     /* COMPLEX_EXPR builds a complex value from two scalar parts.  */
3801 case COMPLEX_EXPR:
3803 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3804 || !(INTEGRAL_TYPE_P (rhs1_type)
3805 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3806 || !(INTEGRAL_TYPE_P (rhs2_type)
3807 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3809 error ("type mismatch in %qs", code_name);
3810 debug_generic_expr (lhs_type);
3811 debug_generic_expr (rhs1_type);
3812 debug_generic_expr (rhs2_type);
3813 return true;
3816 return false;
3819 case LSHIFT_EXPR:
3820 case RSHIFT_EXPR:
3821 case LROTATE_EXPR:
3822 case RROTATE_EXPR:
3824 /* Shifts and rotates are ok on integral types, fixed point
3825 types and integer vector types. */
3826 if ((!INTEGRAL_TYPE_P (rhs1_type)
3827 && !FIXED_POINT_TYPE_P (rhs1_type)
3828 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3829 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3830 || (!INTEGRAL_TYPE_P (rhs2_type)
3831 /* Vector shifts of vectors are also ok. */
3832 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3833 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3834 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3835 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3836 || !useless_type_conversion_p (lhs_type, rhs1_type))
3838 error ("type mismatch in %qs", code_name);
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs1_type);
3841 debug_generic_expr (rhs2_type);
3842 return true;
3845 return false;
     /* Widening shift: constant shift count, result at least twice as
        wide as the first operand.  */
3848 case WIDEN_LSHIFT_EXPR:
3850 if (!INTEGRAL_TYPE_P (lhs_type)
3851 || !INTEGRAL_TYPE_P (rhs1_type)
3852 || TREE_CODE (rhs2) != INTEGER_CST
3853 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3855 error ("type mismatch in %qs", code_name);
3856 debug_generic_expr (lhs_type);
3857 debug_generic_expr (rhs1_type);
3858 debug_generic_expr (rhs2_type);
3859 return true;
3862 return false;
3865 case VEC_WIDEN_LSHIFT_HI_EXPR:
3866 case VEC_WIDEN_LSHIFT_LO_EXPR:
3868 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3869 || TREE_CODE (lhs_type) != VECTOR_TYPE
3870 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3871 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3872 || TREE_CODE (rhs2) != INTEGER_CST
3873 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3874 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3876 error ("type mismatch in %qs", code_name);
3877 debug_generic_expr (lhs_type);
3878 debug_generic_expr (rhs1_type);
3879 debug_generic_expr (rhs2_type);
3880 return true;
3883 return false;
3886 case PLUS_EXPR:
3887 case MINUS_EXPR:
     /* For vectors, check the element types; pointer arithmetic must use
        POINTER_PLUS_EXPR / POINTER_DIFF_EXPR instead.  */
3889 tree lhs_etype = lhs_type;
3890 tree rhs1_etype = rhs1_type;
3891 tree rhs2_etype = rhs2_type;
3892 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3894 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3895 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3897 error ("invalid non-vector operands to %qs", code_name);
3898 return true;
3900 lhs_etype = TREE_TYPE (lhs_type);
3901 rhs1_etype = TREE_TYPE (rhs1_type);
3902 rhs2_etype = TREE_TYPE (rhs2_type);
3904 if (POINTER_TYPE_P (lhs_etype)
3905 || POINTER_TYPE_P (rhs1_etype)
3906 || POINTER_TYPE_P (rhs2_etype))
3908 error ("invalid (pointer) operands %qs", code_name);
3909 return true;
3912 /* Continue with generic binary expression handling. */
3913 break;
     /* Pointer plus offset: rhs2 must be of pointer-offset type.  */
3916 case POINTER_PLUS_EXPR:
3918 if (!POINTER_TYPE_P (rhs1_type)
3919 || !useless_type_conversion_p (lhs_type, rhs1_type)
3920 || !ptrofftype_p (rhs2_type))
3922 error ("type mismatch in %qs", code_name);
3923 debug_generic_stmt (lhs_type);
3924 debug_generic_stmt (rhs1_type);
3925 debug_generic_stmt (rhs2_type);
3926 return true;
3929 return false;
3932 case POINTER_DIFF_EXPR:
3934 if (!POINTER_TYPE_P (rhs1_type)
3935 || !POINTER_TYPE_P (rhs2_type)
3936 /* Because we special-case pointers to void we allow difference
3937 of arbitrary pointers with the same mode. */
3938 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3939 || TREE_CODE (lhs_type) != INTEGER_TYPE
3940 || TYPE_UNSIGNED (lhs_type)
3941 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3943 error ("type mismatch in %qs", code_name);
3944 debug_generic_stmt (lhs_type);
3945 debug_generic_stmt (rhs1_type);
3946 debug_generic_stmt (rhs2_type);
3947 return true;
3950 return false;
     /* High-level truth operations never survive into GIMPLE.  */
3953 case TRUTH_ANDIF_EXPR:
3954 case TRUTH_ORIF_EXPR:
3955 case TRUTH_AND_EXPR:
3956 case TRUTH_OR_EXPR:
3957 case TRUTH_XOR_EXPR:
3959 gcc_unreachable ();
3961 case LT_EXPR:
3962 case LE_EXPR:
3963 case GT_EXPR:
3964 case GE_EXPR:
3965 case EQ_EXPR:
3966 case NE_EXPR:
3967 case UNORDERED_EXPR:
3968 case ORDERED_EXPR:
3969 case UNLT_EXPR:
3970 case UNLE_EXPR:
3971 case UNGT_EXPR:
3972 case UNGE_EXPR:
3973 case UNEQ_EXPR:
3974 case LTGT_EXPR:
3975 /* Comparisons are also binary, but the result type is not
3976 connected to the operand types. */
3977 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3979 case WIDEN_MULT_EXPR:
3980 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3981 return true;
     /* NOTE: this case reports failure via the return value only,
        without an error () diagnostic.  */
3982 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3983 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3985 case WIDEN_SUM_EXPR:
3987 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3988 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3989 && ((!INTEGRAL_TYPE_P (rhs1_type)
3990 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3991 || (!INTEGRAL_TYPE_P (lhs_type)
3992 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3993 || !useless_type_conversion_p (lhs_type, rhs2_type)
3994 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3995 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3997 error ("type mismatch in %qs", code_name);
3998 debug_generic_expr (lhs_type);
3999 debug_generic_expr (rhs1_type);
4000 debug_generic_expr (rhs2_type);
4001 return true;
4003 return false;
     /* Widening vector multiplies: operand vectors compatible, result
        elements twice the size of the operand elements.  */
4006 case VEC_WIDEN_MULT_HI_EXPR:
4007 case VEC_WIDEN_MULT_LO_EXPR:
4008 case VEC_WIDEN_MULT_EVEN_EXPR:
4009 case VEC_WIDEN_MULT_ODD_EXPR:
4011 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4012 || TREE_CODE (lhs_type) != VECTOR_TYPE
4013 || !types_compatible_p (rhs1_type, rhs2_type)
4014 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4015 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4017 error ("type mismatch in %qs", code_name);
4018 debug_generic_expr (lhs_type);
4019 debug_generic_expr (rhs1_type);
4020 debug_generic_expr (rhs2_type);
4021 return true;
4023 return false;
4026 case VEC_PACK_TRUNC_EXPR:
4027 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4028 vector boolean types. */
4029 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4030 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4031 && types_compatible_p (rhs1_type, rhs2_type)
4032 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4033 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4034 return false;
4036 /* Fallthru. */
4037 case VEC_PACK_SAT_EXPR:
4038 case VEC_PACK_FIX_TRUNC_EXPR:
4040 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4041 || TREE_CODE (lhs_type) != VECTOR_TYPE
4042 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4043 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4044 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4045 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4046 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4047 || !types_compatible_p (rhs1_type, rhs2_type)
4048 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4049 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4050 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4051 TYPE_VECTOR_SUBPARTS (lhs_type)))
4053 error ("type mismatch in %qs", code_name);
4054 debug_generic_expr (lhs_type);
4055 debug_generic_expr (rhs1_type);
4056 debug_generic_expr (rhs2_type);
4057 return true;
4060 return false;
4063 case VEC_PACK_FLOAT_EXPR:
4064 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4065 || TREE_CODE (lhs_type) != VECTOR_TYPE
4066 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4067 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4068 || !types_compatible_p (rhs1_type, rhs2_type)
4069 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4070 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4071 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4072 TYPE_VECTOR_SUBPARTS (lhs_type)))
4074 error ("type mismatch in %qs", code_name);
4075 debug_generic_expr (lhs_type);
4076 debug_generic_expr (rhs1_type);
4077 debug_generic_expr (rhs2_type);
4078 return true;
4081 return false;
     /* These codes require matching lhs/rhs types; checked generically
        after the switch.  */
4083 case MULT_EXPR:
4084 case MULT_HIGHPART_EXPR:
4085 case TRUNC_DIV_EXPR:
4086 case CEIL_DIV_EXPR:
4087 case FLOOR_DIV_EXPR:
4088 case ROUND_DIV_EXPR:
4089 case TRUNC_MOD_EXPR:
4090 case CEIL_MOD_EXPR:
4091 case FLOOR_MOD_EXPR:
4092 case ROUND_MOD_EXPR:
4093 case RDIV_EXPR:
4094 case EXACT_DIV_EXPR:
4095 case MIN_EXPR:
4096 case MAX_EXPR:
4097 case BIT_IOR_EXPR:
4098 case BIT_XOR_EXPR:
4099 case BIT_AND_EXPR:
4100 /* Continue with generic binary expression handling. */
4101 break;
     /* VEC_SERIES: base and step are scalars, result a vector of them.  */
4103 case VEC_SERIES_EXPR:
4104 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4106 error ("type mismatch in %qs", code_name);
4107 debug_generic_expr (rhs1_type);
4108 debug_generic_expr (rhs2_type);
4109 return true;
4111 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4112 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4114 error ("vector type expected in %qs", code_name);
4115 debug_generic_expr (lhs_type);
4116 return true;
4118 return false;
4120 default:
4121 gcc_unreachable ();
     /* Generic handling: both operand types must trivially convert to
        the LHS type.  */
4124 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4125 || !useless_type_conversion_p (lhs_type, rhs2_type))
4127 error ("type mismatch in binary expression");
4128 debug_generic_stmt (lhs_type);
4129 debug_generic_stmt (rhs1_type);
4130 debug_generic_stmt (rhs2_type);
4131 return true;
4134 return false;
4137 /* Verify a gimple assignment statement STMT with a ternary rhs.
4138 Returns true if anything is wrong. */
     /* On failure an error () diagnostic is emitted and the offending
        types are dumped.  */
4140 static bool
4141 verify_gimple_assign_ternary (gassign *stmt)
4143 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4144 tree lhs = gimple_assign_lhs (stmt);
4145 tree lhs_type = TREE_TYPE (lhs);
4146 tree rhs1 = gimple_assign_rhs1 (stmt);
4147 tree rhs1_type = TREE_TYPE (rhs1);
4148 tree rhs2 = gimple_assign_rhs2 (stmt);
4149 tree rhs2_type = TREE_TYPE (rhs2);
4150 tree rhs3 = gimple_assign_rhs3 (stmt);
4151 tree rhs3_type = TREE_TYPE (rhs3);
     /* The LHS must be a register.  */
4153 if (!is_gimple_reg (lhs))
4155 error ("non-register as LHS of ternary operation");
4156 return true;
     /* For (VEC_)COND_EXPR the first operand may be a condition
        expression; otherwise all operands must be gimple values.  */
4159 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4160 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4161 || !is_gimple_val (rhs2)
4162 || !is_gimple_val (rhs3))
4164 error ("invalid operands in ternary operation");
4165 return true;
4168 const char* const code_name = get_tree_code_name (rhs_code);
4170 /* First handle operations that involve different types. */
4171 switch (rhs_code)
4173 case WIDEN_MULT_PLUS_EXPR:
4174 case WIDEN_MULT_MINUS_EXPR:
4175 if ((!INTEGRAL_TYPE_P (rhs1_type)
4176 && !FIXED_POINT_TYPE_P (rhs1_type))
4177 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4178 || !useless_type_conversion_p (lhs_type, rhs3_type)
4179 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4180 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4182 error ("type mismatch in %qs", code_name);
4183 debug_generic_expr (lhs_type);
4184 debug_generic_expr (rhs1_type);
4185 debug_generic_expr (rhs2_type);
4186 debug_generic_expr (rhs3_type);
4187 return true;
4189 break;
4191 case VEC_COND_EXPR:
4192 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4193 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4194 TYPE_VECTOR_SUBPARTS (lhs_type)))
4196 error ("the first argument of a %qs must be of a "
4197 "boolean vector type of the same number of elements "
4198 "as the result", code_name);
4199 debug_generic_expr (lhs_type);
4200 debug_generic_expr (rhs1_type);
4201 return true;
4203 /* Fallthrough. */
4204 case COND_EXPR:
     /* If the condition is an embedded comparison, verify it too.  */
4205 if (!is_gimple_val (rhs1)
4206 && verify_gimple_comparison (TREE_TYPE (rhs1),
4207 TREE_OPERAND (rhs1, 0),
4208 TREE_OPERAND (rhs1, 1),
4209 TREE_CODE (rhs1)))
4210 return true;
4211 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4212 || !useless_type_conversion_p (lhs_type, rhs3_type))
4214 error ("type mismatch in %qs", code_name);
4215 debug_generic_expr (lhs_type);
4216 debug_generic_expr (rhs2_type);
4217 debug_generic_expr (rhs3_type);
4218 return true;
4220 break;
     /* VEC_PERM: all four types are vectors with equal element counts;
        rhs3 is the (integer-element) selector.  */
4222 case VEC_PERM_EXPR:
4223 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4224 || !useless_type_conversion_p (lhs_type, rhs2_type))
4226 error ("type mismatch in %qs", code_name);
4227 debug_generic_expr (lhs_type);
4228 debug_generic_expr (rhs1_type);
4229 debug_generic_expr (rhs2_type);
4230 debug_generic_expr (rhs3_type);
4231 return true;
4234 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4235 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4236 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4238 error ("vector types expected in %qs", code_name);
4239 debug_generic_expr (lhs_type);
4240 debug_generic_expr (rhs1_type);
4241 debug_generic_expr (rhs2_type);
4242 debug_generic_expr (rhs3_type);
4243 return true;
4246 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4247 TYPE_VECTOR_SUBPARTS (rhs2_type))
4248 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4249 TYPE_VECTOR_SUBPARTS (rhs3_type))
4250 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4251 TYPE_VECTOR_SUBPARTS (lhs_type)))
4253 error ("vectors with different element number found in %qs",
4254 code_name);
4255 debug_generic_expr (lhs_type);
4256 debug_generic_expr (rhs1_type);
4257 debug_generic_expr (rhs2_type);
4258 debug_generic_expr (rhs3_type);
4259 return true;
     /* Non-constant selectors must have elements of the same bit size
        as the data vector elements.  */
4262 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4263 || (TREE_CODE (rhs3) != VECTOR_CST
4264 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4265 (TREE_TYPE (rhs3_type)))
4266 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4267 (TREE_TYPE (rhs1_type))))))
4269 error ("invalid mask type in %qs", code_name);
4270 debug_generic_expr (lhs_type);
4271 debug_generic_expr (rhs1_type);
4272 debug_generic_expr (rhs2_type);
4273 debug_generic_expr (rhs3_type);
4274 return true;
4277 return false;
4279 case SAD_EXPR:
4280 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4281 || !useless_type_conversion_p (lhs_type, rhs3_type)
4282 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4283 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4285 error ("type mismatch in %qs", code_name);
4286 debug_generic_expr (lhs_type);
4287 debug_generic_expr (rhs1_type);
4288 debug_generic_expr (rhs2_type);
4289 debug_generic_expr (rhs3_type);
4290 return true;
4293 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4294 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4295 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4297 error ("vector types expected in %qs", code_name);
4298 debug_generic_expr (lhs_type);
4299 debug_generic_expr (rhs1_type);
4300 debug_generic_expr (rhs2_type);
4301 debug_generic_expr (rhs3_type);
4302 return true;
4305 return false;
     /* BIT_INSERT: rhs2 is inserted into a copy of rhs1 at constant bit
        position rhs3; allowed for integral bit-ranges, single vector
        elements and aligned sub-vectors.  */
4307 case BIT_INSERT_EXPR:
4308 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4310 error ("type mismatch in %qs", code_name);
4311 debug_generic_expr (lhs_type);
4312 debug_generic_expr (rhs1_type);
4313 return true;
4315 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4316 && INTEGRAL_TYPE_P (rhs2_type))
4317 /* Vector element insert. */
4318 || (VECTOR_TYPE_P (rhs1_type)
4319 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4320 /* Aligned sub-vector insert. */
4321 || (VECTOR_TYPE_P (rhs1_type)
4322 && VECTOR_TYPE_P (rhs2_type)
4323 && types_compatible_p (TREE_TYPE (rhs1_type),
4324 TREE_TYPE (rhs2_type))
4325 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4326 TYPE_VECTOR_SUBPARTS (rhs2_type))
4327 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4329 error ("not allowed type combination in %qs", code_name);
4330 debug_generic_expr (rhs1_type);
4331 debug_generic_expr (rhs2_type);
4332 return true;
4334 if (! tree_fits_uhwi_p (rhs3)
4335 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4336 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4338 error ("invalid position or size in %qs", code_name);
4339 return true;
4341 if (INTEGRAL_TYPE_P (rhs1_type)
4342 && !type_has_mode_precision_p (rhs1_type))
4344 error ("%qs into non-mode-precision operand", code_name);
4345 return true;
     /* The inserted bit-range must lie fully within rhs1's precision.  */
4347 if (INTEGRAL_TYPE_P (rhs1_type))
4349 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4350 if (bitpos >= TYPE_PRECISION (rhs1_type)
4351 || (bitpos + TYPE_PRECISION (rhs2_type)
4352 > TYPE_PRECISION (rhs1_type)))
4354 error ("insertion out of range in %qs", code_name);
4355 return true;
4358 else if (VECTOR_TYPE_P (rhs1_type))
4360 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4361 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4362 if (bitpos % bitsize != 0)
4364 error ("%qs not at element boundary", code_name);
4365 return true;
4368 return false;
4370 case DOT_PROD_EXPR:
4372 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4373 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4374 && ((!INTEGRAL_TYPE_P (rhs1_type)
4375 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4376 || (!INTEGRAL_TYPE_P (lhs_type)
4377 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4378 || !types_compatible_p (rhs1_type, rhs2_type)
4379 || !useless_type_conversion_p (lhs_type, rhs3_type)
4380 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4381 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4383 error ("type mismatch in %qs", code_name);
4384 debug_generic_expr (lhs_type);
4385 debug_generic_expr (rhs1_type);
4386 debug_generic_expr (rhs2_type);
4387 return true;
4389 return false;
4392 case REALIGN_LOAD_EXPR:
4393 /* FIXME. */
4394 return false;
4396 default:
4397 gcc_unreachable ();
4399 return false;
4402 /* Verify a gimple assignment statement STMT with a single rhs.
4403 Returns true if anything is wrong. */
4405 static bool
4406 verify_gimple_assign_single (gassign *stmt)
4408 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4409 tree lhs = gimple_assign_lhs (stmt);
4410 tree lhs_type = TREE_TYPE (lhs);
4411 tree rhs1 = gimple_assign_rhs1 (stmt);
4412 tree rhs1_type = TREE_TYPE (rhs1);
4413 bool res = false;
4415 const char* const code_name = get_tree_code_name (rhs_code);
4417 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4419 error ("non-trivial conversion in %qs", code_name);
4420 debug_generic_expr (lhs_type);
4421 debug_generic_expr (rhs1_type);
4422 return true;
4425 if (gimple_clobber_p (stmt)
4426 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4428 error ("%qs LHS in clobber statement",
4429 get_tree_code_name (TREE_CODE (lhs)));
4430 debug_generic_expr (lhs);
4431 return true;
4434 if (handled_component_p (lhs)
4435 || TREE_CODE (lhs) == MEM_REF
4436 || TREE_CODE (lhs) == TARGET_MEM_REF)
4437 res |= verify_types_in_gimple_reference (lhs, true);
4439 /* Special codes we cannot handle via their class. */
4440 switch (rhs_code)
4442 case ADDR_EXPR:
4444 tree op = TREE_OPERAND (rhs1, 0);
4445 if (!is_gimple_addressable (op))
4447 error ("invalid operand in %qs", code_name);
4448 return true;
4451 /* Technically there is no longer a need for matching types, but
4452 gimple hygiene asks for this check. In LTO we can end up
4453 combining incompatible units and thus end up with addresses
4454 of globals that change their type to a common one. */
4455 if (!in_lto_p
4456 && !types_compatible_p (TREE_TYPE (op),
4457 TREE_TYPE (TREE_TYPE (rhs1)))
4458 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4459 TREE_TYPE (op)))
4461 error ("type mismatch in %qs", code_name);
4462 debug_generic_stmt (TREE_TYPE (rhs1));
4463 debug_generic_stmt (TREE_TYPE (op));
4464 return true;
4467 return (verify_address (rhs1, true)
4468 || verify_types_in_gimple_reference (op, true));
4471 /* tcc_reference */
4472 case INDIRECT_REF:
4473 error ("%qs in gimple IL", code_name);
4474 return true;
4476 case COMPONENT_REF:
4477 case BIT_FIELD_REF:
4478 case ARRAY_REF:
4479 case ARRAY_RANGE_REF:
4480 case VIEW_CONVERT_EXPR:
4481 case REALPART_EXPR:
4482 case IMAGPART_EXPR:
4483 case TARGET_MEM_REF:
4484 case MEM_REF:
4485 if (!is_gimple_reg (lhs)
4486 && is_gimple_reg_type (TREE_TYPE (lhs)))
4488 error ("invalid RHS for gimple memory store: %qs", code_name);
4489 debug_generic_stmt (lhs);
4490 debug_generic_stmt (rhs1);
4491 return true;
4493 return res || verify_types_in_gimple_reference (rhs1, false);
4495 /* tcc_constant */
4496 case SSA_NAME:
4497 case INTEGER_CST:
4498 case REAL_CST:
4499 case FIXED_CST:
4500 case COMPLEX_CST:
4501 case VECTOR_CST:
4502 case STRING_CST:
4503 return res;
4505 /* tcc_declaration */
4506 case CONST_DECL:
4507 return res;
4508 case VAR_DECL:
4509 case PARM_DECL:
4510 if (!is_gimple_reg (lhs)
4511 && !is_gimple_reg (rhs1)
4512 && is_gimple_reg_type (TREE_TYPE (lhs)))
4514 error ("invalid RHS for gimple memory store: %qs", code_name);
4515 debug_generic_stmt (lhs);
4516 debug_generic_stmt (rhs1);
4517 return true;
4519 return res;
4521 case CONSTRUCTOR:
4522 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4524 unsigned int i;
4525 tree elt_i, elt_v, elt_t = NULL_TREE;
4527 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4528 return res;
4529 /* For vector CONSTRUCTORs we require that either it is empty
4530 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4531 (then the element count must be correct to cover the whole
4532 outer vector and index must be NULL on all elements, or it is
4533 a CONSTRUCTOR of scalar elements, where we as an exception allow
4534 smaller number of elements (assuming zero filling) and
4535 consecutive indexes as compared to NULL indexes (such
4536 CONSTRUCTORs can appear in the IL from FEs). */
4537 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4539 if (elt_t == NULL_TREE)
4541 elt_t = TREE_TYPE (elt_v);
4542 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4544 tree elt_t = TREE_TYPE (elt_v);
4545 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4546 TREE_TYPE (elt_t)))
4548 error ("incorrect type of vector %qs elements",
4549 code_name);
4550 debug_generic_stmt (rhs1);
4551 return true;
4553 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4554 * TYPE_VECTOR_SUBPARTS (elt_t),
4555 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4557 error ("incorrect number of vector %qs elements",
4558 code_name);
4559 debug_generic_stmt (rhs1);
4560 return true;
4563 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4564 elt_t))
4566 error ("incorrect type of vector %qs elements",
4567 code_name);
4568 debug_generic_stmt (rhs1);
4569 return true;
4571 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4572 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4574 error ("incorrect number of vector %qs elements",
4575 code_name);
4576 debug_generic_stmt (rhs1);
4577 return true;
4580 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4582 error ("incorrect type of vector CONSTRUCTOR elements");
4583 debug_generic_stmt (rhs1);
4584 return true;
4586 if (elt_i != NULL_TREE
4587 && (TREE_CODE (elt_t) == VECTOR_TYPE
4588 || TREE_CODE (elt_i) != INTEGER_CST
4589 || compare_tree_int (elt_i, i) != 0))
4591 error ("vector %qs with non-NULL element index",
4592 code_name);
4593 debug_generic_stmt (rhs1);
4594 return true;
4596 if (!is_gimple_val (elt_v))
4598 error ("vector %qs element is not a GIMPLE value",
4599 code_name);
4600 debug_generic_stmt (rhs1);
4601 return true;
4605 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4607 error ("non-vector %qs with elements", code_name);
4608 debug_generic_stmt (rhs1);
4609 return true;
4611 return res;
4613 case ASSERT_EXPR:
4614 /* FIXME. */
4615 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4616 if (rhs1 == boolean_false_node)
4618 error ("%qs with an always-false condition", code_name);
4619 debug_generic_stmt (rhs1);
4620 return true;
4622 break;
4624 case OBJ_TYPE_REF:
4625 case WITH_SIZE_EXPR:
4626 /* FIXME. */
4627 return res;
4629 default:;
4632 return res;
4635 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4636 is a problem, otherwise false. */
4638 static bool
4639 verify_gimple_assign (gassign *stmt)
4641 switch (gimple_assign_rhs_class (stmt))
4643 case GIMPLE_SINGLE_RHS:
4644 return verify_gimple_assign_single (stmt);
4646 case GIMPLE_UNARY_RHS:
4647 return verify_gimple_assign_unary (stmt);
4649 case GIMPLE_BINARY_RHS:
4650 return verify_gimple_assign_binary (stmt);
4652 case GIMPLE_TERNARY_RHS:
4653 return verify_gimple_assign_ternary (stmt);
4655 default:
4656 gcc_unreachable ();
4660 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4661 is a problem, otherwise false. */
4663 static bool
4664 verify_gimple_return (greturn *stmt)
4666 tree op = gimple_return_retval (stmt);
     /* RESTYPE is the declared return type: TREE_TYPE of the function's
        FUNCTION_TYPE.  */
4667 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4669 /* We cannot test for present return values as we do not fix up missing
4670 return values from the original source. */
4671 if (op == NULL)
4672 return false;
     /* The returned operand must be a GIMPLE value or the bare
        RESULT_DECL.  */
4674 if (!is_gimple_val (op)
4675 && TREE_CODE (op) != RESULT_DECL)
4677 error ("invalid operand in return statement");
4678 debug_generic_stmt (op);
4679 return true;
     /* For a result returned by invisible reference, OP has pointer type;
        stash OP's type in OP itself so the TREE_TYPE (op) below yields the
        pointed-to type, which is what must match RESTYPE.
        NOTE(review): relies on DECL_BY_REFERENCE results having pointer
        type in the IL — confirm against tree-nested/function.c.  */
4682 if ((TREE_CODE (op) == RESULT_DECL
4683 && DECL_BY_REFERENCE (op))
4684 || (TREE_CODE (op) == SSA_NAME
4685 && SSA_NAME_VAR (op)
4686 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4687 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4688 op = TREE_TYPE (op);
     /* The value's type must be implicitly convertible to the declared
        return type.  */
4690 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4692 error ("invalid conversion in return statement");
4693 debug_generic_stmt (restype);
4694 debug_generic_stmt (TREE_TYPE (op));
4695 return true;
4698 return false;
4702 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4703 is a problem, otherwise false. */
4705 static bool
4706 verify_gimple_goto (ggoto *stmt)
4708 tree dest = gimple_goto_dest (stmt);
4710 /* ??? We have two canonical forms of direct goto destinations, a
4711 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4712 if (TREE_CODE (dest) != LABEL_DECL
4713 && (!is_gimple_val (dest)
4714 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4716 error ("goto destination is neither a label nor a pointer");
4717 return true;
4720 return false;
4723 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4724 is a problem, otherwise false. */
4726 static bool
4727 verify_gimple_switch (gswitch *stmt)
4729 unsigned int i, n;
4730 tree elt, prev_upper_bound = NULL_TREE;
4731 tree index_type, elt_type = NULL_TREE;
     /* The switch index must be a GIMPLE value of integral type.  */
4733 if (!is_gimple_val (gimple_switch_index (stmt)))
4735 error ("invalid operand to switch statement");
4736 debug_generic_stmt (gimple_switch_index (stmt));
4737 return true;
4740 index_type = TREE_TYPE (gimple_switch_index (stmt));
4741 if (! INTEGRAL_TYPE_P (index_type))
4743 error ("non-integral type switch statement");
4744 debug_generic_expr (index_type);
4745 return true;
     /* Label 0 must be the default case: no bounds and no chain.  */
4748 elt = gimple_switch_label (stmt, 0);
4749 if (CASE_LOW (elt) != NULL_TREE
4750 || CASE_HIGH (elt) != NULL_TREE
4751 || CASE_CHAIN (elt) != NULL_TREE)
4753 error ("invalid default case label in switch statement");
4754 debug_generic_expr (elt);
4755 return true;
     /* Walk the remaining (non-default) labels, checking well-formedness,
        uniform label type, and strictly increasing, non-overlapping
        ranges.  */
4758 n = gimple_switch_num_labels (stmt);
4759 for (i = 1; i < n; i++)
4761 elt = gimple_switch_label (stmt, i);
4763 if (CASE_CHAIN (elt))
4765 error ("invalid %<CASE_CHAIN%>");
4766 debug_generic_expr (elt);
4767 return true;
4769 if (! CASE_LOW (elt))
4771 error ("invalid case label in switch statement");
4772 debug_generic_expr (elt);
4773 return true;
     /* A range label requires CASE_LOW < CASE_HIGH.  */
4775 if (CASE_HIGH (elt)
4776 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4778 error ("invalid case range in switch statement");
4779 debug_generic_expr (elt);
4780 return true;
     /* All labels must share the type of the first one (pointer
        identity of the type node, not mere compatibility).  */
4783 if (elt_type)
4785 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4786 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4788 error ("type mismatch for case label in switch statement");
4789 debug_generic_expr (elt);
4790 return true;
4793 else
4795 elt_type = TREE_TYPE (CASE_LOW (elt));
     /* The index type must be wide enough to represent the labels.  */
4796 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4798 error ("type precision mismatch in switch statement");
4799 return true;
     /* Labels must be sorted: each low bound strictly above the
        previous label's upper bound.  */
4803 if (prev_upper_bound)
4805 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4807 error ("case labels not sorted in switch statement");
4808 return true;
4812 prev_upper_bound = CASE_HIGH (elt);
4813 if (! prev_upper_bound)
4814 prev_upper_bound = CASE_LOW (elt);
4817 return false;
4820 /* Verify a gimple debug statement STMT.
4821 Returns true if anything is wrong. */
4823 static bool
4824 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4826 /* There isn't much that could be wrong in a gimple debug stmt. A
4827 gimple debug bind stmt, for example, maps a tree, that's usually
4828 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4829 component or member of an aggregate type, to another tree, that
4830 can be an arbitrary expression. These stmts expand into debug
4831 insns, and are converted to debug notes by var-tracking.c. */
4832 return false;
4835 /* Verify a gimple label statement STMT.
4836 Returns true if anything is wrong. */
4838 static bool
4839 verify_gimple_label (glabel *stmt)
4841 tree decl = gimple_label_label (stmt);
4842 int uid;
4843 bool err = false;
4845 if (TREE_CODE (decl) != LABEL_DECL)
4846 return true;
     /* Ordinary (non-nonlocal, non-forced) labels must belong to the
        current function.  */
4847 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4848 && DECL_CONTEXT (decl) != current_function_decl)
4850 error ("label context is not the current function declaration");
4851 err |= true;
     /* When a CFG exists, the label's UID must map back to the block
        that contains this label statement.  */
4854 uid = LABEL_DECL_UID (decl);
4855 if (cfun->cfg
4856 && (uid == -1
4857 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4859 error ("incorrect entry in %<label_to_block_map%>");
4860 err |= true;
     /* If the label is an EH landing pad, the EH data must point back to
        this very label.  */
4863 uid = EH_LANDING_PAD_NR (decl);
4864 if (uid)
4866 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4867 if (decl != lp->post_landing_pad)
4869 error ("incorrect setting of landing pad number");
4870 err |= true;
4874 return err;
4877 /* Verify a gimple cond statement STMT.
4878 Returns true if anything is wrong. */
4880 static bool
4881 verify_gimple_cond (gcond *stmt)
4883 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4885 error ("invalid comparison code in gimple cond");
4886 return true;
4888 if (!(!gimple_cond_true_label (stmt)
4889 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4890 || !(!gimple_cond_false_label (stmt)
4891 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4893 error ("invalid labels in gimple cond");
4894 return true;
4897 return verify_gimple_comparison (boolean_type_node,
4898 gimple_cond_lhs (stmt),
4899 gimple_cond_rhs (stmt),
4900 gimple_cond_code (stmt));
4903 /* Verify the GIMPLE statement STMT. Returns true if there is an
4904 error, otherwise false. */
4906 static bool
4907 verify_gimple_stmt (gimple *stmt)
4909 switch (gimple_code (stmt))
4911 case GIMPLE_ASSIGN:
4912 return verify_gimple_assign (as_a <gassign *> (stmt));
4914 case GIMPLE_LABEL:
4915 return verify_gimple_label (as_a <glabel *> (stmt));
4917 case GIMPLE_CALL:
4918 return verify_gimple_call (as_a <gcall *> (stmt));
4920 case GIMPLE_COND:
4921 return verify_gimple_cond (as_a <gcond *> (stmt));
4923 case GIMPLE_GOTO:
4924 return verify_gimple_goto (as_a <ggoto *> (stmt));
4926 case GIMPLE_SWITCH:
4927 return verify_gimple_switch (as_a <gswitch *> (stmt));
4929 case GIMPLE_RETURN:
4930 return verify_gimple_return (as_a <greturn *> (stmt));
4932 case GIMPLE_ASM:
4933 return false;
4935 case GIMPLE_TRANSACTION:
4936 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4938 /* Tuples that do not have tree operands. */
4939 case GIMPLE_NOP:
4940 case GIMPLE_PREDICT:
4941 case GIMPLE_RESX:
4942 case GIMPLE_EH_DISPATCH:
4943 case GIMPLE_EH_MUST_NOT_THROW:
4944 return false;
4946 CASE_GIMPLE_OMP:
4947 /* OpenMP directives are validated by the FE and never operated
4948 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4949 non-gimple expressions when the main index variable has had
4950 its address taken. This does not affect the loop itself
4951 because the header of an GIMPLE_OMP_FOR is merely used to determine
4952 how to setup the parallel iteration. */
4953 return false;
4955 case GIMPLE_DEBUG:
4956 return verify_gimple_debug (stmt);
4958 default:
4959 gcc_unreachable ();
4963 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4964 and false otherwise. */
4966 static bool
4967 verify_gimple_phi (gphi *phi)
4969 bool err = false;
4970 unsigned i;
4971 tree phi_result = gimple_phi_result (phi);
4972 bool virtual_p;
4974 if (!phi_result)
4976 error ("invalid %<PHI%> result");
4977 return true;
     /* The result must be an SSA name; a virtual PHI result must be an
        SSA name of the function's single virtual operand.  */
4980 virtual_p = virtual_operand_p (phi_result);
4981 if (TREE_CODE (phi_result) != SSA_NAME
4982 || (virtual_p
4983 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4985 error ("invalid %<PHI%> result");
4986 err = true;
     /* Check every argument: it must exist, agree with the result in
        virtual-ness, and (for non-virtual PHIs) be a GIMPLE value.  */
4989 for (i = 0; i < gimple_phi_num_args (phi); i++)
4991 tree t = gimple_phi_arg_def (phi, i);
4993 if (!t)
4995 error ("missing %<PHI%> def");
4996 err |= true;
4997 continue;
4999 /* Addressable variables do have SSA_NAMEs but they
5000 are not considered gimple values. */
5001 else if ((TREE_CODE (t) == SSA_NAME
5002 && virtual_p != virtual_operand_p (t))
5003 || (virtual_p
5004 && (TREE_CODE (t) != SSA_NAME
5005 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5006 || (!virtual_p
5007 && !is_gimple_val (t)))
5009 error ("invalid %<PHI%> argument");
5010 debug_generic_expr (t);
5011 err |= true;
     /* Optionally also enforce type compatibility of each argument
        with the PHI result.  */
5013 #ifdef ENABLE_TYPES_CHECKING
5014 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5016 error ("incompatible types in %<PHI%> argument %u", i);
5017 debug_generic_stmt (TREE_TYPE (phi_result));
5018 debug_generic_stmt (TREE_TYPE (t));
5019 err |= true;
5021 #endif
5024 return err;
5027 /* Verify the GIMPLE statements inside the sequence STMTS. */
5029 static bool
5030 verify_gimple_in_seq_2 (gimple_seq stmts)
5032 gimple_stmt_iterator ittr;
5033 bool err = false;
5035 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5037 gimple *stmt = gsi_stmt (ittr);
5039 switch (gimple_code (stmt))
5041 case GIMPLE_BIND:
5042 err |= verify_gimple_in_seq_2 (
5043 gimple_bind_body (as_a <gbind *> (stmt)));
5044 break;
5046 case GIMPLE_TRY:
5047 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5048 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5049 break;
5051 case GIMPLE_EH_FILTER:
5052 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5053 break;
5055 case GIMPLE_EH_ELSE:
5057 geh_else *eh_else = as_a <geh_else *> (stmt);
5058 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5059 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5061 break;
5063 case GIMPLE_CATCH:
5064 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5065 as_a <gcatch *> (stmt)));
5066 break;
5068 case GIMPLE_TRANSACTION:
5069 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5070 break;
5072 default:
5074 bool err2 = verify_gimple_stmt (stmt);
5075 if (err2)
5076 debug_gimple_stmt (stmt);
5077 err |= err2;
5082 return err;
5085 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5086 is a problem, otherwise false. */
5088 static bool
5089 verify_gimple_transaction (gtransaction *stmt)
5091 tree lab;
5093 lab = gimple_transaction_label_norm (stmt);
5094 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5095 return true;
5096 lab = gimple_transaction_label_uninst (stmt);
5097 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5098 return true;
5099 lab = gimple_transaction_label_over (stmt);
5100 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5101 return true;
5103 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5107 /* Verify the GIMPLE statements inside the statement list STMTS. */
5109 DEBUG_FUNCTION void
5110 verify_gimple_in_seq (gimple_seq stmts)
5112 timevar_push (TV_TREE_STMT_VERIFY);
5113 if (verify_gimple_in_seq_2 (stmts))
5114 internal_error ("%<verify_gimple%> failed");
5115 timevar_pop (TV_TREE_STMT_VERIFY);
5118 /* Return true when the T can be shared. */
5120 static bool
5121 tree_node_can_be_shared (tree t)
5123 if (IS_TYPE_OR_DECL_P (t)
5124 || TREE_CODE (t) == SSA_NAME
5125 || TREE_CODE (t) == IDENTIFIER_NODE
5126 || TREE_CODE (t) == CASE_LABEL_EXPR
5127 || is_gimple_min_invariant (t))
5128 return true;
5130 if (t == error_mark_node)
5131 return true;
5133 return false;
5136 /* Called via walk_tree. Verify tree sharing. */
5138 static tree
5139 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5141 hash_set<void *> *visited = (hash_set<void *> *) data;
5143 if (tree_node_can_be_shared (*tp))
5145 *walk_subtrees = false;
5146 return NULL;
5149 if (visited->add (*tp))
5150 return *tp;
5152 return NULL;
5155 /* Called via walk_gimple_stmt. Verify tree sharing. */
5157 static tree
5158 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5160 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5161 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5164 static bool eh_error_found;
5165 bool
5166 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5167 hash_set<gimple *> *visited)
5169 if (!visited->contains (stmt))
5171 error ("dead statement in EH table");
5172 debug_gimple_stmt (stmt);
5173 eh_error_found = true;
5175 return true;
5178 /* Verify if the location LOCs block is in BLOCKS. */
5180 static bool
5181 verify_location (hash_set<tree> *blocks, location_t loc)
5183 tree block = LOCATION_BLOCK (loc);
5184 if (block != NULL_TREE
5185 && !blocks->contains (block))
5187 error ("location references block not in block tree");
5188 return true;
5190 if (block != NULL_TREE)
5191 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5192 return false;
5195 /* Called via walk_tree. Verify that expressions have no blocks. */
5197 static tree
5198 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5200 if (!EXPR_P (*tp))
5202 *walk_subtrees = false;
5203 return NULL;
5206 location_t loc = EXPR_LOCATION (*tp);
5207 if (LOCATION_BLOCK (loc) != NULL)
5208 return *tp;
5210 return NULL;
5213 /* Called via walk_tree. Verify locations of expressions. */
5215 static tree
5216 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5218 hash_set<tree> *blocks = (hash_set<tree> *) data;
5219 tree t = *tp;
5221 /* ??? This doesn't really belong here but there's no good place to
5222 stick this remainder of old verify_expr. */
5223 /* ??? This barfs on debug stmts which contain binds to vars with
5224 different function context. */
5225 #if 0
5226 if (VAR_P (t)
5227 || TREE_CODE (t) == PARM_DECL
5228 || TREE_CODE (t) == RESULT_DECL)
5230 tree context = decl_function_context (t);
5231 if (context != cfun->decl
5232 && !SCOPE_FILE_SCOPE_P (context)
5233 && !TREE_STATIC (t)
5234 && !DECL_EXTERNAL (t))
5236 error ("local declaration from a different function");
5237 return t;
5240 #endif
     /* Debug expressions attached to a variable may not reference any
        block.  */
5242 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5244 tree x = DECL_DEBUG_EXPR (t);
5245 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5246 if (addr)
5247 return addr;
     /* Likewise for DECL_VALUE_EXPRs of variables, parameters and
        results.  */
5249 if ((VAR_P (t)
5250 || TREE_CODE (t) == PARM_DECL
5251 || TREE_CODE (t) == RESULT_DECL)
5252 && DECL_HAS_VALUE_EXPR_P (t))
5254 tree x = DECL_VALUE_EXPR (t);
5255 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5256 if (addr)
5257 return addr;
     /* Only expressions carry a location; stop descending otherwise.  */
5260 if (!EXPR_P (t))
5262 *walk_subtrees = false;
5263 return NULL;
     /* The expression's own location must reference blocks in BLOCKS
        only.  */
5266 location_t loc = EXPR_LOCATION (t);
5267 if (verify_location (blocks, loc))
5268 return t;
5270 return NULL;
5273 /* Called via walk_gimple_op. Verify locations of expressions. */
5275 static tree
5276 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5278 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5279 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5282 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5284 static void
5285 collect_subblocks (hash_set<tree> *blocks, tree block)
5287 tree t;
5288 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5290 blocks->add (t);
5291 collect_subblocks (blocks, t);
5295 /* Disable warnings about missing quoting in GCC diagnostics for
5296 the verification errors. Their format strings don't follow
5297 GCC diagnostic conventions and trigger an ICE in the end. */
5298 #if __GNUC__ >= 10
5299 # pragma GCC diagnostic push
5300 # pragma GCC diagnostic ignored "-Wformat-diag"
5301 #endif
5303 /* Verify the GIMPLE statements in the CFG of FN. */
5305 DEBUG_FUNCTION void
5306 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5308 basic_block bb;
5309 bool err = false;
5311 timevar_push (TV_TREE_STMT_VERIFY);
     /* VISITED records tree nodes seen so far (for the sharing check);
        VISITED_THROWING_STMTS records statements with an EH landing
        pad, to cross-check the EH table at the end.  */
5312 hash_set<void *> visited;
5313 hash_set<gimple *> visited_throwing_stmts;
5315 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5316 hash_set<tree> blocks;
5317 if (DECL_INITIAL (fn->decl))
5319 blocks.add (DECL_INITIAL (fn->decl));
5320 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5323 FOR_EACH_BB_FN (bb, fn)
5325 gimple_stmt_iterator gsi;
5326 edge_iterator ei;
5327 edge e;
     /* First verify the PHI nodes of this block.  */
5329 for (gphi_iterator gpi = gsi_start_phis (bb);
5330 !gsi_end_p (gpi);
5331 gsi_next (&gpi))
5333 gphi *phi = gpi.phi ();
5334 bool err2 = false;
5335 unsigned i;
5337 if (gimple_bb (phi) != bb)
5339 error ("gimple_bb (phi) is set to a wrong basic block");
5340 err2 = true;
5343 err2 |= verify_gimple_phi (phi);
5345 /* Only PHI arguments have locations. */
5346 if (gimple_location (phi) != UNKNOWN_LOCATION)
5348 error ("PHI node with location");
5349 err2 = true;
     /* Check each PHI argument for sharing and location sanity.  */
5352 for (i = 0; i < gimple_phi_num_args (phi); i++)
5354 tree arg = gimple_phi_arg_def (phi, i);
5355 tree addr = walk_tree (&arg, verify_node_sharing_1,
5356 &visited, NULL);
5357 if (addr)
5359 error ("incorrect sharing of tree nodes");
5360 debug_generic_expr (addr);
5361 err2 |= true;
5363 location_t loc = gimple_phi_arg_location (phi, i);
5364 if (virtual_operand_p (gimple_phi_result (phi))
5365 && loc != UNKNOWN_LOCATION)
5367 error ("virtual PHI with argument locations");
5368 err2 = true;
5370 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5371 if (addr)
5373 debug_generic_expr (addr);
5374 err2 = true;
5376 err2 |= verify_location (&blocks, loc);
5379 if (err2)
5380 debug_gimple_stmt (phi);
5381 err |= err2;
     /* Now verify the ordinary statements of this block.  */
5384 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5386 gimple *stmt = gsi_stmt (gsi);
5387 bool err2 = false;
5388 struct walk_stmt_info wi;
5389 tree addr;
5390 int lp_nr;
5392 if (gimple_bb (stmt) != bb)
5394 error ("gimple_bb (stmt) is set to a wrong basic block");
5395 err2 = true;
5398 err2 |= verify_gimple_stmt (stmt);
5399 err2 |= verify_location (&blocks, gimple_location (stmt));
     /* Check tree-node sharing across the statement's operands.  */
5401 memset (&wi, 0, sizeof (wi));
5402 wi.info = (void *) &visited;
5403 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5404 if (addr)
5406 error ("incorrect sharing of tree nodes");
5407 debug_generic_expr (addr);
5408 err2 |= true;
     /* Check location blocks in the statement's operands.  */
5411 memset (&wi, 0, sizeof (wi));
5412 wi.info = (void *) &blocks;
5413 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5414 if (addr)
5416 debug_generic_expr (addr);
5417 err2 |= true;
5420 /* If the statement is marked as part of an EH region, then it is
5421 expected that the statement could throw. Verify that when we
5422 have optimizations that simplify statements such that we prove
5423 that they cannot throw, that we update other data structures
5424 to match. */
5425 lp_nr = lookup_stmt_eh_lp (stmt);
5426 if (lp_nr != 0)
5427 visited_throwing_stmts.add (stmt);
5428 if (lp_nr > 0)
5430 if (!stmt_could_throw_p (cfun, stmt))
5432 if (verify_nothrow)
5434 error ("statement marked for throw, but doesn%'t");
5435 err2 |= true;
     /* A throwing statement must end its basic block.  */
5438 else if (!gsi_one_before_end_p (gsi))
5440 error ("statement marked for throw in middle of block");
5441 err2 |= true;
5445 if (err2)
5446 debug_gimple_stmt (stmt);
5447 err |= err2;
     /* Outgoing goto locus must also reference known blocks only.  */
5450 FOR_EACH_EDGE (e, ei, bb->succs)
5451 if (e->goto_locus != UNKNOWN_LOCATION)
5452 err |= verify_location (&blocks, e->goto_locus);
     /* Finally, cross-check the EH table against the statements seen.  */
5455 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5456 eh_error_found = false;
5457 if (eh_table)
5458 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5459 (&visited_throwing_stmts);
5461 if (err || eh_error_found)
5462 internal_error ("verify_gimple failed");
5464 verify_histograms ();
5465 timevar_pop (TV_TREE_STMT_VERIFY);
5469 /* Verifies that the flow information is OK. */
5471 static int
5472 gimple_verify_flow_info (void)
5474 int err = 0;
5475 basic_block bb;
5476 gimple_stmt_iterator gsi;
5477 gimple *stmt;
5478 edge e;
5479 edge_iterator ei;
     /* ENTRY and EXIT are pseudo blocks and may carry no statements or
        PHIs.  */
5481 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5482 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5484 error ("ENTRY_BLOCK has IL associated with it");
5485 err = 1;
5488 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5489 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5491 error ("EXIT_BLOCK has IL associated with it");
5492 err = 1;
5495 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5496 if (e->flags & EDGE_FALLTHRU)
5498 error ("fallthru to exit from bb %d", e->src->index);
5499 err = 1;
5502 FOR_EACH_BB_FN (bb, cfun)
5504 bool found_ctrl_stmt = false;
5506 stmt = NULL;
5508 /* Skip labels on the start of basic block. */
     /* While doing so, validate each label: nonlocal and EH landing pad
        labels must come first, the label map must agree, and the label
        must belong to the current function.  */
5509 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5511 tree label;
5512 gimple *prev_stmt = stmt;
5514 stmt = gsi_stmt (gsi);
5516 if (gimple_code (stmt) != GIMPLE_LABEL)
5517 break;
5519 label = gimple_label_label (as_a <glabel *> (stmt));
5520 if (prev_stmt && DECL_NONLOCAL (label))
5522 error ("nonlocal label ");
5523 print_generic_expr (stderr, label);
5524 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5525 bb->index);
5526 err = 1;
5529 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5531 error ("EH landing pad label ");
5532 print_generic_expr (stderr, label);
5533 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5534 bb->index);
5535 err = 1;
5538 if (label_to_block (cfun, label) != bb)
5540 error ("label ");
5541 print_generic_expr (stderr, label);
5542 fprintf (stderr, " to block does not match in bb %d",
5543 bb->index);
5544 err = 1;
5547 if (decl_function_context (label) != current_function_decl)
5549 error ("label ");
5550 print_generic_expr (stderr, label);
5551 fprintf (stderr, " has incorrect context in bb %d",
5552 bb->index);
5553 err = 1;
5557 /* Verify that body of basic block BB is free of control flow. */
5558 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5560 gimple *stmt = gsi_stmt (gsi);
5562 if (found_ctrl_stmt)
5564 error ("control flow in the middle of basic block %d",
5565 bb->index);
5566 err = 1;
5569 if (stmt_ends_bb_p (stmt))
5570 found_ctrl_stmt = true;
     /* Labels may only appear in the leading label sequence.  */
5572 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5574 error ("label ");
5575 print_generic_expr (stderr, gimple_label_label (label_stmt));
5576 fprintf (stderr, " in the middle of basic block %d", bb->index);
5577 err = 1;
     /* Now check the block's last real statement against its outgoing
        edges.  */
5581 gsi = gsi_last_nondebug_bb (bb);
5582 if (gsi_end_p (gsi))
5583 continue;
5585 stmt = gsi_stmt (gsi);
5587 if (gimple_code (stmt) == GIMPLE_LABEL)
5588 continue;
5590 err |= verify_eh_edges (stmt);
5592 if (is_ctrl_stmt (stmt))
5594 FOR_EACH_EDGE (e, ei, bb->succs)
5595 if (e->flags & EDGE_FALLTHRU)
5597 error ("fallthru edge after a control statement in bb %d",
5598 bb->index);
5599 err = 1;
5603 if (gimple_code (stmt) != GIMPLE_COND)
5605 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5606 after anything else but if statement. */
5607 FOR_EACH_EDGE (e, ei, bb->succs)
5608 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5610 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5611 bb->index);
5612 err = 1;
     /* Per-code checks of the terminating statement's edges.  */
5616 switch (gimple_code (stmt))
5618 case GIMPLE_COND:
5620 edge true_edge;
5621 edge false_edge;
5623 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
     /* A cond must have exactly a true and a false edge, neither of
        which is a fallthru or abnormal edge.  */
5625 if (!true_edge
5626 || !false_edge
5627 || !(true_edge->flags & EDGE_TRUE_VALUE)
5628 || !(false_edge->flags & EDGE_FALSE_VALUE)
5629 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5630 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5631 || EDGE_COUNT (bb->succs) >= 3)
5633 error ("wrong outgoing edge flags at end of bb %d",
5634 bb->index);
5635 err = 1;
5638 break;
5640 case GIMPLE_GOTO:
     /* Simple gotos are lowered to edges; only computed gotos may
        terminate a block, and their edges must be abnormal.  */
5641 if (simple_goto_p (stmt))
5643 error ("explicit goto at end of bb %d", bb->index);
5644 err = 1;
5646 else
5648 /* FIXME. We should double check that the labels in the
5649 destination blocks have their address taken. */
5650 FOR_EACH_EDGE (e, ei, bb->succs)
5651 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5652 | EDGE_FALSE_VALUE))
5653 || !(e->flags & EDGE_ABNORMAL))
5655 error ("wrong outgoing edge flags at end of bb %d",
5656 bb->index);
5657 err = 1;
5660 break;
5662 case GIMPLE_CALL:
     /* Only __builtin_return calls end a block like a return.  */
5663 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5664 break;
5665 /* fallthru */
5666 case GIMPLE_RETURN:
     /* A return must have a single plain edge to EXIT.  */
5667 if (!single_succ_p (bb)
5668 || (single_succ_edge (bb)->flags
5669 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5670 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5672 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5673 err = 1;
5675 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5677 error ("return edge does not point to exit in bb %d",
5678 bb->index);
5679 err = 1;
5681 break;
5683 case GIMPLE_SWITCH:
5685 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5686 tree prev;
5687 edge e;
5688 size_t i, n;
5690 n = gimple_switch_num_labels (switch_stmt);
5692 /* Mark all the destination basic blocks. */
     /* The aux field is used as scratch: 1 = referenced by a label,
        2 = also reached by an edge; it is cleared again below.  */
5693 for (i = 0; i < n; ++i)
5695 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5696 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5697 label_bb->aux = (void *)1;
5700 /* Verify that the case labels are sorted. */
5701 prev = gimple_switch_label (switch_stmt, 0);
5702 for (i = 1; i < n; ++i)
5704 tree c = gimple_switch_label (switch_stmt, i);
5705 if (!CASE_LOW (c))
5707 error ("found default case not at the start of "
5708 "case vector");
5709 err = 1;
5710 continue;
5712 if (CASE_LOW (prev)
5713 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5715 error ("case labels not sorted: ");
5716 print_generic_expr (stderr, prev);
5717 fprintf (stderr," is greater than ");
5718 print_generic_expr (stderr, c);
5719 fprintf (stderr," but comes before it.\n");
5720 err = 1;
5722 prev = c;
5724 /* VRP will remove the default case if it can prove it will
5725 never be executed. So do not verify there always exists
5726 a default case here. */
     /* Every outgoing edge must correspond to some case label...  */
5728 FOR_EACH_EDGE (e, ei, bb->succs)
5730 if (!e->dest->aux)
5732 error ("extra outgoing edge %d->%d",
5733 bb->index, e->dest->index);
5734 err = 1;
5737 e->dest->aux = (void *)2;
5738 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5739 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5741 error ("wrong outgoing edge flags at end of bb %d",
5742 bb->index);
5743 err = 1;
5747 /* Check that we have all of them. */
     /* ... and every case label must have a matching edge.  */
5748 for (i = 0; i < n; ++i)
5750 basic_block label_bb = gimple_switch_label_bb (cfun,
5751 switch_stmt, i);
5753 if (label_bb->aux != (void *)2)
5755 error ("missing edge %i->%i", bb->index, label_bb->index);
5756 err = 1;
     /* Clear the aux scratch markers again.  */
5760 FOR_EACH_EDGE (e, ei, bb->succs)
5761 e->dest->aux = (void *)0;
5763 break;
5765 case GIMPLE_EH_DISPATCH:
5766 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5767 break;
5769 default:
5770 break;
     /* If dominator information is available, validate it as well.  */
5774 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5775 verify_dominators (CDI_DOMINATORS);
5777 return err;
5780 #if __GNUC__ >= 10
5781 # pragma GCC diagnostic pop
5782 #endif
5784 /* Updates phi nodes after creating a forwarder block joined
5785 by edge FALLTHRU. */
5787 static void
5788 gimple_make_forwarder_block (edge fallthru)
5790 edge e;
5791 edge_iterator ei;
5792 basic_block dummy, bb;
5793 tree var;
5794 gphi_iterator gsi;
5795 bool forward_location_p;
     /* DUMMY is the new forwarder block, BB the original block it now
        falls through to.  */
5797 dummy = fallthru->src;
5798 bb = fallthru->dest;
     /* With a single predecessor the PHIs stay where they are.  */
5800 if (single_pred_p (bb))
5801 return;
5803 /* We can forward location info if we have only one predecessor. */
5804 forward_location_p = single_pred_p (dummy);
5806 /* If we redirected a branch we must create new PHI nodes at the
5807 start of BB. */
5808 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5810 gphi *phi, *new_phi;
5812 phi = gsi.phi ();
5813 var = gimple_phi_result (phi);
     /* The new PHI in BB takes over the old result; the old PHI in
        DUMMY gets a fresh SSA name, which then feeds the new PHI
        through the FALLTHRU edge.  */
5814 new_phi = create_phi_node (var, bb);
5815 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5816 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5817 forward_location_p
5818 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5821 /* Add the arguments we have stored on edges. */
5822 FOR_EACH_EDGE (e, ei, bb->preds)
5824 if (e == fallthru)
5825 continue;
5827 flush_pending_stmts (e);
5832 /* Return a non-special label in the head of basic block BLOCK.
5833 Create one if it doesn't exist. */
5835 tree
5836 gimple_block_label (basic_block bb)
5838 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5839 bool first = true;
5840 tree label;
5841 glabel *stmt;
/* Scan the leading GIMPLE_LABELs for one that is not DECL_NONLOCAL;
   the scan stops at the first non-label statement. */
5843 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5845 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5846 if (!stmt)
5847 break;
5848 label = gimple_label_label (stmt);
5849 if (!DECL_NONLOCAL (label))
/* Move the returned label to the front of the block so a later
   lookup finds it immediately. */
5851 if (!first)
5852 gsi_move_before (&i, &s);
5853 return label;
/* No usable label found: create an artificial one at the block
   start. */
5857 label = create_artificial_label (UNKNOWN_LOCATION);
5858 stmt = gimple_build_label (label);
5859 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5860 return label;
5864 /* Attempt to perform edge redirection by replacing a possibly complex
5865 jump instruction by a goto or by removing the jump completely.
5866 This can apply only if all edges now point to the same block. The
5867 parameters and return values are equivalent to
5868 redirect_edge_and_branch. */
5870 static edge
5871 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5873 basic_block src = e->src;
5874 gimple_stmt_iterator i;
5875 gimple *stmt;
5877 /* We can replace or remove a complex jump only when we have exactly
5878 two edges. */
5879 if (EDGE_COUNT (src->succs) != 2
5880 /* Verify that all targets will be TARGET. Specifically, the
5881 edge that is not E must also go to TARGET. */
5882 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5883 return NULL;
5885 i = gsi_last_bb (src);
5886 if (gsi_end_p (i))
5887 return NULL;
5889 stmt = gsi_stmt (i);
/* Once both successors lead to TARGET, a COND or SWITCH terminator
   is redundant; drop it and fall through. */
5891 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5893 gsi_remove (&i, true);
5894 e = ssa_redirect_edge (e, target);
5895 e->flags = EDGE_FALLTHRU;
5896 return e;
5899 return NULL;
5903 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5904 edge representing the redirected branch. */
5906 static edge
5907 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5909 basic_block bb = e->src;
5910 gimple_stmt_iterator gsi;
5911 edge ret;
5912 gimple *stmt;
/* Abnormal edges cannot be redirected. */
5914 if (e->flags & EDGE_ABNORMAL)
5915 return NULL;
5917 if (e->dest == dest)
5918 return NULL;
/* EH edges have dedicated redirection machinery. */
5920 if (e->flags & EDGE_EH)
5921 return redirect_eh_edge (e, dest);
5923 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5925 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5926 if (ret)
5927 return ret;
/* Dispatch on the statement ending BB to fix up any explicit
   reference it makes to the old destination. */
5930 gsi = gsi_last_nondebug_bb (bb);
5931 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5933 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5935 case GIMPLE_COND:
5936 /* For COND_EXPR, we only need to redirect the edge. */
5937 break;
5939 case GIMPLE_GOTO:
5940 /* No non-abnormal edges should lead from a non-simple goto, and
5941 simple ones should be represented implicitly. */
5942 gcc_unreachable ();
5944 case GIMPLE_SWITCH:
5946 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5947 tree label = gimple_block_label (dest);
5948 tree cases = get_cases_for_edge (e, switch_stmt);
5950 /* If we have a list of cases associated with E, then use it
5951 as it's a lot faster than walking the entire case vector. */
5952 if (cases)
5954 edge e2 = find_edge (e->src, dest);
5955 tree last, first;
/* Point every case on E's list at the new destination label. */
5957 first = cases;
5958 while (cases)
5960 last = cases;
5961 CASE_LABEL (cases) = label;
5962 cases = CASE_CHAIN (cases);
5965 /* If there was already an edge in the CFG, then we need
5966 to move all the cases associated with E to E2. */
5967 if (e2)
5969 tree cases2 = get_cases_for_edge (e2, switch_stmt);
/* Splice E's case list onto the front of E2's list. */
5971 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5972 CASE_CHAIN (cases2) = first;
5974 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5976 else
/* No cached case list: scan the whole case vector for entries
   that used to target E->dest. */
5978 size_t i, n = gimple_switch_num_labels (switch_stmt);
5980 for (i = 0; i < n; i++)
5982 tree elt = gimple_switch_label (switch_stmt, i);
5983 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5984 CASE_LABEL (elt) = label;
5988 break;
5990 case GIMPLE_ASM:
5992 gasm *asm_stmt = as_a <gasm *> (stmt);
5993 int i, n = gimple_asm_nlabels (asm_stmt);
5994 tree label = NULL;
/* Retarget every asm-goto label operand that pointed at E->dest. */
5996 for (i = 0; i < n; ++i)
5998 tree cons = gimple_asm_label_op (asm_stmt, i);
5999 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6001 if (!label)
6002 label = gimple_block_label (dest);
6003 TREE_VALUE (cons) = label;
6007 /* If we didn't find any label matching the former edge in the
6008 asm labels, we must be redirecting the fallthrough
6009 edge. */
6010 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6012 break;
6014 case GIMPLE_RETURN:
/* Drop the return; the edge becomes a plain fallthru into DEST. */
6015 gsi_remove (&gsi, true);
6016 e->flags |= EDGE_FALLTHRU;
6017 break;
6019 case GIMPLE_OMP_RETURN:
6020 case GIMPLE_OMP_CONTINUE:
6021 case GIMPLE_OMP_SECTIONS_SWITCH:
6022 case GIMPLE_OMP_FOR:
6023 /* The edges from OMP constructs can be simply redirected. */
6024 break;
6026 case GIMPLE_EH_DISPATCH:
6027 if (!(e->flags & EDGE_FALLTHRU))
6028 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6029 break;
6031 case GIMPLE_TRANSACTION:
/* Update whichever of the transaction's labels this edge targets,
   selected by the edge's TM flags. */
6032 if (e->flags & EDGE_TM_ABORT)
6033 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6034 gimple_block_label (dest));
6035 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6036 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6037 gimple_block_label (dest));
6038 else
6039 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6040 gimple_block_label (dest));
6041 break;
6043 default:
6044 /* Otherwise it must be a fallthru edge, and we don't need to
6045 do anything besides redirecting it. */
6046 gcc_assert (e->flags & EDGE_FALLTHRU);
6047 break;
6050 /* Update/insert PHI nodes as necessary. */
6052 /* Now update the edges in the CFG. */
6053 e = ssa_redirect_edge (e, dest);
6055 return e;
6058 /* Returns true if it is possible to remove edge E by redirecting
6059 it to the destination of the other edge from E->src. */
6061 static bool
6062 gimple_can_remove_branch_p (const_edge e)
6064 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6065 return false;
6067 return true;
6070 /* Simple wrapper, as we can always redirect fallthru edges. */
6072 static basic_block
6073 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6075 e = gimple_redirect_edge_and_branch (e, dest);
6076 gcc_assert (e);
6078 return NULL;
6082 /* Splits basic block BB after statement STMT (but at least after the
6083 labels). If STMT is NULL, BB is split just after the labels. */
6085 static basic_block
6086 gimple_split_block (basic_block bb, void *stmt)
6088 gimple_stmt_iterator gsi;
6089 gimple_stmt_iterator gsi_tgt;
6090 gimple_seq list;
6091 basic_block new_bb;
6092 edge e;
6093 edge_iterator ei;
6095 new_bb = create_empty_bb (bb);
6097 /* Redirect the outgoing edges. */
6098 new_bb->succs = bb->succs;
6099 bb->succs = NULL;
6100 FOR_EACH_EDGE (e, ei, new_bb->succs)
6101 e->src = new_bb;
6103 /* Get a stmt iterator pointing to the first stmt to move. */
6104 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6105 gsi = gsi_after_labels (bb);
6106 else
6108 gsi = gsi_for_stmt ((gimple *) stmt);
6109 gsi_next (&gsi);
6112 /* Move everything from GSI to the new basic block. */
6113 if (gsi_end_p (gsi))
6114 return new_bb;
6116 /* Split the statement list - avoid re-creating new containers as this
6117 brings ugly quadratic memory consumption in the inliner.
6118 (We are still quadratic since we need to update stmt BB pointers,
6119 sadly.) */
6120 gsi_split_seq_before (&gsi, &list);
6121 set_bb_seq (new_bb, list);
/* Each moved statement must be told its new containing block. */
6122 for (gsi_tgt = gsi_start (list);
6123 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6124 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6126 return new_bb;
6130 /* Moves basic block BB after block AFTER. */
6132 static bool
6133 gimple_move_block_after (basic_block bb, basic_block after)
6135 if (bb->prev_bb == after)
6136 return true;
6138 unlink_block (bb);
6139 link_block (bb, after);
6141 return true;
6145 /* Return TRUE if block BB has no executable statements, otherwise return
6146 FALSE. */
6148 static bool
6149 gimple_empty_block_p (basic_block bb)
6151 /* BB must have no executable statements. */
6152 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6153 if (phi_nodes (bb))
6154 return false;
6155 while (!gsi_end_p (gsi))
6157 gimple *stmt = gsi_stmt (gsi);
6158 if (is_gimple_debug (stmt))
6160 else if (gimple_code (stmt) == GIMPLE_NOP
6161 || gimple_code (stmt) == GIMPLE_PREDICT)
6163 else
6164 return false;
6165 gsi_next (&gsi);
6167 return true;
6171 /* Split a basic block if it ends with a conditional branch and if the
6172 other part of the block is not empty. */
6174 static basic_block
6175 gimple_split_block_before_cond_jump (basic_block bb)
6177 gimple *last, *split_point;
6178 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6179 if (gsi_end_p (gsi))
6180 return NULL;
6181 last = gsi_stmt (gsi);
/* Only GIMPLE_COND and GIMPLE_SWITCH terminators are split off. */
6182 if (gimple_code (last) != GIMPLE_COND
6183 && gimple_code (last) != GIMPLE_SWITCH)
6184 return NULL;
/* Split after the statement preceding the branch.  If the branch is
   the first real statement, SPLIT_POINT ends up NULL and split_block
   splits just after the labels. */
6185 gsi_prev (&gsi);
6186 split_point = gsi_stmt (gsi);
6187 return split_block (bb, split_point)->dest;
6191 /* Return true if basic_block can be duplicated. */
6193 static bool
6194 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
/* At the gimple level every block may be duplicated. */
6196 return true;
6199 /* Create a duplicate of the basic block BB. NOTE: This does not
6200 preserve SSA form. */
6202 static basic_block
6203 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6205 basic_block new_bb;
6206 gimple_stmt_iterator gsi_tgt;
/* Place the copy just before the exit block. */
6208 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6210 /* Copy the PHI nodes. We ignore PHI node arguments here because
6211 the incoming edges have not been setup yet. */
6212 for (gphi_iterator gpi = gsi_start_phis (bb);
6213 !gsi_end_p (gpi);
6214 gsi_next (&gpi))
6216 gphi *phi, *copy;
6217 phi = gpi.phi ();
6218 copy = create_phi_node (NULL_TREE, new_bb);
6219 create_new_def_for (gimple_phi_result (phi), copy,
6220 gimple_phi_result_ptr (copy));
6221 gimple_set_uid (copy, gimple_uid (phi));
6224 gsi_tgt = gsi_start_bb (new_bb);
6225 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6226 !gsi_end_p (gsi);
6227 gsi_next (&gsi))
6229 def_operand_p def_p;
6230 ssa_op_iter op_iter;
6231 tree lhs;
6232 gimple *stmt, *copy;
6234 stmt = gsi_stmt (gsi);
6235 if (gimple_code (stmt) == GIMPLE_LABEL)
6236 continue;
6238 /* Don't duplicate label debug stmts. */
6239 if (gimple_debug_bind_p (stmt)
6240 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6241 == LABEL_DECL)
6242 continue;
6244 /* Create a new copy of STMT and duplicate STMT's virtual
6245 operands. */
6246 copy = gimple_copy (stmt);
6247 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6249 maybe_duplicate_eh_stmt (copy, stmt);
6250 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6252 /* When copying around a stmt writing into a local non-user
6253 aggregate, make sure it won't share stack slot with other
6254 vars. */
6255 lhs = gimple_get_lhs (stmt);
6256 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6258 tree base = get_base_address (lhs);
6259 if (base
6260 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6261 && DECL_IGNORED_P (base)
6262 && !TREE_STATIC (base)
6263 && !DECL_EXTERNAL (base)
6264 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6265 DECL_NONSHAREABLE (base) = 1;
6268 /* If requested remap dependence info of cliques brought in
6269 via inlining. */
6270 if (id)
6271 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6273 tree op = gimple_op (copy, i);
6274 if (!op)
6275 continue;
6276 if (TREE_CODE (op) == ADDR_EXPR
6277 || TREE_CODE (op) == WITH_SIZE_EXPR)
6278 op = TREE_OPERAND (op, 0);
/* Strip component references to reach the base memory reference. */
6279 while (handled_component_p (op))
6280 op = TREE_OPERAND (op, 0);
6281 if ((TREE_CODE (op) == MEM_REF
6282 || TREE_CODE (op) == TARGET_MEM_REF)
6283 && MR_DEPENDENCE_CLIQUE (op) > 1
6284 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6286 if (!id->dependence_map)
6287 id->dependence_map = new hash_map<dependence_hash,
6288 unsigned short>;
6289 bool existed;
/* Allocate a fresh clique number the first time each inlined
   clique is encountered in this copy. */
6290 unsigned short &newc = id->dependence_map->get_or_insert
6291 (MR_DEPENDENCE_CLIQUE (op), &existed);
6292 if (!existed)
6294 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6295 newc = ++cfun->last_clique;
6297 MR_DEPENDENCE_CLIQUE (op) = newc;
6301 /* Create new names for all the definitions created by COPY and
6302 add replacement mappings for each new name. */
6303 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6304 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6307 return new_bb;
6310 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6312 static void
6313 add_phi_args_after_copy_edge (edge e_copy)
6315 basic_block bb, bb_copy = e_copy->src, dest;
6316 edge e;
6317 edge_iterator ei;
6318 gphi *phi, *phi_copy;
6319 tree def;
6320 gphi_iterator psi, psi_copy;
6322 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6323 return;
/* Map the copied source/destination blocks back to their originals
   so we can find the original edge whose PHI arguments we mirror. */
6325 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6327 if (e_copy->dest->flags & BB_DUPLICATED)
6328 dest = get_bb_original (e_copy->dest);
6329 else
6330 dest = e_copy->dest;
6332 e = find_edge (bb, dest);
6333 if (!e)
6335 /* During loop unrolling the target of the latch edge is copied.
6336 In this case we are not looking for edge to dest, but to
6337 duplicated block whose original was dest. */
6338 FOR_EACH_EDGE (e, ei, bb->succs)
6340 if ((e->dest->flags & BB_DUPLICATED)
6341 && get_bb_original (e->dest) == dest)
6342 break;
6345 gcc_assert (e != NULL);
/* Walk the PHIs of the original and copied destinations in lockstep,
   transferring the argument on E over to E_COPY. */
6348 for (psi = gsi_start_phis (e->dest),
6349 psi_copy = gsi_start_phis (e_copy->dest);
6350 !gsi_end_p (psi);
6351 gsi_next (&psi), gsi_next (&psi_copy))
6353 phi = psi.phi ();
6354 phi_copy = psi_copy.phi ();
6355 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6356 add_phi_arg (phi_copy, def, e_copy,
6357 gimple_phi_arg_location_from_edge (phi, e));
6362 /* Basic block BB_COPY was created by code duplication. Add phi node
6363 arguments for edges going out of BB_COPY. The blocks that were
6364 duplicated have BB_DUPLICATED set. */
6366 void
6367 add_phi_args_after_copy_bb (basic_block bb_copy)
6369 edge e_copy;
6370 edge_iterator ei;
6372 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6374 add_phi_args_after_copy_edge (e_copy);
6378 /* Blocks in REGION_COPY array of length N_REGION were created by
6379 duplication of basic blocks. Add phi node arguments for edges
6380 going from these blocks. If E_COPY is not NULL, also add
6381 phi node arguments for its destination.*/
6383 void
6384 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6385 edge e_copy)
6387 unsigned i;
6389 for (i = 0; i < n_region; i++)
6390 region_copy[i]->flags |= BB_DUPLICATED;
6392 for (i = 0; i < n_region; i++)
6393 add_phi_args_after_copy_bb (region_copy[i]);
6394 if (e_copy)
6395 add_phi_args_after_copy_edge (e_copy);
6397 for (i = 0; i < n_region; i++)
6398 region_copy[i]->flags &= ~BB_DUPLICATED;
6401 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6402 important exit edge EXIT. By important we mean that no SSA name defined
6403 inside region is live over the other exit edges of the region. All entry
6404 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6405 to the duplicate of the region. Dominance and loop information is
6406 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6407 UPDATE_DOMINANCE is false then we assume that the caller will update the
6408 dominance information after calling this function. The new basic
6409 blocks are stored to REGION_COPY in the same order as they had in REGION,
6410 provided that REGION_COPY is not NULL.
6411 The function returns false if it is unable to copy the region,
6412 true otherwise. */
6414 bool
6415 gimple_duplicate_sese_region (edge entry, edge exit,
6416 basic_block *region, unsigned n_region,
6417 basic_block *region_copy,
6418 bool update_dominance)
6420 unsigned i;
6421 bool free_region_copy = false, copying_header = false;
6422 class loop *loop = entry->dest->loop_father;
6423 edge exit_copy;
6424 vec<basic_block> doms = vNULL;
6425 edge redirected;
6426 profile_count total_count = profile_count::uninitialized ();
6427 profile_count entry_count = profile_count::uninitialized ();
6429 if (!can_copy_bbs_p (region, n_region))
6430 return false;
6432 /* Some sanity checking. Note that we do not check for all possible
6433 missuses of the functions. I.e. if you ask to copy something weird,
6434 it will work, but the state of structures probably will not be
6435 correct. */
6436 for (i = 0; i < n_region; i++)
6438 /* We do not handle subloops, i.e. all the blocks must belong to the
6439 same loop. */
6440 if (region[i]->loop_father != loop)
6441 return false;
6443 if (region[i] != entry->dest
6444 && region[i] == loop->header)
6445 return false;
6448 /* In case the function is used for loop header copying (which is the primary
6449 use), ensure that EXIT and its copy will be new latch and entry edges. */
6450 if (loop->header == entry->dest)
6452 copying_header = true;
6454 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6455 return false;
6457 for (i = 0; i < n_region; i++)
6458 if (region[i] != exit->src
6459 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6460 return false;
6463 initialize_original_copy_tables ();
/* When copying the header, the copy belongs to the enclosing loop. */
6465 if (copying_header)
6466 set_loop_copy (loop, loop_outer (loop));
6467 else
6468 set_loop_copy (loop, loop);
6470 if (!region_copy)
6472 region_copy = XNEWVEC (basic_block, n_region);
6473 free_region_copy = true;
6476 /* Record blocks outside the region that are dominated by something
6477 inside. */
6478 if (update_dominance)
6480 doms.create (0);
6481 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6484 if (entry->dest->count.initialized_p ())
6486 total_count = entry->dest->count;
6487 entry_count = entry->count ();
6488 /* Fix up corner cases, to avoid division by zero or creation of negative
6489 frequencies. */
6490 if (entry_count > total_count)
6491 entry_count = total_count;
6494 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6495 split_edge_bb_loc (entry), update_dominance);
/* Rescale counts: the original region keeps the flow that does not
   come through ENTRY, the copy receives ENTRY's flow. */
6496 if (total_count.initialized_p () && entry_count.initialized_p ())
6498 scale_bbs_frequencies_profile_count (region, n_region,
6499 total_count - entry_count,
6500 total_count);
6501 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6502 total_count);
6505 if (copying_header)
6507 loop->header = exit->dest;
6508 loop->latch = exit->src;
6511 /* Redirect the entry and add the phi node arguments. */
6512 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6513 gcc_assert (redirected != NULL);
6514 flush_pending_stmts (entry);
6516 /* Concerning updating of dominators: We must recount dominators
6517 for entry block and its copy. Anything that is outside of the
6518 region, but was dominated by something inside needs recounting as
6519 well. */
6520 if (update_dominance)
6522 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6523 doms.safe_push (get_bb_original (entry->dest));
6524 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6525 doms.release ();
6528 /* Add the other PHI node arguments. */
6529 add_phi_args_after_copy (region_copy, n_region, NULL);
6531 if (free_region_copy)
6532 free (region_copy);
6534 free_original_copy_tables ();
6535 return true;
6538 /* Checks if BB is part of the region defined by N_REGION BBS. */
6539 static bool
6540 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6542 unsigned int n;
6544 for (n = 0; n < n_region; n++)
6546 if (bb == bbs[n])
6547 return true;
6549 return false;
6552 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6553 are stored to REGION_COPY in the same order in that they appear
6554 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6555 the region, EXIT an exit from it. The condition guarding EXIT
6556 is moved to ENTRY. Returns true if duplication succeeds, false
6557 otherwise.

   For example,

     some_code;
     if (cond)
       A;
     else
       B;

   is transformed to

     if (cond)
       {
	 some_code;
	 A;
       }
     else
       {
	 some_code;
	 B;
       }  */
6581 bool
6582 gimple_duplicate_sese_tail (edge entry, edge exit,
6583 basic_block *region, unsigned n_region,
6584 basic_block *region_copy)
6586 unsigned i;
6587 bool free_region_copy = false;
6588 class loop *loop = exit->dest->loop_father;
6589 class loop *orig_loop = entry->dest->loop_father;
6590 basic_block switch_bb, entry_bb, nentry_bb;
6591 vec<basic_block> doms;
6592 profile_count total_count = profile_count::uninitialized (),
6593 exit_count = profile_count::uninitialized ();
6594 edge exits[2], nexits[2], e;
6595 gimple_stmt_iterator gsi;
6596 gimple *cond_stmt;
6597 edge sorig, snew;
6598 basic_block exit_bb;
6599 gphi_iterator psi;
6600 gphi *phi;
6601 tree def;
6602 class loop *target, *aloop, *cloop;
/* EXITS[0] is the guarded exit, EXITS[1] its sibling edge. */
6604 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6605 exits[0] = exit;
6606 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6608 if (!can_copy_bbs_p (region, n_region))
6609 return false;
6611 initialize_original_copy_tables ();
6612 set_loop_copy (orig_loop, loop);
/* Duplicate the loop tree for every loop whose header lies inside
   the region. */
6614 target= loop;
6615 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6617 if (bb_part_of_region_p (aloop->header, region, n_region))
6619 cloop = duplicate_loop (aloop, target);
6620 duplicate_subloops (aloop, cloop);
6624 if (!region_copy)
6626 region_copy = XNEWVEC (basic_block, n_region);
6627 free_region_copy = true;
6630 gcc_assert (!need_ssa_update_p (cfun));
6632 /* Record blocks outside the region that are dominated by something
6633 inside. */
6634 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6636 total_count = exit->src->count;
6637 exit_count = exit->count ();
6638 /* Fix up corner cases, to avoid division by zero or creation of negative
6639 frequencies. */
6640 if (exit_count > total_count)
6641 exit_count = total_count;
6643 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6644 split_edge_bb_loc (exit), true);
6645 if (total_count.initialized_p () && exit_count.initialized_p ())
6647 scale_bbs_frequencies_profile_count (region, n_region,
6648 total_count - exit_count,
6649 total_count);
6650 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6651 total_count);
6654 /* Create the switch block, and put the exit condition to it. */
6655 entry_bb = entry->dest;
6656 nentry_bb = get_bb_copy (entry_bb);
6657 if (!last_stmt (entry->src)
6658 || !stmt_ends_bb_p (last_stmt (entry->src)))
6659 switch_bb = entry->src;
6660 else
6661 switch_bb = split_edge (entry);
6662 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6664 gsi = gsi_last_bb (switch_bb);
6665 cond_stmt = last_stmt (exit->src);
6666 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6667 cond_stmt = gimple_copy (cond_stmt);
6669 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
/* The pre-existing fallthru edge of SWITCH_BB takes over EXITS[1]'s
   role; the new edge into the copied region mirrors EXITS[0]. */
6671 sorig = single_succ_edge (switch_bb);
6672 sorig->flags = exits[1]->flags;
6673 sorig->probability = exits[1]->probability;
6674 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6675 snew->probability = exits[0]->probability;
6678 /* Register the new edge from SWITCH_BB in loop exit lists. */
6679 rescan_loop_exit (snew, true, false);
6681 /* Add the PHI node arguments. */
6682 add_phi_args_after_copy (region_copy, n_region, snew);
6684 /* Get rid of now superfluous conditions and associated edges (and phi node
6685 arguments). */
6686 exit_bb = exit->dest;
6688 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6689 PENDING_STMT (e) = NULL;
6691 /* The latch of ORIG_LOOP was copied, and so was the backedge
6692 to the original header. We redirect this backedge to EXIT_BB. */
6693 for (i = 0; i < n_region; i++)
6694 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6696 gcc_assert (single_succ_edge (region_copy[i]));
6697 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6698 PENDING_STMT (e) = NULL;
6699 for (psi = gsi_start_phis (exit_bb);
6700 !gsi_end_p (psi);
6701 gsi_next (&psi))
6703 phi = psi.phi ();
6704 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6705 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6708 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6709 PENDING_STMT (e) = NULL;
6711 /* Anything that is outside of the region, but was dominated by something
6712 inside needs to update dominance info. */
6713 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6714 doms.release ();
6715 /* Update the SSA web. */
6716 update_ssa (TODO_update_ssa);
6718 if (free_region_copy)
6719 free (region_copy);
6721 free_original_copy_tables ();
6722 return true;
6725 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6726 adding blocks when the dominator traversal reaches EXIT. This
6727 function silently assumes that ENTRY strictly dominates EXIT. */
6729 void
6730 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6731 vec<basic_block> *bbs_p)
6733 basic_block son;
/* Recurse over the dominator tree; the subtree rooted at EXIT is
   pruned (EXIT itself is still pushed). */
6735 for (son = first_dom_son (CDI_DOMINATORS, entry);
6736 son;
6737 son = next_dom_son (CDI_DOMINATORS, son))
6739 bbs_p->safe_push (son);
6740 if (son != exit)
6741 gather_blocks_in_sese_region (son, exit, bbs_p);
6745 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6746 The duplicates are recorded in VARS_MAP. */
6748 static void
6749 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6750 tree to_context)
6752 tree t = *tp, new_t;
6753 struct function *f = DECL_STRUCT_FUNCTION (to_context);
/* Already owned by the destination function: nothing to do. */
6755 if (DECL_CONTEXT (t) == to_context)
6756 return;
6758 bool existed;
6759 tree &loc = vars_map->get_or_insert (t, &existed);
6761 if (!existed)
6763 if (SSA_VAR_P (t))
6765 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6766 add_local_decl (f, new_t);
6768 else
6770 gcc_assert (TREE_CODE (t) == CONST_DECL);
6771 new_t = copy_node (t);
6773 DECL_CONTEXT (new_t) = to_context;
/* Cache the duplicate so every reference maps to the same decl. */
6775 loc = new_t;
6777 else
6778 new_t = loc;
6780 *tp = new_t;
6784 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6785 VARS_MAP maps old ssa names and var_decls to the new ones. */
6787 static tree
6788 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6789 tree to_context)
6791 tree new_name;
/* Virtual operands are not moved this way. */
6793 gcc_assert (!virtual_operand_p (name));
6795 tree *loc = vars_map->get (name);
6797 if (!loc)
6799 tree decl = SSA_NAME_VAR (name);
6800 if (decl)
6802 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
/* Duplicate the underlying decl first, then build the new SSA
   name on top of it. */
6803 replace_by_duplicate_decl (&decl, vars_map, to_context);
6804 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6805 decl, SSA_NAME_DEF_STMT (name));
6807 else
6808 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6809 name, SSA_NAME_DEF_STMT (name));
6811 /* Now that we've used the def stmt to define new_name, make sure it
6812 doesn't define name anymore. */
6813 SSA_NAME_DEF_STMT (name) = NULL;
6815 vars_map->put (name, new_name);
6817 else
6818 new_name = *loc;
6820 return new_name;
/* Context shared by move_stmt_op and move_stmt_r while statements are
   being moved to another function. */
6823 struct move_stmt_d
/* Lexical BLOCK being moved; NULL_TREE means statements from any
   block are re-homed (see move_stmt_op/move_stmt_r). */
6825 tree orig_block;
/* BLOCK substituted for ORIG_BLOCK in moved expressions. */
6826 tree new_block;
/* Source and destination function contexts. */
6827 tree from_context;
6828 tree to_context;
/* Maps old decls and SSA names to their duplicates in TO_CONTEXT. */
6829 hash_map<tree, tree> *vars_map;
/* Optional LABEL_DECL remapping, keyed by DECL_UID via tree_map. */
6830 htab_t new_label_map;
/* Maps source EH regions to their duplicates in the destination
   (see move_stmt_eh_region_nr). */
6831 hash_map<void *, void *> *eh_map;
/* Whether local decls should be replaced by their duplicates. */
6832 bool remap_decls_p;
6835 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6836 contained in *TP if it has been ORIG_BLOCK previously and change the
6837 DECL_CONTEXT of every local variable referenced in *TP. */
6839 static tree
6840 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6842 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6843 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6844 tree t = *tp;
6846 if (EXPR_P (t))
6848 tree block = TREE_BLOCK (t);
6849 if (block == NULL_TREE)
6851 else if (block == p->orig_block
6852 || p->orig_block == NULL_TREE)
6854 /* tree_node_can_be_shared says we can share invariant
6855 addresses but unshare_expr copies them anyways. Make sure
6856 to unshare before adjusting the block in place - we do not
6857 always see a copy here. */
6858 if (TREE_CODE (t) == ADDR_EXPR
6859 && is_gimple_min_invariant (t))
6860 *tp = t = unshare_expr (t);
6861 TREE_SET_BLOCK (t, p->new_block);
6863 else if (flag_checking)
/* Sanity check: any other block must be nested inside ORIG_BLOCK. */
6865 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6866 block = BLOCK_SUPERCONTEXT (block);
6867 gcc_assert (block == p->orig_block);
6870 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6872 if (TREE_CODE (t) == SSA_NAME)
6873 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6874 else if (TREE_CODE (t) == PARM_DECL
6875 && gimple_in_ssa_p (cfun))
6876 *tp = *(p->vars_map->get (t));
6877 else if (TREE_CODE (t) == LABEL_DECL)
6879 if (p->new_label_map)
6881 struct tree_map in, *out;
6882 in.base.from = t;
6883 out = (struct tree_map *)
6884 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6885 if (out)
6886 *tp = t = out->to;
6889 /* For FORCED_LABELs we can end up with references from other
6890 functions if some SESE regions are outlined. It is UB to
6891 jump in between them, but they could be used just for printing
6892 addresses etc. In that case, DECL_CONTEXT on the label should
6893 be the function containing the glabel stmt with that LABEL_DECL,
6894 rather than whatever function a reference to the label was seen
6895 last time. */
6896 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6897 DECL_CONTEXT (t) = p->to_context;
6899 else if (p->remap_decls_p)
6901 /* Replace T with its duplicate. T should no longer appear in the
6902 parent function, so this looks wasteful; however, it may appear
6903 in referenced_vars, and more importantly, as virtual operands of
6904 statements, and in alias lists of other variables. It would be
6905 quite difficult to expunge it from all those places. ??? It might
6906 suffice to do this for addressable variables. */
6907 if ((VAR_P (t) && !is_global_var (t))
6908 || TREE_CODE (t) == CONST_DECL)
6909 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Decls carry no subtrees worth revisiting here. */
6911 *walk_subtrees = 0;
6913 else if (TYPE_P (t))
6914 *walk_subtrees = 0;
6916 return NULL_TREE;
6919 /* Helper for move_stmt_r. Given an EH region number for the source
6920 function, map that to the duplicate EH regio number in the dest. */
6922 static int
6923 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6925 eh_region old_r, new_r;
6927 old_r = get_eh_region_from_number (old_nr);
6928 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6930 return new_r->index;
6933 /* Similar, but operate on INTEGER_CSTs. */
6935 static tree
6936 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6938 int old_nr, new_nr;
6940 old_nr = tree_to_shwi (old_t_nr);
6941 new_nr = move_stmt_eh_region_nr (old_nr, p);
6943 return build_int_cst (integer_type_node, new_nr);
6946 /* Like move_stmt_op, but for gimple statements.
6948 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6949 contained in the current statement in *GSI_P and change the
6950 DECL_CONTEXT of every local variable referenced in the current
6951 statement. */
6953 static tree
6954 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6955 struct walk_stmt_info *wi)
6957 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6958 gimple *stmt = gsi_stmt (*gsi_p);
6959 tree block = gimple_block (stmt);
/* Re-home the statement's lexical block if it lived in ORIG_BLOCK
   (or if the whole body is being moved, ORIG_BLOCK == NULL_TREE). */
6961 if (block == p->orig_block
6962 || (p->orig_block == NULL_TREE
6963 && block != NULL_TREE))
6964 gimple_set_block (stmt, p->new_block);
6966 switch (gimple_code (stmt))
6968 case GIMPLE_CALL:
6969 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6971 tree r, fndecl = gimple_call_fndecl (stmt);
6972 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6973 switch (DECL_FUNCTION_CODE (fndecl))
6975 case BUILT_IN_EH_COPY_VALUES:
6976 r = gimple_call_arg (stmt, 1);
6977 r = move_stmt_eh_region_tree_nr (r, p);
6978 gimple_call_set_arg (stmt, 1, r);
6979 /* FALLTHRU */
6981 case BUILT_IN_EH_POINTER:
6982 case BUILT_IN_EH_FILTER:
6983 r = gimple_call_arg (stmt, 0);
6984 r = move_stmt_eh_region_tree_nr (r, p);
6985 gimple_call_set_arg (stmt, 0, r);
6986 break;
6988 default:
6989 break;
6992 break;
6994 case GIMPLE_RESX:
6996 gresx *resx_stmt = as_a <gresx *> (stmt);
6997 int r = gimple_resx_region (resx_stmt);
6998 r = move_stmt_eh_region_nr (r, p);
6999 gimple_resx_set_region (resx_stmt, r);
7001 break;
7003 case GIMPLE_EH_DISPATCH:
7005 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7006 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7007 r = move_stmt_eh_region_nr (r, p);
7008 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7010 break;
7012 case GIMPLE_OMP_RETURN:
7013 case GIMPLE_OMP_CONTINUE:
7014 break;
7016 case GIMPLE_LABEL:
7018 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7019 so that such labels can be referenced from other regions.
7020 Make sure to update it when seeing a GIMPLE_LABEL though,
7021 that is the owner of the label. */
7022 walk_gimple_op (stmt, move_stmt_op, wi);
7023 *handled_ops_p = true;
7024 tree label = gimple_label_label (as_a <glabel *> (stmt));
7025 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7026 DECL_CONTEXT (label) = p->to_context;
7028 break;
7030 default:
7031 if (is_gimple_omp (stmt))
7033 /* Do not remap variables inside OMP directives. Variables
7034 referenced in clauses and directive header belong to the
7035 parent function and should not be moved into the child
7036 function. */
7037 bool save_remap_decls_p = p->remap_decls_p;
7038 p->remap_decls_p = false;
7039 *handled_ops_p = true;
7041 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7042 move_stmt_op, wi);
7044 p->remap_decls_p = save_remap_decls_p;
7046 break;
7049 return NULL_TREE;
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.
     The old->new loop mapping was stashed in loop_father->aux by the
     caller (move_sese_region_to_fn).  */
  if (current_loops)
    {
      class loop *new_loop = (class loop *) bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the array must cover at least bb->index + 1 slots.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by an extra 25% to amortize repeated growth.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  But replace all uses that
	     might be outside of the region we move.  */
	  use_operand_p use_p;
	  imm_use_iterator iter;
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, SSA_NAME_VAR (op));
	  remove_phi_node (&psi, true);
	  /* remove_phi_node advanced PSI; don't advance again.  */
	  continue;
	}

      /* Rewrite the result and all SSA uses into names owned by the
	 destination function.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget the BLOCK of each phi argument location, mirroring what
	 move_stmt_r does for statement locations.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Grow the destination's label->block map if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  /* The label keeps its UID; transfer the map entry.  */
	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH landing pad and histogram data to the destination,
	 then remove them from the source function.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget the BLOCK of goto locations on outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   If there is no single outermost region, return NULL and set *ALL to
   true.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region,
				bool *all)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen REGION to the common enclosing region; NULL means
		 the two regions have no common ancestor region.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      if (region == NULL)
		{
		  *all = true;
		  return NULL;
		}
	    }
	}
    }

  return region;
}
/* Callback used when duplicating EH regions (see duplicate_eh_regions in
   move_sese_region_to_fn): create an artificial label in the current
   function to replace label DECL, record the old->new pair in the hash
   table passed as DATA, and return the new label.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* The new label keeps the old label's UID; widen last_label_uid so the
     UID stays unique in this function.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  /* DECL must not have been mapped before.  */
  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
7285 /* Tree walker to replace the decls used inside value expressions by
7286 duplicates. */
7288 static tree
7289 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7291 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7293 switch (TREE_CODE (*tp))
7295 case VAR_DECL:
7296 case PARM_DECL:
7297 case RESULT_DECL:
7298 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7299 break;
7300 default:
7301 break;
7304 if (IS_TYPE_OR_DECL_P (*tp))
7305 *walk_subtrees = false;
7307 return NULL;
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* TP walks the BLOCK_VARS chain by address so entries can be replaced
     in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      /* Carry the value expression over to the duplicate, with the
		 decls inside it remapped as well.  */
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      /* NOTE(review): the result of unshare_expr is discarded here,
		 which makes the call a no-op — presumably this was meant to
		 be "x = unshare_expr (x);".  Verify intent upstream.  */
	      unshare_expr (x);
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the original.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested scopes.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
7345 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7346 from FN1 to FN2. */
7348 static void
7349 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7350 class loop *loop)
7352 /* Discard it from the old loop array. */
7353 (*get_loops (fn1))[loop->num] = NULL;
7355 /* Place it in the new loop array, assigning it a new number. */
7356 loop->num = number_of_loops (fn2);
7357 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7359 /* Recurse to children. */
7360 for (loop = loop->inner; loop; loop = loop->next)
7361 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY and EXIT, possibly containing noreturn blocks.
   Aborts (via gcc_assert) on any violation; EXIT may be NULL.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Collect the region's block indices into a bitmap for O(1) membership
     tests below.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  /* The entry has exactly one predecessor, from outside.  */
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	/* Every other block is reached only from inside the region.  */
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  /* The exit has exactly one successor, outside the region.  */
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	/* Every other block only branches inside the region.  */
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
7418 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7420 bool
7421 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7423 bitmap release_names = (bitmap)data;
7425 if (TREE_CODE (from) != SSA_NAME)
7426 return true;
7428 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7429 return true;
7432 /* Return LOOP_DIST_ALIAS call if present in BB. */
7434 static gimple *
7435 find_loop_dist_alias (basic_block bb)
7437 gimple *g = last_stmt (bb);
7438 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7439 return NULL;
7441 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7442 gsi_prev (&gsi);
7443 if (gsi_end_p (gsi))
7444 return NULL;
7446 g = gsi_stmt (gsi);
7447 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7448 return g;
7449 return NULL;
/* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
   to VALUE and update any immediate uses of its LHS.  */

void
fold_loop_internal_call (gimple *g, tree value)
{
  tree lhs = gimple_call_lhs (g);
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (g);

  /* Replace the call itself with VALUE ...  */
  update_call_from_tree (&gsi, value);
  /* ... and propagate VALUE into every statement that used the LHS.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, value);
      update_stmt (use_stmt);
    }
}
7473 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7474 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7475 single basic block in the original CFG and the new basic block is
7476 returned. DEST_CFUN must not have a CFG yet.
7478 Note that the region need not be a pure SESE region. Blocks inside
7479 the region may contain calls to abort/exit. The only restriction
7480 is that ENTRY_BB should be the only entry point and it must
7481 dominate EXIT_BB.
7483 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7484 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7485 to the new function.
7487 All local variables referenced in the region are assumed to be in
7488 the corresponding BLOCK_VARS and unexpanded variable lists
7489 associated with DEST_CFUN.
7491 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7492 reimplement move_sese_region_to_fn by duplicating the region rather than
7493 moving it. */
7495 basic_block
7496 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7497 basic_block exit_bb, tree orig_block)
7499 vec<basic_block> bbs, dom_bbs;
7500 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7501 basic_block after, bb, *entry_pred, *exit_succ, abb;
7502 struct function *saved_cfun = cfun;
7503 int *entry_flag, *exit_flag;
7504 profile_probability *entry_prob, *exit_prob;
7505 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7506 edge e;
7507 edge_iterator ei;
7508 htab_t new_label_map;
7509 hash_map<void *, void *> *eh_map;
7510 class loop *loop = entry_bb->loop_father;
7511 class loop *loop0 = get_loop (saved_cfun, 0);
7512 struct move_stmt_d d;
7514 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7515 region. */
7516 gcc_assert (entry_bb != exit_bb
7517 && (!exit_bb
7518 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7520 /* Collect all the blocks in the region. Manually add ENTRY_BB
7521 because it won't be added by dfs_enumerate_from. */
7522 bbs.create (0);
7523 bbs.safe_push (entry_bb);
7524 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7526 if (flag_checking)
7527 verify_sese (entry_bb, exit_bb, &bbs);
7529 /* The blocks that used to be dominated by something in BBS will now be
7530 dominated by the new block. */
7531 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7532 bbs.address (),
7533 bbs.length ());
7535 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7536 the predecessor edges to ENTRY_BB and the successor edges to
7537 EXIT_BB so that we can re-attach them to the new basic block that
7538 will replace the region. */
7539 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7540 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7541 entry_flag = XNEWVEC (int, num_entry_edges);
7542 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7543 i = 0;
7544 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7546 entry_prob[i] = e->probability;
7547 entry_flag[i] = e->flags;
7548 entry_pred[i++] = e->src;
7549 remove_edge (e);
7552 if (exit_bb)
7554 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7555 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7556 exit_flag = XNEWVEC (int, num_exit_edges);
7557 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7558 i = 0;
7559 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7561 exit_prob[i] = e->probability;
7562 exit_flag[i] = e->flags;
7563 exit_succ[i++] = e->dest;
7564 remove_edge (e);
7567 else
7569 num_exit_edges = 0;
7570 exit_succ = NULL;
7571 exit_flag = NULL;
7572 exit_prob = NULL;
7575 /* Switch context to the child function to initialize DEST_FN's CFG. */
7576 gcc_assert (dest_cfun->cfg == NULL);
7577 push_cfun (dest_cfun);
7579 init_empty_tree_cfg ();
7581 /* Initialize EH information for the new function. */
7582 eh_map = NULL;
7583 new_label_map = NULL;
7584 if (saved_cfun->eh)
7586 eh_region region = NULL;
7587 bool all = false;
7589 FOR_EACH_VEC_ELT (bbs, i, bb)
7591 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7592 if (all)
7593 break;
7596 init_eh_for_function ();
7597 if (region != NULL || all)
7599 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7600 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7601 new_label_mapper, new_label_map);
7605 /* Initialize an empty loop tree. */
7606 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7607 init_loops_structure (dest_cfun, loops, 1);
7608 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7609 set_loops_for_fn (dest_cfun, loops);
7611 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7613 /* Move the outlined loop tree part. */
7614 num_nodes = bbs.length ();
7615 FOR_EACH_VEC_ELT (bbs, i, bb)
7617 if (bb->loop_father->header == bb)
7619 class loop *this_loop = bb->loop_father;
7620 class loop *outer = loop_outer (this_loop);
7621 if (outer == loop
7622 /* If the SESE region contains some bbs ending with
7623 a noreturn call, those are considered to belong
7624 to the outermost loop in saved_cfun, rather than
7625 the entry_bb's loop_father. */
7626 || outer == loop0)
7628 if (outer != loop)
7629 num_nodes -= this_loop->num_nodes;
7630 flow_loop_tree_node_remove (bb->loop_father);
7631 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7632 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7635 else if (bb->loop_father == loop0 && loop0 != loop)
7636 num_nodes--;
7638 /* Remove loop exits from the outlined region. */
7639 if (loops_for_fn (saved_cfun)->exits)
7640 FOR_EACH_EDGE (e, ei, bb->succs)
7642 struct loops *l = loops_for_fn (saved_cfun);
7643 loop_exit **slot
7644 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7645 NO_INSERT);
7646 if (slot)
7647 l->exits->clear_slot (slot);
7651 /* Adjust the number of blocks in the tree root of the outlined part. */
7652 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7654 /* Setup a mapping to be used by move_block_to_fn. */
7655 loop->aux = current_loops->tree_root;
7656 loop0->aux = current_loops->tree_root;
7658 /* Fix up orig_loop_num. If the block referenced in it has been moved
7659 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7660 class loop *dloop;
7661 signed char *moved_orig_loop_num = NULL;
7662 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7663 if (dloop->orig_loop_num)
7665 if (moved_orig_loop_num == NULL)
7666 moved_orig_loop_num
7667 = XCNEWVEC (signed char, vec_safe_length (larray));
7668 if ((*larray)[dloop->orig_loop_num] != NULL
7669 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7671 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7672 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7673 moved_orig_loop_num[dloop->orig_loop_num]++;
7674 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7676 else
7678 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7679 dloop->orig_loop_num = 0;
7682 pop_cfun ();
7684 if (moved_orig_loop_num)
7686 FOR_EACH_VEC_ELT (bbs, i, bb)
7688 gimple *g = find_loop_dist_alias (bb);
7689 if (g == NULL)
7690 continue;
7692 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7693 gcc_assert (orig_loop_num
7694 && (unsigned) orig_loop_num < vec_safe_length (larray));
7695 if (moved_orig_loop_num[orig_loop_num] == 2)
7697 /* If we have moved both loops with this orig_loop_num into
7698 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7699 too, update the first argument. */
7700 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7701 && (get_loop (saved_cfun, dloop->orig_loop_num)
7702 == NULL));
7703 tree t = build_int_cst (integer_type_node,
7704 (*larray)[dloop->orig_loop_num]->num);
7705 gimple_call_set_arg (g, 0, t);
7706 update_stmt (g);
7707 /* Make sure the following loop will not update it. */
7708 moved_orig_loop_num[orig_loop_num] = 0;
7710 else
7711 /* Otherwise at least one of the loops stayed in saved_cfun.
7712 Remove the LOOP_DIST_ALIAS call. */
7713 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7715 FOR_EACH_BB_FN (bb, saved_cfun)
7717 gimple *g = find_loop_dist_alias (bb);
7718 if (g == NULL)
7719 continue;
7720 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7721 gcc_assert (orig_loop_num
7722 && (unsigned) orig_loop_num < vec_safe_length (larray));
7723 if (moved_orig_loop_num[orig_loop_num])
7724 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7725 of the corresponding loops was moved, remove it. */
7726 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7728 XDELETEVEC (moved_orig_loop_num);
7730 ggc_free (larray);
7732 /* Move blocks from BBS into DEST_CFUN. */
7733 gcc_assert (bbs.length () >= 2);
7734 after = dest_cfun->cfg->x_entry_block_ptr;
7735 hash_map<tree, tree> vars_map;
7737 memset (&d, 0, sizeof (d));
7738 d.orig_block = orig_block;
7739 d.new_block = DECL_INITIAL (dest_cfun->decl);
7740 d.from_context = cfun->decl;
7741 d.to_context = dest_cfun->decl;
7742 d.vars_map = &vars_map;
7743 d.new_label_map = new_label_map;
7744 d.eh_map = eh_map;
7745 d.remap_decls_p = true;
7747 if (gimple_in_ssa_p (cfun))
7748 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7750 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7751 set_ssa_default_def (dest_cfun, arg, narg);
7752 vars_map.put (arg, narg);
7755 FOR_EACH_VEC_ELT (bbs, i, bb)
7757 /* No need to update edge counts on the last block. It has
7758 already been updated earlier when we detached the region from
7759 the original CFG. */
7760 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7761 after = bb;
7764 /* Adjust the maximum clique used. */
7765 dest_cfun->last_clique = saved_cfun->last_clique;
7767 loop->aux = NULL;
7768 loop0->aux = NULL;
7769 /* Loop sizes are no longer correct, fix them up. */
7770 loop->num_nodes -= num_nodes;
7771 for (class loop *outer = loop_outer (loop);
7772 outer; outer = loop_outer (outer))
7773 outer->num_nodes -= num_nodes;
7774 loop0->num_nodes -= bbs.length () - num_nodes;
7776 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7778 class loop *aloop;
7779 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7780 if (aloop != NULL)
7782 if (aloop->simduid)
7784 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7785 d.to_context);
7786 dest_cfun->has_simduid_loops = true;
7788 if (aloop->force_vectorize)
7789 dest_cfun->has_force_vectorize_loops = true;
7793 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7794 if (orig_block)
7796 tree block;
7797 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7798 == NULL_TREE);
7799 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7800 = BLOCK_SUBBLOCKS (orig_block);
7801 for (block = BLOCK_SUBBLOCKS (orig_block);
7802 block; block = BLOCK_CHAIN (block))
7803 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7804 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7807 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7808 &vars_map, dest_cfun->decl);
7810 if (new_label_map)
7811 htab_delete (new_label_map);
7812 if (eh_map)
7813 delete eh_map;
7815 if (gimple_in_ssa_p (cfun))
7817 /* We need to release ssa-names in a defined order, so first find them,
7818 and then iterate in ascending version order. */
7819 bitmap release_names = BITMAP_ALLOC (NULL);
7820 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7821 bitmap_iterator bi;
7822 unsigned i;
7823 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7824 release_ssa_name (ssa_name (i));
7825 BITMAP_FREE (release_names);
7828 /* Rewire the entry and exit blocks. The successor to the entry
7829 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7830 the child function. Similarly, the predecessor of DEST_FN's
7831 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7832 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7833 various CFG manipulation function get to the right CFG.
7835 FIXME, this is silly. The CFG ought to become a parameter to
7836 these helpers. */
7837 push_cfun (dest_cfun);
7838 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7839 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7840 if (exit_bb)
7842 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7843 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7845 else
7846 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7847 pop_cfun ();
7849 /* Back in the original function, the SESE region has disappeared,
7850 create a new basic block in its place. */
7851 bb = create_empty_bb (entry_pred[0]);
7852 if (current_loops)
7853 add_bb_to_loop (bb, loop);
7854 for (i = 0; i < num_entry_edges; i++)
7856 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7857 e->probability = entry_prob[i];
7860 for (i = 0; i < num_exit_edges; i++)
7862 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7863 e->probability = exit_prob[i];
7866 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7867 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7868 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7869 dom_bbs.release ();
7871 if (exit_bb)
7873 free (exit_prob);
7874 free (exit_flag);
7875 free (exit_succ);
7877 free (entry_prob);
7878 free (entry_flag);
7879 free (entry_pred);
7880 bbs.release ();
7882 return bb;
7885 /* Dump default def DEF to file FILE using FLAGS and indentation
7886 SPC. */
7888 static void
7889 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7891 for (int i = 0; i < spc; ++i)
7892 fprintf (file, " ");
7893 dump_ssaname_info_to_file (file, def, spc);
7895 print_generic_expr (file, TREE_TYPE (def), flags);
7896 fprintf (file, " ");
7897 print_generic_expr (file, def, flags);
7898 fprintf (file, " = ");
7899 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7900 fprintf (file, ";\n");
7903 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7905 static void
7906 print_no_sanitize_attr_value (FILE *file, tree value)
7908 unsigned int flags = tree_to_uhwi (value);
7909 bool first = true;
7910 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7912 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7914 if (!first)
7915 fprintf (file, " | ");
7916 fprintf (file, "%s", sanitizer_opts[i].name);
7917 first = false;
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Print the function's attribute list in __attribute__((...)) form.  */
  if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
    {
      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = DECL_ATTRIBUTES (fndecl); chain;
	   first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      /* no_sanitize stores a bitmask; decode it symbolically.  */
	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  if (flags & TDF_GIMPLE)
    {
      /* Dump in a form the GIMPLE frontend can parse back.  Emit the
	 hot-bb threshold param comment once per process.  */
      static bool hotness_bb_param_printed = false;
      if (profile_info != NULL
	  && !hotness_bb_param_printed)
	{
	  hotness_bb_param_printed = true;
	  fprintf (file,
		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
		   " */\n", get_hot_bb_threshold ());
	}

      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE (%s",
	       (fun->curr_properties & PROP_ssa) ? "ssa"
	       : (fun->curr_properties & PROP_cfg) ? "cfg"
	       : "");

      if (cfun->cfg)
	{
	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
	  if (bb->count.initialized_p ())
	    /* NOTE(review): "%d" paired with bb->count.value () looks
	       suspicious if the count value is 64-bit — verify against
	       profile_count::value's return type; a PRId64-style
	       specifier may be needed here.  */
	    fprintf (file, ",%s(%d)",
		     profile_quality_as_string (bb->count.quality ()),
		     bb->count.value ());
	  fprintf (file, ")\n%s (", function_name (fun));
	}
    }
  else
    fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      /* Raw tree dump of a function with no gimple body; nothing more
	 to print after this.  */
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  /* Dump default defs for arguments, a by-reference result, and
	     the static chain.  */
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      /* Also dump anonymous SSA names (those without an underlying
	 variable).  */
      if (gimple_in_ssa_p (cfun))
	FOR_EACH_SSA_NAME (ix, name, cfun)
	  {
	    if (!SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun->curr_properties & PROP_gimple_any)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  dump_function_to_file (fn, stderr, flags);
}
8176 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8178 static void
8179 print_pred_bbs (FILE *file, basic_block bb)
8181 edge e;
8182 edge_iterator ei;
8184 FOR_EACH_EDGE (e, ei, bb->preds)
8185 fprintf (file, "bb_%d ", e->src->index);
8189 /* Print on FILE the indexes for the successors of basic_block BB. */
8191 static void
8192 print_succ_bbs (FILE *file, basic_block bb)
8194 edge e;
8195 edge_iterator ei;
8197 FOR_EACH_EDGE (e, ei, bb->succs)
8198 fprintf (file, "bb_%d ", e->dest->index);
8201 /* Print to FILE the basic block BB following the VERBOSITY level. */
8203 void
8204 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8206 char *s_indent = (char *) alloca ((size_t) indent + 1);
8207 memset ((void *) s_indent, ' ', (size_t) indent);
8208 s_indent[indent] = '\0';
8210 /* Print basic_block's header. */
8211 if (verbosity >= 2)
8213 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8214 print_pred_bbs (file, bb);
8215 fprintf (file, "}, succs = {");
8216 print_succ_bbs (file, bb);
8217 fprintf (file, "})\n");
8220 /* Print basic_block's body. */
8221 if (verbosity >= 3)
8223 fprintf (file, "%s {\n", s_indent);
8224 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8225 fprintf (file, "%s }\n", s_indent);
8229 static void print_loop_and_siblings (FILE *, class loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  A deleted loop (NULL header) prints only "deleted)".  */

static void
print_loop (FILE *file, class loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  /* Build an INDENT-wide whitespace prefix on the stack.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop without a header block has been deleted; nothing more
	 to print.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations);

  /* Iteration-count bounds are only meaningful when the corresponding
     any_* flag is set.  */
  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  if (loop->unroll)
    fprintf (file, ", unroll = %d", loop->unroll);
  fprintf (file, ")\n");

  /* Print loop's body: its own blocks, then nested loops recursively.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
8297 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8298 spaces. Following VERBOSITY level this outputs the contents of the
8299 loop, or just its structure. */
8301 static void
8302 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8303 int verbosity)
8305 if (loop == NULL)
8306 return;
8308 print_loop (file, loop, indent, verbosity);
8309 print_loop_and_siblings (file, loop->next, indent, verbosity);
8312 /* Follow a CFG edge from the entry point of the program, and on entry
8313 of a loop, pretty print the loop structure on FILE. */
8315 void
8316 print_loops (FILE *file, int verbosity)
8318 basic_block bb;
8320 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8321 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8322 if (bb && bb->loop_father)
8323 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump a loop.  Non-verbose (structure only) dump of REF to stderr.  */

DEBUG_FUNCTION void
debug (class loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
8334 DEBUG_FUNCTION void
8335 debug (class loop *ptr)
8337 if (ptr)
8338 debug (*ptr);
8339 else
8340 fprintf (stderr, "<nil>\n");
/* Dump a loop verbosely.  Dumps REF to stderr at the maximum
   verbosity level (headers, predecessor/successor lists and
   statements of every block).  */

DEBUG_FUNCTION void
debug_verbose (class loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
8351 DEBUG_FUNCTION void
8352 debug_verbose (class loop *ptr)
8354 if (ptr)
8355 debug (*ptr);
8356 else
8357 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Dumps the whole loop tree of the current function to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (class loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up in the current function's loop tree.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
8386 /* Return true if BB ends with a call, possibly followed by some
8387 instructions that must stay with the call. Return false,
8388 otherwise. */
8390 static bool
8391 gimple_block_ends_with_call_p (basic_block bb)
8393 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8394 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8398 /* Return true if BB ends with a conditional branch. Return false,
8399 otherwise. */
8401 static bool
8402 gimple_block_ends_with_condjump_p (const_basic_block bb)
8404 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8405 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (cfun, t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A known nothrow builtin that cannot return twice cannot terminate
     the block in any hidden way.  */
  if (is_gimple_call (t)
      && fndecl
      && fndecl_built_in_p (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_dump() and
	 __gcov_reset() and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Non-looping const/pure calls always return normally.  */
      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* For a noreturn call, only a non-fake outgoing edge makes the
	 termination visible to the CFG.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asm and asm without outputs may terminate in hidden ways
     (e.g. by trapping or looping forever).  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do if the function has only the fixed entry/exit
     blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop splits the edge, giving the terminating call a
		 block of its own without a pre-existing exit edge.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements of BB backwards so splitting below does not
	 disturb the part already processed.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominator info we can only drop the edge itself.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of the removed blocks, minus the removed blocks
	 themselves — i.e. the dominance frontier of E.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8726 /* Purge dead EH edges from basic block BB. */
8728 bool
8729 gimple_purge_dead_eh_edges (basic_block bb)
8731 bool changed = false;
8732 edge e;
8733 edge_iterator ei;
8734 gimple *stmt = last_stmt (bb);
8736 if (stmt && stmt_can_throw_internal (cfun, stmt))
8737 return false;
8739 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8741 if (e->flags & EDGE_EH)
8743 remove_edge_and_dominated_blocks (e);
8744 changed = true;
8746 else
8747 ei_next (&ei);
8750 return changed;
8753 /* Purge dead EH edges from basic block listed in BLOCKS. */
8755 bool
8756 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8758 bool changed = false;
8759 unsigned i;
8760 bitmap_iterator bi;
8762 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8764 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8766 /* Earlier gimple_purge_dead_eh_edges could have removed
8767 this basic block already. */
8768 gcc_assert (bb || changed);
8769 if (bb != NULL)
8770 changed |= gimple_purge_dead_eh_edges (bb);
8773 return changed;
8776 /* Purge dead abnormal call edges from basic block BB. */
8778 bool
8779 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8781 bool changed = false;
8782 edge e;
8783 edge_iterator ei;
8784 gimple *stmt = last_stmt (bb);
8786 if (!cfun->has_nonlocal_label
8787 && !cfun->calls_setjmp)
8788 return false;
8790 if (stmt && stmt_can_make_abnormal_goto (stmt))
8791 return false;
8793 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8795 if (e->flags & EDGE_ABNORMAL)
8797 if (e->flags & EDGE_FALLTHRU)
8798 e->flags &= ~EDGE_ABNORMAL;
8799 else
8800 remove_edge_and_dominated_blocks (e);
8801 changed = true;
8803 else
8804 ei_next (&ei);
8807 return changed;
8810 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8812 bool
8813 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8815 bool changed = false;
8816 unsigned i;
8817 bitmap_iterator bi;
8819 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8821 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8823 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8824 this basic block already. */
8825 gcc_assert (bb || changed);
8826 if (bb != NULL)
8827 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8830 return changed;
8833 /* This function is called whenever a new edge is created or
8834 redirected. */
8836 static void
8837 gimple_execute_on_growing_pred (edge e)
8839 basic_block bb = e->dest;
8841 if (!gimple_seq_empty_p (phi_nodes (bb)))
8842 reserve_phi_args_for_new_edge (bb);
8845 /* This function is called immediately before edge E is removed from
8846 the edge vector E->dest->preds. */
8848 static void
8849 gimple_execute_on_shrinking_pred (edge e)
8851 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8852 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* FIRST's PHI on edge E mirrors SECOND's PHI argument on the
	 split edge E2, including its source location.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  COND_E is the condition tree,
   passed as void* to match the cfg-hooks signature.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing fallthru edge becomes the
     FALSE edge of the new condition; the caller is expected to add
     the TRUE edge.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   Store the counting in RECORD.  Size is accumulated for every statement;
   time is weighted by the block's real count when one is available, or by
   its estimated frequency when the profile is guessed.  */

static void
gimple_account_profile_record (basic_block bb,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (bb->count.initialized_p ())
	record->time
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_frequency (cfun);
    }
}
/* CFG hooks implementation for GIMPLE; see cfghooks.h for the contract
   of each entry.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb, /* dump_bb  */
  gimple_dump_bb_for_graph, /* dump_bb_for_graph  */
  create_bb, /* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p, /* can_remove_branch_p  */
  remove_bb, /* delete_basic_block  */
  gimple_split_block, /* split_block  */
  gimple_move_block_after, /* move_block_after  */
  gimple_can_merge_blocks_p, /* can_merge_blocks_p  */
  gimple_merge_blocks, /* merge_blocks  */
  gimple_predict_edge, /* predict_edge  */
  gimple_predicted_by_p, /* predicted_by_p  */
  gimple_can_duplicate_bb_p, /* can_duplicate_block_p  */
  gimple_duplicate_bb, /* duplicate_block  */
  gimple_split_edge, /* split_edge  */
  gimple_make_forwarder_block, /* make_forward_block  */
  NULL, /* tidy_fallthru_edge  */
  NULL, /* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add, /* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts, /* flush_pending_stmts  */
  gimple_empty_block_p, /* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
/* Split all critical edges.  Split some extra (not necessarily critical) edges
   if FOR_EDGE_INSERTION_P is true.  Always returns 0 (pass return value).  */

unsigned int
split_critical_edges (bool for_edge_insertion_p /* = false */)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Abnormal edges cannot be split at all.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if (for_edge_insertion_p
		   && (!single_pred_p (e->dest)
		       || !gimple_seq_empty_p (phi_nodes (e->dest))
		       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the "crited" critical-edge-splitting pass.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* GIMPLE pass wrapper that invokes split_critical_edges.  */
class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory for the critical-edge-splitting pass.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */

basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block reached on the TRUE edge.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: it becomes the FALSE edge with the
     complementary probability.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
9118 /* Build a ternary operation and gimplify it. Emit code before GSI.
9119 Return the gimple_val holding the result. */
9121 tree
9122 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9123 tree type, tree a, tree b, tree c)
9125 tree ret;
9126 location_t loc = gimple_location (gsi_stmt (*gsi));
9128 ret = fold_build3_loc (loc, code, type, a, b, c);
9129 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9130 GSI_SAME_STMT);
9133 /* Build a binary operation and gimplify it. Emit code before GSI.
9134 Return the gimple_val holding the result. */
9136 tree
9137 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9138 tree type, tree a, tree b)
9140 tree ret;
9142 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9143 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9144 GSI_SAME_STMT);
9147 /* Build a unary operation and gimplify it. Emit code before GSI.
9148 Return the gimple_val holding the result. */
9150 tree
9151 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9152 tree a)
9154 tree ret;
9156 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9157 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9158 GSI_SAME_STMT);
9163 /* Given a basic block B which ends with a conditional and has
9164 precisely two successors, determine which of the edges is taken if
9165 the conditional is true and which is taken if the conditional is
9166 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9168 void
9169 extract_true_false_edges_from_block (basic_block b,
9170 edge *true_edge,
9171 edge *false_edge)
9173 edge e = EDGE_SUCC (b, 0);
9175 if (e->flags & EDGE_TRUE_VALUE)
9177 *true_edge = e;
9178 *false_edge = EDGE_SUCC (b, 1);
9180 else
9182 *false_edge = e;
9183 *true_edge = EDGE_SUCC (b, 1);
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else return false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
				     edge *true_controlled_edge,
				     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);

  /* Classify the first predecessor edge of PHIBLOCK as controlled by
     the true or the false edge, or fail.  */
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;

  /* Same classification for the second predecessor edge.  */
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;

  /* Both a true-controlled and a false-controlled edge must have been
     found (the two predecessors may not map to the same side).  */
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
9254 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9255 range [low, high]. Place associated stmts before *GSI. */
9257 void
9258 generate_range_test (basic_block bb, tree index, tree low, tree high,
9259 tree *lhs, tree *rhs)
9261 tree type = TREE_TYPE (index);
9262 tree utype = range_check_type (type);
9264 low = fold_convert (utype, low);
9265 high = fold_convert (utype, high);
9267 gimple_seq seq = NULL;
9268 index = gimple_convert (&seq, utype, index);
9269 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9270 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9272 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9273 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9276 /* Return the basic block that belongs to label numbered INDEX
9277 of a switch statement. */
9279 basic_block
9280 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9282 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9285 /* Return the default basic block of a switch statement. */
9287 basic_block
9288 gimple_switch_default_bb (function *ifun, gswitch *gs)
9290 return gimple_switch_label_bb (ifun, gs, 0);
9293 /* Return the edge that belongs to label numbered INDEX
9294 of a switch statement. */
9296 edge
9297 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9299 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9302 /* Return the default edge of a switch statement. */
9304 edge
9305 gimple_switch_default_edge (function *ifun, gswitch *gs)
9307 return gimple_switch_edge (ifun, gs, 0);
9311 /* Emit return warnings. */
9313 namespace {
9315 const pass_data pass_data_warn_function_return =
9317 GIMPLE_PASS, /* type */
9318 "*warn_function_return", /* name */
9319 OPTGROUP_NONE, /* optinfo_flags */
9320 TV_NONE, /* tv_id */
9321 PROP_cfg, /* properties_required */
9322 0, /* properties_provided */
9323 0, /* properties_destroyed */
9324 0, /* todo_flags_start */
9325 0, /* todo_flags_finish */
9328 class pass_warn_function_return : public gimple_opt_pass
9330 public:
9331 pass_warn_function_return (gcc::context *ctxt)
9332 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9335 /* opt_pass methods: */
9336 virtual unsigned int execute (function *);
9338 }; // class pass_warn_function_return
9340 unsigned int
9341 pass_warn_function_return::execute (function *fun)
9343 location_t location;
9344 gimple *last;
9345 edge e;
9346 edge_iterator ei;
9348 if (!targetm.warn_func_return (fun->decl))
9349 return 0;
9351 /* If we have a path to EXIT, then we do return. */
9352 if (TREE_THIS_VOLATILE (fun->decl)
9353 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9355 location = UNKNOWN_LOCATION;
9356 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9357 (e = ei_safe_edge (ei)); )
9359 last = last_stmt (e->src);
9360 if ((gimple_code (last) == GIMPLE_RETURN
9361 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9362 && location == UNKNOWN_LOCATION
9363 && ((location = LOCATION_LOCUS (gimple_location (last)))
9364 != UNKNOWN_LOCATION)
9365 && !optimize)
9366 break;
9367 /* When optimizing, replace return stmts in noreturn functions
9368 with __builtin_unreachable () call. */
9369 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9371 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9372 gimple *new_stmt = gimple_build_call (fndecl, 0);
9373 gimple_set_location (new_stmt, gimple_location (last));
9374 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9375 gsi_replace (&gsi, new_stmt, true);
9376 remove_edge (e);
9378 else
9379 ei_next (&ei);
9381 if (location == UNKNOWN_LOCATION)
9382 location = cfun->function_end_locus;
9383 warning_at (location, 0, "%<noreturn%> function does return");
9386 /* If we see "return;" in some basic block, then we do reach the end
9387 without returning a value. */
9388 else if (warn_return_type > 0
9389 && !TREE_NO_WARNING (fun->decl)
9390 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9392 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9394 gimple *last = last_stmt (e->src);
9395 greturn *return_stmt = dyn_cast <greturn *> (last);
9396 if (return_stmt
9397 && gimple_return_retval (return_stmt) == NULL
9398 && !gimple_no_warning_p (last))
9400 location = gimple_location (last);
9401 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9402 location = fun->function_end_locus;
9403 if (warning_at (location, OPT_Wreturn_type,
9404 "control reaches end of non-void function"))
9405 TREE_NO_WARNING (fun->decl) = 1;
9406 break;
9409 /* The C++ FE turns fallthrough from the end of non-void function
9410 into __builtin_unreachable () call with BUILTINS_LOCATION.
9411 Recognize those too. */
9412 basic_block bb;
9413 if (!TREE_NO_WARNING (fun->decl))
9414 FOR_EACH_BB_FN (bb, fun)
9415 if (EDGE_COUNT (bb->succs) == 0)
9417 gimple *last = last_stmt (bb);
9418 const enum built_in_function ubsan_missing_ret
9419 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9420 if (last
9421 && ((LOCATION_LOCUS (gimple_location (last))
9422 == BUILTINS_LOCATION
9423 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9424 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9426 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9427 gsi_prev_nondebug (&gsi);
9428 gimple *prev = gsi_stmt (gsi);
9429 if (prev == NULL)
9430 location = UNKNOWN_LOCATION;
9431 else
9432 location = gimple_location (prev);
9433 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9434 location = fun->function_end_locus;
9435 if (warning_at (location, OPT_Wreturn_type,
9436 "control reaches end of non-void function"))
9437 TREE_NO_WARNING (fun->decl) = 1;
9438 break;
9442 return 0;
9445 } // anon namespace
9447 gimple_opt_pass *
9448 make_pass_warn_function_return (gcc::context *ctxt)
9450 return new pass_warn_function_return (ctxt);
9453 /* Walk a gimplified function and warn for functions whose return value is
9454 ignored and attribute((warn_unused_result)) is set. This is done before
9455 inlining, so we don't have to worry about that. */
9457 static void
9458 do_warn_unused_result (gimple_seq seq)
9460 tree fdecl, ftype;
9461 gimple_stmt_iterator i;
9463 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9465 gimple *g = gsi_stmt (i);
9467 switch (gimple_code (g))
9469 case GIMPLE_BIND:
9470 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9471 break;
9472 case GIMPLE_TRY:
9473 do_warn_unused_result (gimple_try_eval (g));
9474 do_warn_unused_result (gimple_try_cleanup (g));
9475 break;
9476 case GIMPLE_CATCH:
9477 do_warn_unused_result (gimple_catch_handler (
9478 as_a <gcatch *> (g)));
9479 break;
9480 case GIMPLE_EH_FILTER:
9481 do_warn_unused_result (gimple_eh_filter_failure (g));
9482 break;
9484 case GIMPLE_CALL:
9485 if (gimple_call_lhs (g))
9486 break;
9487 if (gimple_call_internal_p (g))
9488 break;
9490 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9491 LHS. All calls whose value is ignored should be
9492 represented like this. Look for the attribute. */
9493 fdecl = gimple_call_fndecl (g);
9494 ftype = gimple_call_fntype (g);
9496 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9498 location_t loc = gimple_location (g);
9500 if (fdecl)
9501 warning_at (loc, OPT_Wunused_result,
9502 "ignoring return value of %qD "
9503 "declared with attribute %<warn_unused_result%>",
9504 fdecl);
9505 else
9506 warning_at (loc, OPT_Wunused_result,
9507 "ignoring return value of function "
9508 "declared with attribute %<warn_unused_result%>");
9510 break;
9512 default:
9513 /* Not a container, not a call, or a call whose value is used. */
9514 break;
9519 namespace {
9521 const pass_data pass_data_warn_unused_result =
9523 GIMPLE_PASS, /* type */
9524 "*warn_unused_result", /* name */
9525 OPTGROUP_NONE, /* optinfo_flags */
9526 TV_NONE, /* tv_id */
9527 PROP_gimple_any, /* properties_required */
9528 0, /* properties_provided */
9529 0, /* properties_destroyed */
9530 0, /* todo_flags_start */
9531 0, /* todo_flags_finish */
9534 class pass_warn_unused_result : public gimple_opt_pass
9536 public:
9537 pass_warn_unused_result (gcc::context *ctxt)
9538 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9541 /* opt_pass methods: */
9542 virtual bool gate (function *) { return flag_warn_unused_result; }
9543 virtual unsigned int execute (function *)
9545 do_warn_unused_result (gimple_body (current_function_decl));
9546 return 0;
9549 }; // class pass_warn_unused_result
9551 } // anon namespace
9553 gimple_opt_pass *
9554 make_pass_warn_unused_result (gcc::context *ctxt)
9556 return new pass_warn_unused_result (ctxt);
9559 /* IPA passes, compilation of earlier functions or inlining
9560 might have changed some properties, such as marked functions nothrow,
9561 pure, const or noreturn.
9562 Remove redundant edges and basic blocks, and create new ones if necessary.
9564 This pass can't be executed as stand alone pass from pass manager, because
9565 in between inlining and this fixup the verify_flow_info would fail. */
9567 unsigned int
9568 execute_fixup_cfg (void)
9570 basic_block bb;
9571 gimple_stmt_iterator gsi;
9572 int todo = 0;
9573 cgraph_node *node = cgraph_node::get (current_function_decl);
9574 /* Same scaling is also done by ipa_merge_profiles. */
9575 profile_count num = node->count;
9576 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9577 bool scale = num.initialized_p () && !(num == den);
9579 if (scale)
9581 profile_count::adjust_for_ipa_scaling (&num, &den);
9582 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9583 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9584 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9587 FOR_EACH_BB_FN (bb, cfun)
9589 if (scale)
9590 bb->count = bb->count.apply_scale (num, den);
9591 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9593 gimple *stmt = gsi_stmt (gsi);
9594 tree decl = is_gimple_call (stmt)
9595 ? gimple_call_fndecl (stmt)
9596 : NULL;
9597 if (decl)
9599 int flags = gimple_call_flags (stmt);
9600 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9602 if (gimple_purge_dead_abnormal_call_edges (bb))
9603 todo |= TODO_cleanup_cfg;
9605 if (gimple_in_ssa_p (cfun))
9607 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9608 update_stmt (stmt);
9612 if (flags & ECF_NORETURN
9613 && fixup_noreturn_call (stmt))
9614 todo |= TODO_cleanup_cfg;
9617 /* Remove stores to variables we marked write-only.
9618 Keep access when store has side effect, i.e. in case when source
9619 is volatile. */
9620 if (gimple_store_p (stmt)
9621 && !gimple_has_side_effects (stmt)
9622 && !optimize_debug)
9624 tree lhs = get_base_address (gimple_get_lhs (stmt));
9626 if (VAR_P (lhs)
9627 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9628 && varpool_node::get (lhs)->writeonly)
9630 unlink_stmt_vdef (stmt);
9631 gsi_remove (&gsi, true);
9632 release_defs (stmt);
9633 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9634 continue;
9637 /* For calls we can simply remove LHS when it is known
9638 to be write-only. */
9639 if (is_gimple_call (stmt)
9640 && gimple_get_lhs (stmt))
9642 tree lhs = get_base_address (gimple_get_lhs (stmt));
9644 if (VAR_P (lhs)
9645 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9646 && varpool_node::get (lhs)->writeonly)
9648 gimple_call_set_lhs (stmt, NULL);
9649 update_stmt (stmt);
9650 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9654 if (maybe_clean_eh_stmt (stmt)
9655 && gimple_purge_dead_eh_edges (bb))
9656 todo |= TODO_cleanup_cfg;
9657 gsi_next (&gsi);
9660 /* If we have a basic block with no successors that does not
9661 end with a control statement or a noreturn call end it with
9662 a call to __builtin_unreachable. This situation can occur
9663 when inlining a noreturn call that does in fact return. */
9664 if (EDGE_COUNT (bb->succs) == 0)
9666 gimple *stmt = last_stmt (bb);
9667 if (!stmt
9668 || (!is_ctrl_stmt (stmt)
9669 && (!is_gimple_call (stmt)
9670 || !gimple_call_noreturn_p (stmt))))
9672 if (stmt && is_gimple_call (stmt))
9673 gimple_call_set_ctrl_altering (stmt, false);
9674 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9675 stmt = gimple_build_call (fndecl, 0);
9676 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9677 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9678 if (!cfun->after_inlining)
9680 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9681 node->create_edge (cgraph_node::get_create (fndecl),
9682 call_stmt, bb->count);
9687 if (scale)
9689 update_max_bb_count ();
9690 compute_function_frequency ();
9693 if (current_loops
9694 && (todo & TODO_cleanup_cfg))
9695 loops_state_set (LOOPS_NEED_FIXUP);
9697 return todo;
9700 namespace {
9702 const pass_data pass_data_fixup_cfg =
9704 GIMPLE_PASS, /* type */
9705 "fixup_cfg", /* name */
9706 OPTGROUP_NONE, /* optinfo_flags */
9707 TV_NONE, /* tv_id */
9708 PROP_cfg, /* properties_required */
9709 0, /* properties_provided */
9710 0, /* properties_destroyed */
9711 0, /* todo_flags_start */
9712 0, /* todo_flags_finish */
9715 class pass_fixup_cfg : public gimple_opt_pass
9717 public:
9718 pass_fixup_cfg (gcc::context *ctxt)
9719 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9722 /* opt_pass methods: */
9723 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9724 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9726 }; // class pass_fixup_cfg
9728 } // anon namespace
9730 gimple_opt_pass *
9731 make_pass_fixup_cfg (gcc::context *ctxt)
9733 return new pass_fixup_cfg (ctxt);
9736 /* Garbage collection support for edge_def. */
9738 extern void gt_ggc_mx (tree&);
9739 extern void gt_ggc_mx (gimple *&);
9740 extern void gt_ggc_mx (rtx&);
9741 extern void gt_ggc_mx (basic_block&);
9743 static void
9744 gt_ggc_mx (rtx_insn *& x)
9746 if (x)
9747 gt_ggc_mx_rtx_def ((void *) x);
9750 void
9751 gt_ggc_mx (edge_def *e)
9753 tree block = LOCATION_BLOCK (e->goto_locus);
9754 gt_ggc_mx (e->src);
9755 gt_ggc_mx (e->dest);
9756 if (current_ir_type () == IR_GIMPLE)
9757 gt_ggc_mx (e->insns.g);
9758 else
9759 gt_ggc_mx (e->insns.r);
9760 gt_ggc_mx (block);
9763 /* PCH support for edge_def. */
9765 extern void gt_pch_nx (tree&);
9766 extern void gt_pch_nx (gimple *&);
9767 extern void gt_pch_nx (rtx&);
9768 extern void gt_pch_nx (basic_block&);
9770 static void
9771 gt_pch_nx (rtx_insn *& x)
9773 if (x)
9774 gt_pch_nx_rtx_def ((void *) x);
9777 void
9778 gt_pch_nx (edge_def *e)
9780 tree block = LOCATION_BLOCK (e->goto_locus);
9781 gt_pch_nx (e->src);
9782 gt_pch_nx (e->dest);
9783 if (current_ir_type () == IR_GIMPLE)
9784 gt_pch_nx (e->insns.g);
9785 else
9786 gt_pch_nx (e->insns.r);
9787 gt_pch_nx (block);
9790 void
9791 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9793 tree block = LOCATION_BLOCK (e->goto_locus);
9794 op (&(e->src), cookie);
9795 op (&(e->dest), cookie);
9796 if (current_ir_type () == IR_GIMPLE)
9797 op (&(e->insns.g), cookie);
9798 else
9799 op (&(e->insns.r), cookie);
9800 op (&(block), cookie);
9803 #if CHECKING_P
9805 namespace selftest {
9807 /* Helper function for CFG selftests: create a dummy function decl
9808 and push it as cfun. */
9810 static tree
9811 push_fndecl (const char *name)
9813 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9814 /* FIXME: this uses input_location: */
9815 tree fndecl = build_fn_decl (name, fn_type);
9816 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9817 NULL_TREE, integer_type_node);
9818 DECL_RESULT (fndecl) = retval;
9819 push_struct_function (fndecl);
9820 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9821 ASSERT_TRUE (fun != NULL);
9822 init_empty_tree_cfg_for_function (fun);
9823 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9824 ASSERT_EQ (0, n_edges_for_fn (fun));
9825 return fndecl;
9828 /* These tests directly create CFGs.
9829 Compare with the static fns within tree-cfg.c:
9830 - build_gimple_cfg
9831 - make_blocks: calls create_basic_block (seq, bb);
9832 - make_edges. */
9834 /* Verify a simple cfg of the form:
9835 ENTRY -> A -> B -> C -> EXIT. */
9837 static void
9838 test_linear_chain ()
9840 gimple_register_cfg_hooks ();
9842 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9843 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9845 /* Create some empty blocks. */
9846 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9847 basic_block bb_b = create_empty_bb (bb_a);
9848 basic_block bb_c = create_empty_bb (bb_b);
9850 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9851 ASSERT_EQ (0, n_edges_for_fn (fun));
9853 /* Create some edges: a simple linear chain of BBs. */
9854 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9855 make_edge (bb_a, bb_b, 0);
9856 make_edge (bb_b, bb_c, 0);
9857 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9859 /* Verify the edges. */
9860 ASSERT_EQ (4, n_edges_for_fn (fun));
9861 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9862 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9863 ASSERT_EQ (1, bb_a->preds->length ());
9864 ASSERT_EQ (1, bb_a->succs->length ());
9865 ASSERT_EQ (1, bb_b->preds->length ());
9866 ASSERT_EQ (1, bb_b->succs->length ());
9867 ASSERT_EQ (1, bb_c->preds->length ());
9868 ASSERT_EQ (1, bb_c->succs->length ());
9869 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9870 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9872 /* Verify the dominance information
9873 Each BB in our simple chain should be dominated by the one before
9874 it. */
9875 calculate_dominance_info (CDI_DOMINATORS);
9876 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9877 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9878 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9879 ASSERT_EQ (1, dom_by_b.length ());
9880 ASSERT_EQ (bb_c, dom_by_b[0]);
9881 free_dominance_info (CDI_DOMINATORS);
9882 dom_by_b.release ();
9884 /* Similarly for post-dominance: each BB in our chain is post-dominated
9885 by the one after it. */
9886 calculate_dominance_info (CDI_POST_DOMINATORS);
9887 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9888 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9889 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9890 ASSERT_EQ (1, postdom_by_b.length ());
9891 ASSERT_EQ (bb_a, postdom_by_b[0]);
9892 free_dominance_info (CDI_POST_DOMINATORS);
9893 postdom_by_b.release ();
9895 pop_cfun ();
9898 /* Verify a simple CFG of the form:
9899 ENTRY
9903 /t \f
9909 EXIT. */
9911 static void
9912 test_diamond ()
9914 gimple_register_cfg_hooks ();
9916 tree fndecl = push_fndecl ("cfg_test_diamond");
9917 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9919 /* Create some empty blocks. */
9920 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9921 basic_block bb_b = create_empty_bb (bb_a);
9922 basic_block bb_c = create_empty_bb (bb_a);
9923 basic_block bb_d = create_empty_bb (bb_b);
9925 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9926 ASSERT_EQ (0, n_edges_for_fn (fun));
9928 /* Create the edges. */
9929 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9930 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9931 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9932 make_edge (bb_b, bb_d, 0);
9933 make_edge (bb_c, bb_d, 0);
9934 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9936 /* Verify the edges. */
9937 ASSERT_EQ (6, n_edges_for_fn (fun));
9938 ASSERT_EQ (1, bb_a->preds->length ());
9939 ASSERT_EQ (2, bb_a->succs->length ());
9940 ASSERT_EQ (1, bb_b->preds->length ());
9941 ASSERT_EQ (1, bb_b->succs->length ());
9942 ASSERT_EQ (1, bb_c->preds->length ());
9943 ASSERT_EQ (1, bb_c->succs->length ());
9944 ASSERT_EQ (2, bb_d->preds->length ());
9945 ASSERT_EQ (1, bb_d->succs->length ());
9947 /* Verify the dominance information. */
9948 calculate_dominance_info (CDI_DOMINATORS);
9949 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9950 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9951 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9952 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9953 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9954 dom_by_a.release ();
9955 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9956 ASSERT_EQ (0, dom_by_b.length ());
9957 dom_by_b.release ();
9958 free_dominance_info (CDI_DOMINATORS);
9960 /* Similarly for post-dominance. */
9961 calculate_dominance_info (CDI_POST_DOMINATORS);
9962 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9963 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9964 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9965 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9966 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9967 postdom_by_d.release ();
9968 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9969 ASSERT_EQ (0, postdom_by_b.length ());
9970 postdom_by_b.release ();
9971 free_dominance_info (CDI_POST_DOMINATORS);
9973 pop_cfun ();
9976 /* Verify that we can handle a CFG containing a "complete" aka
9977 fully-connected subgraph (where A B C D below all have edges
9978 pointing to each other node, also to themselves).
9979 e.g.:
9980 ENTRY EXIT
9986 A<--->B
9987 ^^ ^^
9988 | \ / |
9989 | X |
9990 | / \ |
9991 VV VV
9992 C<--->D
9995 static void
9996 test_fully_connected ()
9998 gimple_register_cfg_hooks ();
10000 tree fndecl = push_fndecl ("cfg_fully_connected");
10001 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10003 const int n = 4;
10005 /* Create some empty blocks. */
10006 auto_vec <basic_block> subgraph_nodes;
10007 for (int i = 0; i < n; i++)
10008 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10010 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10011 ASSERT_EQ (0, n_edges_for_fn (fun));
10013 /* Create the edges. */
10014 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10015 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10016 for (int i = 0; i < n; i++)
10017 for (int j = 0; j < n; j++)
10018 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10020 /* Verify the edges. */
10021 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10022 /* The first one is linked to ENTRY/EXIT as well as itself and
10023 everything else. */
10024 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10025 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10026 /* The other ones in the subgraph are linked to everything in
10027 the subgraph (including themselves). */
10028 for (int i = 1; i < n; i++)
10030 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10031 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10034 /* Verify the dominance information. */
10035 calculate_dominance_info (CDI_DOMINATORS);
10036 /* The initial block in the subgraph should be dominated by ENTRY. */
10037 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10038 get_immediate_dominator (CDI_DOMINATORS,
10039 subgraph_nodes[0]));
10040 /* Every other block in the subgraph should be dominated by the
10041 initial block. */
10042 for (int i = 1; i < n; i++)
10043 ASSERT_EQ (subgraph_nodes[0],
10044 get_immediate_dominator (CDI_DOMINATORS,
10045 subgraph_nodes[i]));
10046 free_dominance_info (CDI_DOMINATORS);
10048 /* Similarly for post-dominance. */
10049 calculate_dominance_info (CDI_POST_DOMINATORS);
10050 /* The initial block in the subgraph should be postdominated by EXIT. */
10051 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10052 get_immediate_dominator (CDI_POST_DOMINATORS,
10053 subgraph_nodes[0]));
10054 /* Every other block in the subgraph should be postdominated by the
10055 initial block, since that leads to EXIT. */
10056 for (int i = 1; i < n; i++)
10057 ASSERT_EQ (subgraph_nodes[0],
10058 get_immediate_dominator (CDI_POST_DOMINATORS,
10059 subgraph_nodes[i]));
10060 free_dominance_info (CDI_POST_DOMINATORS);
10062 pop_cfun ();
10065 /* Run all of the selftests within this file. */
10067 void
10068 tree_cfg_c_tests ()
10070 test_linear_chain ();
10071 test_diamond ();
10072 test_fully_connected ();
10075 } // namespace selftest
10077 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10078 - loop
10079 - nested loops
10080 - switch statement (a block with many out-edges)
10081 - something that jumps to itself
10082 - etc */
10084 #endif /* CHECKING_P */