/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
static edge find_taken_edge_switch_expr (const gswitch *, tree);
static tree find_case_label_for_value (const gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
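/* Illustrative example (editor's sketch, not part of the original
   sources): a loop such as

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches the CFG builder with an IFN_ANNOTATE call of kind
   annot_expr_ivdep_kind just before the loop condition; the function
   above records the annotation on the loop (here loop->safelen =
   INT_MAX) and replaces the call with a plain copy of its first
   argument.  */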
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
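/* Illustrative example (editor's sketch; the exact surface syntax
   depends on the GIMPLE FE version): a __PHI call in GIMPLE FE input
   looks roughly like

     x_3 = __PHI (L1: x_1, L2: x_2);

   where each LABEL_DECL argument names the predecessor block of the
   value that follows it.  Once edges exist, the loop above rewrites
   such calls into real PHI nodes.  */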
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
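/* Illustrative example (editor's note): for

     void f (void *p) { goto *p; }

   the GOTO's destination is a pointer value rather than a LABEL_DECL,
   so computed_goto_p returns true.  */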
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
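/* Illustrative example (editor's note): a call to longjmp is
   ECF_NORETURN, so it is marked control-altering here and will end its
   basic block; similarly, in a function that calls setjmp or has
   non-local labels, ordinary calls may be marked control-altering
   because they can return abnormally.  */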
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
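/* Editor's sketch of the marker-moving step above (assuming debug
   begin-stmt markers are enabled): a sequence like

     # DEBUG BEGIN_STMT
     L1:
       x = 1;

   becomes

     L1:
     # DEBUG BEGIN_STMT
       x = 1;

   so the label stays first in its block and its id assignment is
   unaffected by the presence of markers under -fcompare-debug.  */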
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
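/* Editor's note on the growth arithmetic above: new_size is
   last_basic_block + (last_basic_block + 3) / 4, i.e. the basic block
   array grows by roughly 25% (rounded up) each time it fills.  */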
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
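/* Editor's sketch (not from the original sources) of the factoring
   performed above for computed gotos: two blocks ending in

     goto *p_1;        and        goto *q_2;

   are rewritten to store their destinations,

     gotovar = p_1;    and        gotovar = q_2;

   and both fall through to the single dispatcher block

     <factored_label>:
     goto *gotovar;

   which then carries one abnormal edge to each possible target,
   instead of every computed goto having edges to every target.  */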
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
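/* Illustrative example (editor's note): if "if (p) x = 1; else x = 2;"
   occupies a single source line, its basic blocks share one locus;
   assigning successive discriminators (1, 2, ...) lets a sample-based
   profiler attribute execution counts to each block of that line
   separately.  */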
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
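/* Illustrative example (editor's note): for a lowered conditional

     if (a_1 > 0) goto <L1>; else goto <L2>;

   this creates bb -> L1 with EDGE_TRUE_VALUE and bb -> L2 with
   EDGE_FALSE_VALUE, then clears both labels from the GIMPLE_COND,
   since the edges now carry that information.  */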
/* Called for each element in the hash table as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
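/* Worked example (editor's note): in

     switch (x) { case 1: case 2: case 3: foo (); break; default: bar (); }

   the three consecutive case labels share a destination block, so the
   loop above merges them into the single range label "case 1 ... 3:",
   shrinking the label vector from four entries to two.  */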
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1966 /* Replaces all uses of NAME by VAL. */
1968 void
1969 replace_uses_by (tree name, tree val)
1971 imm_use_iterator imm_iter;
1972 use_operand_p use;
1973 gimple *stmt;
1974 edge e;
1976 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1978 /* Mark the block if we change the last stmt in it. */
1979 if (cfgcleanup_altered_bbs
1980 && stmt_ends_bb_p (stmt))
1981 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1983 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1985 replace_exp (use, val);
1987 if (gimple_code (stmt) == GIMPLE_PHI)
1989 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1990 PHI_ARG_INDEX_FROM_USE (use));
1991 if (e->flags & EDGE_ABNORMAL
1992 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1994 /* This can only occur for virtual operands, since
1995 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1996 would prevent replacement. */
1997 gcc_checking_assert (virtual_operand_p (name));
1998 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2003 if (gimple_code (stmt) != GIMPLE_PHI)
2005 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2006 gimple *orig_stmt = stmt;
2007 size_t i;
2009 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2010 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2011 only change something from non-invariant to invariant, and only
2012 when propagating constants. */
2013 if (is_gimple_min_invariant (val))
2014 for (i = 0; i < gimple_num_ops (stmt); i++)
2016 tree op = gimple_op (stmt, i);
2017 /* Operands may be empty here. For example, the labels
2018 of a GIMPLE_COND are nulled out following the creation
2019 of the corresponding CFG edges. */
2020 if (op && TREE_CODE (op) == ADDR_EXPR)
2021 recompute_tree_invariant_for_addr_expr (op);
2024 if (fold_stmt (&gsi))
2025 stmt = gsi_stmt (gsi);
2027 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2028 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2030 update_stmt (stmt);
2034 gcc_checking_assert (has_zero_uses (name));
2036 /* Also update the trees stored in loop structures. */
2037 if (current_loops)
2039 struct loop *loop;
2041 FOR_EACH_LOOP (loop, 0)
2043 substitute_in_loop_info (loop, name, val);
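/* A minimal sketch (not part of the sources) of the immediate-use
   idiom replace_uses_by builds on.  It assumes GCC's internal
   ssa-iterators.h API; the function name and parameters are
   hypothetical.  */

static void
sketch_rewrite_all_uses (tree old_name, tree new_val)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple *use_stmt;

  /* Visit each statement using OLD_NAME exactly once...  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, old_name)
    {
      /* ... and rewrite every use operand it contains.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, new_val);
      update_stmt (use_stmt);
    }
}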
2048 /* Merge block B into block A. */
2050 static void
2051 gimple_merge_blocks (basic_block a, basic_block b)
2053 gimple_stmt_iterator last, gsi;
2054 gphi_iterator psi;
2056 if (dump_file)
2057 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2059 /* Remove all single-valued PHI nodes from block B of the form
2060 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2061 gsi = gsi_last_bb (a);
2062 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2064 gimple *phi = gsi_stmt (psi);
2065 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2066 gimple *copy;
2067 bool may_replace_uses = (virtual_operand_p (def)
2068 || may_propagate_copy (def, use));
2070 /* In case we maintain loop closed ssa form, do not propagate arguments
2071 of loop exit phi nodes. */
2072 if (current_loops
2073 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2074 && !virtual_operand_p (def)
2075 && TREE_CODE (use) == SSA_NAME
2076 && a->loop_father != b->loop_father)
2077 may_replace_uses = false;
2079 if (!may_replace_uses)
2081 gcc_assert (!virtual_operand_p (def));
2083 /* Note that just emitting the copies is fine -- there is no problem
2084 with ordering of phi nodes. This is because A is the single
2085 predecessor of B, therefore results of the phi nodes cannot
2086 appear as arguments of the phi nodes. */
2087 copy = gimple_build_assign (def, use);
2088 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2089 remove_phi_node (&psi, false);
2091 else
2093 /* If we deal with a PHI for virtual operands, we can simply
2094 propagate these without fussing with folding or updating
2095 the stmt. */
2096 if (virtual_operand_p (def))
2098 imm_use_iterator iter;
2099 use_operand_p use_p;
2100 gimple *stmt;
2102 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2103 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2104 SET_USE (use_p, use);
2106 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2107 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2109 else
2110 replace_uses_by (def, use);
2112 remove_phi_node (&psi, true);
2116 /* Ensure that B follows A. */
2117 move_block_after (b, a);
2119 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2120 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2122 /* Remove labels from B and set gimple_bb to A for other statements. */
2123 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2125 gimple *stmt = gsi_stmt (gsi);
2126 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2128 tree label = gimple_label_label (label_stmt);
2129 int lp_nr;
2131 gsi_remove (&gsi, false);
2133 /* Now that we can thread computed gotos, we might have
2134 a situation where we have a forced label in block B.
2135 However, the label at the start of block B might still be
2136 used in other ways (think about the runtime checking for
2137 Fortran assigned gotos). So we cannot just delete the
2138 label. Instead we move the label to the start of block A. */
2139 if (FORCED_LABEL (label))
2141 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2142 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2144 /* Other user labels are kept around in the form of a debug stmt. */
2145 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2147 gimple *dbg = gimple_build_debug_bind (label,
2148 integer_zero_node,
2149 stmt);
2150 gimple_debug_bind_reset_value (dbg);
2151 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2154 lp_nr = EH_LANDING_PAD_NR (label);
2155 if (lp_nr)
2157 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2158 lp->post_landing_pad = NULL;
2161 else
2163 gimple_set_bb (stmt, a);
2164 gsi_next (&gsi);
2168 /* When merging two BBs, if their counts are different, the larger count
2169 is selected as the new bb count. This is to handle inconsistent
2170 profiles. */
2171 if (a->loop_father == b->loop_father)
2173 a->count = a->count.merge (b->count);
2176 /* Merge the sequences. */
2177 last = gsi_last_bb (a);
2178 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2179 set_bb_seq (b, NULL);
2181 if (cfgcleanup_altered_bbs)
2182 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2186 /* Return the one of two successors of BB that is not reachable by a
2187 complex edge, if there is one. Else, return BB. We use
2188 this in optimizations that use post-dominators for their heuristics,
2189 to catch the cases in C++ where function calls are involved. */
2191 basic_block
2192 single_noncomplex_succ (basic_block bb)
2194 edge e0, e1;
2195 if (EDGE_COUNT (bb->succs) != 2)
2196 return bb;
2198 e0 = EDGE_SUCC (bb, 0);
2199 e1 = EDGE_SUCC (bb, 1);
2200 if (e0->flags & EDGE_COMPLEX)
2201 return e1->dest;
2202 if (e1->flags & EDGE_COMPLEX)
2203 return e0->dest;
2205 return bb;
2208 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2210 void
2211 notice_special_calls (gcall *call)
2213 int flags = gimple_call_flags (call);
2215 if (flags & ECF_MAY_BE_ALLOCA)
2216 cfun->calls_alloca = true;
2217 if (flags & ECF_RETURNS_TWICE)
2218 cfun->calls_setjmp = true;
2222 /* Clear flags set by notice_special_calls. Used by dead code removal
2223 to update the flags. */
2225 void
2226 clear_special_calls (void)
2228 cfun->calls_alloca = false;
2229 cfun->calls_setjmp = false;
2232 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2234 static void
2235 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2237 /* Since this block is no longer reachable, we can just delete all
2238 of its PHI nodes. */
2239 remove_phi_nodes (bb);
2241 /* Remove edges to BB's successors. */
2242 while (EDGE_COUNT (bb->succs) > 0)
2243 remove_edge (EDGE_SUCC (bb, 0));
2247 /* Remove statements of basic block BB. */
2249 static void
2250 remove_bb (basic_block bb)
2252 gimple_stmt_iterator i;
2254 if (dump_file)
2256 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2257 if (dump_flags & TDF_DETAILS)
2259 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2260 fprintf (dump_file, "\n");
2264 if (current_loops)
2266 struct loop *loop = bb->loop_father;
2268 /* If a loop gets removed, clean up the information associated
2269 with it. */
2270 if (loop->latch == bb
2271 || loop->header == bb)
2272 free_numbers_of_iterations_estimates (loop);
2275 /* Remove all the instructions in the block. */
2276 if (bb_seq (bb) != NULL)
2278 /* Walk backwards so as to get a chance to substitute all
2279 released DEFs into debug stmts. See
2280 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2281 details. */
2282 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2284 gimple *stmt = gsi_stmt (i);
2285 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2286 if (label_stmt
2287 && (FORCED_LABEL (gimple_label_label (label_stmt))
2288 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2290 basic_block new_bb;
2291 gimple_stmt_iterator new_gsi;
2293 /* A non-reachable non-local label may still be referenced.
2294 But it no longer needs to carry the extra semantics of
2295 non-locality. */
2296 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2298 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2299 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2302 new_bb = bb->prev_bb;
2303 new_gsi = gsi_start_bb (new_bb);
2304 gsi_remove (&i, false);
2305 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2307 else
2309 /* Release SSA definitions. */
2310 release_defs (stmt);
2311 gsi_remove (&i, true);
2314 if (gsi_end_p (i))
2315 i = gsi_last_bb (bb);
2316 else
2317 gsi_prev (&i);
2321 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2322 bb->il.gimple.seq = NULL;
2323 bb->il.gimple.phi_nodes = NULL;
2327 /* Given a basic block BB and a value VAL for use in the final statement
2328 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2329 the edge that will be taken out of the block.
2330 If VAL is NULL_TREE, then the current value of the final statement's
2331 predicate or index is used.
2332 If the value does not match a unique edge, NULL is returned. */
2334 edge
2335 find_taken_edge (basic_block bb, tree val)
2337 gimple *stmt;
2339 stmt = last_stmt (bb);
2341 /* Handle ENTRY and EXIT. */
2342 if (!stmt)
2343 return NULL;
2345 if (gimple_code (stmt) == GIMPLE_COND)
2346 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2348 if (gimple_code (stmt) == GIMPLE_SWITCH)
2349 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2351 if (computed_goto_p (stmt))
2353 /* Only optimize if the argument is a label; if the argument is
2354 not a label then we cannot construct a proper CFG.
2356 It may be the case that we only need to allow the LABEL_REF to
2357 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2358 appear inside a LABEL_EXPR just to be safe. */
2359 if (val
2360 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2361 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2362 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2365 /* Otherwise we only know the taken successor edge if it's unique. */
2366 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
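/* Usage sketch (hypothetical caller): if BB ends in the GIMPLE_COND
   "if (x_1 > 0)", then

     edge e = find_taken_edge (bb, integer_zero_node);

   yields the false edge, whereas passing NULL_TREE only returns an
   edge when the predicate already folds to a constant or BB has a
   single successor.  */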
2369 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2370 statement, determine which of the outgoing edges will be taken out of the
2371 block. Return NULL if either edge may be taken. */
2373 static edge
2374 find_taken_edge_computed_goto (basic_block bb, tree val)
2376 basic_block dest;
2377 edge e = NULL;
2379 dest = label_to_block (val);
2380 if (dest)
2382 e = find_edge (bb, dest);
2383 gcc_assert (e != NULL);
2386 return e;
2389 /* Given COND_STMT and a constant value VAL for use as the predicate,
2390 determine which of the two edges will be taken out of
2391 the statement's block. Return NULL if either edge may be taken.
2392 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2393 is used. */
2395 static edge
2396 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2398 edge true_edge, false_edge;
2400 if (val == NULL_TREE)
2402 /* Use the current value of the predicate. */
2403 if (gimple_cond_true_p (cond_stmt))
2404 val = integer_one_node;
2405 else if (gimple_cond_false_p (cond_stmt))
2406 val = integer_zero_node;
2407 else
2408 return NULL;
2410 else if (TREE_CODE (val) != INTEGER_CST)
2411 return NULL;
2413 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2414 &true_edge, &false_edge);
2416 return (integer_zerop (val) ? false_edge : true_edge);
2419 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2420 which edge will be taken out of the statement's block. Return NULL if any
2421 edge may be taken.
2422 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2423 is used. */
2425 static edge
2426 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2428 basic_block dest_bb;
2429 edge e;
2430 tree taken_case;
2432 if (gimple_switch_num_labels (switch_stmt) == 1)
2433 taken_case = gimple_switch_default_label (switch_stmt);
2434 else
2436 if (val == NULL_TREE)
2437 val = gimple_switch_index (switch_stmt);
2438 if (TREE_CODE (val) != INTEGER_CST)
2439 return NULL;
2440 else
2441 taken_case = find_case_label_for_value (switch_stmt, val);
2443 dest_bb = label_to_block (CASE_LABEL (taken_case));
2445 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2446 gcc_assert (e);
2447 return e;
2451 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2452 We can make optimal use here of the fact that the case labels are
2453 sorted: We can do a binary search for a case matching VAL. */
2455 static tree
2456 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2458 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2459 tree default_case = gimple_switch_default_label (switch_stmt);
2461 for (low = 0, high = n; high - low > 1; )
2463 size_t i = (high + low) / 2;
2464 tree t = gimple_switch_label (switch_stmt, i);
2465 int cmp;
2467 /* Cache the result of comparing CASE_LOW and val. */
2468 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2470 if (cmp > 0)
2471 high = i;
2472 else
2473 low = i;
2475 if (CASE_HIGH (t) == NULL)
2477 /* A single-valued case label. */
2478 if (cmp == 0)
2479 return t;
2481 else
2483 /* A case range. We can only handle integer ranges. */
2484 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2485 return t;
2489 return default_case;
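/* Worked example (made-up labels): with CASE_LOWs {1, 4, 9, 16} at
   indices 1..4 (index 0 holds the default label, which is never
   probed because I always exceeds LOW) and VAL == 9:

     [low, high) = [0, 5) -> i = 2, CASE_LOW 4 <= 9 -> low = 2;
     [low, high) = [2, 5) -> i = 3, CASE_LOW 9 == 9 -> return label.  */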
2493 /* Dump a basic block on stderr. */
2495 void
2496 gimple_debug_bb (basic_block bb)
2498 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2502 /* Dump basic block with index N on stderr. */
2504 basic_block
2505 gimple_debug_bb_n (int n)
2507 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2508 return BASIC_BLOCK_FOR_FN (cfun, n);
2512 /* Dump the CFG on stderr.
2514 FLAGS are the same used by the tree dumping functions
2515 (see TDF_* in dumpfile.h). */
2517 void
2518 gimple_debug_cfg (dump_flags_t flags)
2520 gimple_dump_cfg (stderr, flags);
2524 /* Dump the program showing basic block boundaries on the given FILE.
2526 FLAGS are the same used by the tree dumping functions (see TDF_* in
2527 dumpfile.h). */
2529 void
2530 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2532 if (flags & TDF_DETAILS)
2534 dump_function_header (file, current_function_decl, flags);
2535 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2536 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2537 last_basic_block_for_fn (cfun));
2539 brief_dump_cfg (file, flags);
2540 fprintf (file, "\n");
2543 if (flags & TDF_STATS)
2544 dump_cfg_stats (file);
2546 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2550 /* Dump CFG statistics on FILE. */
2552 void
2553 dump_cfg_stats (FILE *file)
2555 static long max_num_merged_labels = 0;
2556 unsigned long size, total = 0;
2557 long num_edges;
2558 basic_block bb;
2559 const char * const fmt_str = "%-30s%-13s%12s\n";
2560 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2561 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2562 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2563 const char *funcname = current_function_name ();
2565 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2567 fprintf (file, "---------------------------------------------------------\n");
2568 fprintf (file, fmt_str, "", " Number of ", "Memory");
2569 fprintf (file, fmt_str, "", " instances ", "used ");
2570 fprintf (file, "---------------------------------------------------------\n");
2572 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2573 total += size;
2574 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2575 SCALE (size), LABEL (size));
2577 num_edges = 0;
2578 FOR_EACH_BB_FN (bb, cfun)
2579 num_edges += EDGE_COUNT (bb->succs);
2580 size = num_edges * sizeof (struct edge_def);
2581 total += size;
2582 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2584 fprintf (file, "---------------------------------------------------------\n");
2585 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2586 LABEL (total));
2587 fprintf (file, "---------------------------------------------------------\n");
2588 fprintf (file, "\n");
2590 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2591 max_num_merged_labels = cfg_stats.num_merged_labels;
2593 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2594 cfg_stats.num_merged_labels, max_num_merged_labels);
2596 fprintf (file, "\n");
2600 /* Dump CFG statistics on stderr. Keep extern so that it's always
2601 linked in the final executable. */
2603 DEBUG_FUNCTION void
2604 debug_cfg_stats (void)
2606 dump_cfg_stats (stderr);
2609 /*---------------------------------------------------------------------------
2610 Miscellaneous helpers
2611 ---------------------------------------------------------------------------*/
2613 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2614 flow. Transfers of control flow associated with EH are excluded. */
2616 static bool
2617 call_can_make_abnormal_goto (gimple *t)
2619 /* If the function has no non-local labels and does not call setjmp,
2620 then a call cannot make an abnormal transfer of control. */
2621 if (!cfun->has_nonlocal_label
2622 && !cfun->calls_setjmp)
2623 return false;
2625 /* Likewise if the call has no side effects. */
2626 if (!gimple_has_side_effects (t))
2627 return false;
2629 /* Likewise if the called function is leaf. */
2630 if (gimple_call_flags (t) & ECF_LEAF)
2631 return false;
2633 return true;
2637 /* Return true if T can make an abnormal transfer of control flow.
2638 Transfers of control flow associated with EH are excluded. */
2640 bool
2641 stmt_can_make_abnormal_goto (gimple *t)
2643 if (computed_goto_p (t))
2644 return true;
2645 if (is_gimple_call (t))
2646 return call_can_make_abnormal_goto (t);
2647 return false;
2651 /* Return true if T represents a stmt that always transfers control. */
2653 bool
2654 is_ctrl_stmt (gimple *t)
2656 switch (gimple_code (t))
2658 case GIMPLE_COND:
2659 case GIMPLE_SWITCH:
2660 case GIMPLE_GOTO:
2661 case GIMPLE_RETURN:
2662 case GIMPLE_RESX:
2663 return true;
2664 default:
2665 return false;
2670 /* Return true if T is a statement that may alter the flow of control
2671 (e.g., a call to a non-returning function). */
2673 bool
2674 is_ctrl_altering_stmt (gimple *t)
2676 gcc_assert (t);
2678 switch (gimple_code (t))
2680 case GIMPLE_CALL:
2681 /* The per-stmt call flag indicates whether the call could alter
2682 control flow. */
2683 if (gimple_call_ctrl_altering_p (t))
2684 return true;
2685 break;
2687 case GIMPLE_EH_DISPATCH:
2688 /* EH_DISPATCH branches to the individual catch handlers at
2689 this level of a try or allowed-exceptions region. It can
2690 fallthru to the next statement as well. */
2691 return true;
2693 case GIMPLE_ASM:
2694 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2695 return true;
2696 break;
2698 CASE_GIMPLE_OMP:
2699 /* OpenMP directives alter control flow. */
2700 return true;
2702 case GIMPLE_TRANSACTION:
2703 /* A transaction start alters control flow. */
2704 return true;
2706 default:
2707 break;
2710 /* If a statement can throw, it alters control flow. */
2711 return stmt_can_throw_internal (t);
2715 /* Return true if T is a simple local goto. */
2717 bool
2718 simple_goto_p (gimple *t)
2720 return (gimple_code (t) == GIMPLE_GOTO
2721 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2725 /* Return true if STMT should start a new basic block. PREV_STMT is
2726 the statement preceding STMT. It is used when STMT is a label or a
2727 case label. Labels should only start a new basic block if their
2728 previous statement wasn't a label. Otherwise, a sequence of labels
2729 would generate unnecessary basic blocks that only contain a single
2730 label. */
2732 static inline bool
2733 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2735 if (stmt == NULL)
2736 return false;
2738 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2739 any nondebug stmts in the block. We don't want to start another
2740 block in this case: the debug stmt will already have started the
2741 one STMT would start if we weren't outputting debug stmts. */
2742 if (prev_stmt && is_gimple_debug (prev_stmt))
2743 return false;
2745 /* Labels start a new basic block only if the preceding statement
2746 wasn't a label of the same type. This prevents the creation of
2747 consecutive blocks that have nothing but a single label. */
2748 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2750 /* Nonlocal and computed GOTO targets always start a new block. */
2751 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2752 || FORCED_LABEL (gimple_label_label (label_stmt)))
2753 return true;
2755 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2757 if (DECL_NONLOCAL (gimple_label_label (
2758 as_a <glabel *> (prev_stmt))))
2759 return true;
2761 cfg_stats.num_merged_labels++;
2762 return false;
2764 else
2765 return true;
2767 else if (gimple_code (stmt) == GIMPLE_CALL)
2769 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2770 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2771 start a new block. */
2772 return true;
2773 if (gimple_call_internal_p (stmt, IFN_PHI)
2774 && prev_stmt
2775 && gimple_code (prev_stmt) != GIMPLE_LABEL
2776 && (gimple_code (prev_stmt) != GIMPLE_CALL
2777 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2778 /* PHI nodes start a new block unless preceded by a label
2779 or another PHI. */
2780 return true;
2783 return false;
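/* Illustrative sketch (not from the sources): in the sequence

       L1:
       L2:
       x_1 = ...;

   only L1 starts a new block; L2 is counted in
   cfg_stats.num_merged_labels and shares L1's block, unless it is
   DECL_NONLOCAL or FORCED_LABEL, which always start a new one.  */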
2787 /* Return true if T should end a basic block. */
2789 bool
2790 stmt_ends_bb_p (gimple *t)
2792 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2795 /* Remove block annotations and other data structures. */
2797 void
2798 delete_tree_cfg_annotations (struct function *fn)
2800 vec_free (label_to_block_map_for_fn (fn));
2803 /* Return the virtual phi in BB. */
2805 gphi *
2806 get_virtual_phi (basic_block bb)
2808 for (gphi_iterator gsi = gsi_start_phis (bb);
2809 !gsi_end_p (gsi);
2810 gsi_next (&gsi))
2812 gphi *phi = gsi.phi ();
2814 if (virtual_operand_p (PHI_RESULT (phi)))
2815 return phi;
2818 return NULL;
2821 /* Return the first statement in basic block BB. */
2823 gimple *
2824 first_stmt (basic_block bb)
2826 gimple_stmt_iterator i = gsi_start_bb (bb);
2827 gimple *stmt = NULL;
2829 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2831 gsi_next (&i);
2832 stmt = NULL;
2834 return stmt;
2837 /* Return the first non-label statement in basic block BB. */
2839 static gimple *
2840 first_non_label_stmt (basic_block bb)
2842 gimple_stmt_iterator i = gsi_start_bb (bb);
2843 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2844 gsi_next (&i);
2845 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2848 /* Return the last statement in basic block BB. */
2850 gimple *
2851 last_stmt (basic_block bb)
2853 gimple_stmt_iterator i = gsi_last_bb (bb);
2854 gimple *stmt = NULL;
2856 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2858 gsi_prev (&i);
2859 stmt = NULL;
2861 return stmt;
2864 /* Return the last statement of an otherwise empty block. Return NULL
2865 if the block is totally empty, or if it contains more than one
2866 statement. */
2868 gimple *
2869 last_and_only_stmt (basic_block bb)
2871 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2872 gimple *last, *prev;
2874 if (gsi_end_p (i))
2875 return NULL;
2877 last = gsi_stmt (i);
2878 gsi_prev_nondebug (&i);
2879 if (gsi_end_p (i))
2880 return last;
2882 /* Empty statements should no longer appear in the instruction stream.
2883 Everything that might have appeared before should be deleted by
2884 remove_useless_stmts, and the optimizers should just gsi_remove
2885 instead of smashing with build_empty_stmt.
2887 Thus the only thing that should appear here in a block containing
2888 one executable statement is a label. */
2889 prev = gsi_stmt (i);
2890 if (gimple_code (prev) == GIMPLE_LABEL)
2891 return last;
2892 else
2893 return NULL;
2896 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2898 static void
2899 reinstall_phi_args (edge new_edge, edge old_edge)
2901 edge_var_map *vm;
2902 int i;
2903 gphi_iterator phis;
2905 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2906 if (!v)
2907 return;
2909 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2910 v->iterate (i, &vm) && !gsi_end_p (phis);
2911 i++, gsi_next (&phis))
2913 gphi *phi = phis.phi ();
2914 tree result = redirect_edge_var_map_result (vm);
2915 tree arg = redirect_edge_var_map_def (vm);
2917 gcc_assert (result == gimple_phi_result (phi));
2919 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2922 redirect_edge_var_map_clear (old_edge);
2925 /* Returns the basic block after which the new basic block created
2926 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2927 near its "logical" location. This is of most help to humans looking
2928 at debugging dumps. */
2930 basic_block
2931 split_edge_bb_loc (edge edge_in)
2933 basic_block dest = edge_in->dest;
2934 basic_block dest_prev = dest->prev_bb;
2936 if (dest_prev)
2938 edge e = find_edge (dest_prev, dest);
2939 if (e && !(e->flags & EDGE_COMPLEX))
2940 return edge_in->src;
2942 return dest_prev;
2945 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2946 Abort on abnormal edges. */
2948 static basic_block
2949 gimple_split_edge (edge edge_in)
2951 basic_block new_bb, after_bb, dest;
2952 edge new_edge, e;
2954 /* Abnormal edges cannot be split. */
2955 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2957 dest = edge_in->dest;
2959 after_bb = split_edge_bb_loc (edge_in);
2961 new_bb = create_empty_bb (after_bb);
2962 new_bb->count = edge_in->count ();
2964 e = redirect_edge_and_branch (edge_in, new_bb);
2965 gcc_assert (e == edge_in);
2967 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2968 reinstall_phi_args (new_edge, e);
2970 return new_bb;
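/* Illustrative sketch (not from the sources): splitting the
   (possibly critical) edge A->B

       A ---> B        becomes        A ---> N ---> B

   where N is the returned empty block: the original edge is
   redirected to N, a fallthru edge N->B is created, and PHI
   arguments queued on the old edge are reinstalled on it.  */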
2974 /* Verify properties of the address expression T with base object BASE. */
2976 static tree
2977 verify_address (tree t, tree base)
2979 bool old_constant;
2980 bool old_side_effects;
2981 bool new_constant;
2982 bool new_side_effects;
2984 old_constant = TREE_CONSTANT (t);
2985 old_side_effects = TREE_SIDE_EFFECTS (t);
2987 recompute_tree_invariant_for_addr_expr (t);
2988 new_side_effects = TREE_SIDE_EFFECTS (t);
2989 new_constant = TREE_CONSTANT (t);
2991 if (old_constant != new_constant)
2993 error ("constant not recomputed when ADDR_EXPR changed");
2994 return t;
2996 if (old_side_effects != new_side_effects)
2998 error ("side effects not recomputed when ADDR_EXPR changed");
2999 return t;
3002 if (!(VAR_P (base)
3003 || TREE_CODE (base) == PARM_DECL
3004 || TREE_CODE (base) == RESULT_DECL))
3005 return NULL_TREE;
3007 if (DECL_GIMPLE_REG_P (base))
3009 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3010 return base;
3013 return NULL_TREE;
3016 /* Callback for walk_tree, check that all elements with address taken are
3017 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3018 inside a PHI node. */
3020 static tree
3021 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3023 tree t = *tp, x;
3025 if (TYPE_P (t))
3026 *walk_subtrees = 0;
3028 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3029 #define CHECK_OP(N, MSG) \
3030 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3031 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3033 switch (TREE_CODE (t))
3035 case SSA_NAME:
3036 if (SSA_NAME_IN_FREE_LIST (t))
3038 error ("SSA name in freelist but still referenced");
3039 return *tp;
3041 break;
3043 case PARM_DECL:
3044 case VAR_DECL:
3045 case RESULT_DECL:
3047 tree context = decl_function_context (t);
3048 if (context != cfun->decl
3049 && !SCOPE_FILE_SCOPE_P (context)
3050 && !TREE_STATIC (t)
3051 && !DECL_EXTERNAL (t))
3053 error ("Local declaration from a different function");
3054 return t;
3057 break;
3059 case INDIRECT_REF:
3060 error ("INDIRECT_REF in gimple IL");
3061 return t;
3063 case MEM_REF:
3064 x = TREE_OPERAND (t, 0);
3065 if (!POINTER_TYPE_P (TREE_TYPE (x))
3066 || !is_gimple_mem_ref_addr (x))
3068 error ("invalid first operand of MEM_REF");
3069 return x;
3071 if (!poly_int_tree_p (TREE_OPERAND (t, 1))
3072 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3074 error ("invalid offset operand of MEM_REF");
3075 return TREE_OPERAND (t, 1);
3077 if (TREE_CODE (x) == ADDR_EXPR)
3079 tree va = verify_address (x, TREE_OPERAND (x, 0));
3080 if (va)
3081 return va;
3082 x = TREE_OPERAND (x, 0);
3084 walk_tree (&x, verify_expr, data, NULL);
3085 *walk_subtrees = 0;
3086 break;
3088 case ASSERT_EXPR:
3089 x = fold (ASSERT_EXPR_COND (t));
3090 if (x == boolean_false_node)
3092 error ("ASSERT_EXPR with an always-false condition");
3093 return *tp;
3095 break;
3097 case MODIFY_EXPR:
3098 error ("MODIFY_EXPR not expected while having tuples");
3099 return *tp;
3101 case ADDR_EXPR:
3103 tree tem;
3105 gcc_assert (is_gimple_address (t));
3107 /* Skip any references (they will be checked when we recurse down the
3108 tree) and ensure that any variable used as a prefix is marked
3109 addressable. */
3110 for (x = TREE_OPERAND (t, 0);
3111 handled_component_p (x);
3112 x = TREE_OPERAND (x, 0))
3115 if ((tem = verify_address (t, x)))
3116 return tem;
3118 if (!(VAR_P (x)
3119 || TREE_CODE (x) == PARM_DECL
3120 || TREE_CODE (x) == RESULT_DECL))
3121 return NULL;
3123 if (!TREE_ADDRESSABLE (x))
3125 error ("address taken, but ADDRESSABLE bit not set");
3126 return x;
3129 break;
3132 case COND_EXPR:
3133 x = COND_EXPR_COND (t);
3134 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3136 error ("non-integral used in condition");
3137 return x;
3139 if (!is_gimple_condexpr (x))
3141 error ("invalid conditional operand");
3142 return x;
3144 break;
3146 case NON_LVALUE_EXPR:
3147 case TRUTH_NOT_EXPR:
3148 gcc_unreachable ();
3150 CASE_CONVERT:
3151 case FIX_TRUNC_EXPR:
3152 case FLOAT_EXPR:
3153 case NEGATE_EXPR:
3154 case ABS_EXPR:
3155 case BIT_NOT_EXPR:
3156 CHECK_OP (0, "invalid operand to unary operator");
3157 break;
3159 case REALPART_EXPR:
3160 case IMAGPART_EXPR:
3161 case BIT_FIELD_REF:
3162 if (!is_gimple_reg_type (TREE_TYPE (t)))
3164 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3165 return t;
3168 if (TREE_CODE (t) == BIT_FIELD_REF)
3170 tree t0 = TREE_OPERAND (t, 0);
3171 tree t1 = TREE_OPERAND (t, 1);
3172 tree t2 = TREE_OPERAND (t, 2);
3173 poly_uint64 size, bitpos;
3174 if (!poly_int_tree_p (t1, &size)
3175 || !poly_int_tree_p (t2, &bitpos)
3176 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3177 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3179 error ("invalid position or size operand to BIT_FIELD_REF");
3180 return t;
3182 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3183 && maybe_ne (TYPE_PRECISION (TREE_TYPE (t)), size))
3185 error ("integral result type precision does not match "
3186 "field size of BIT_FIELD_REF");
3187 return t;
3189 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3190 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3191 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t))),
3192 size))
3194 error ("mode size of non-integral result does not "
3195 "match field size of BIT_FIELD_REF");
3196 return t;
3198 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3199 && maybe_gt (size + bitpos,
3200 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (t0)))))
3202 error ("position plus size exceeds size of referenced object in "
3203 "BIT_FIELD_REF");
3204 return t;
3207 t = TREE_OPERAND (t, 0);
3209 /* Fall-through. */
3210 case COMPONENT_REF:
3211 case ARRAY_REF:
3212 case ARRAY_RANGE_REF:
3213 case VIEW_CONVERT_EXPR:
3214 /* We have a nest of references. Verify that each of the operands
3215 that determine where to reference is either a constant or a variable,
3216 verify that the base is valid, and then show we've already checked
3217 the subtrees. */
3218 while (handled_component_p (t))
3220 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3221 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3222 else if (TREE_CODE (t) == ARRAY_REF
3223 || TREE_CODE (t) == ARRAY_RANGE_REF)
3225 CHECK_OP (1, "invalid array index");
3226 if (TREE_OPERAND (t, 2))
3227 CHECK_OP (2, "invalid array lower bound");
3228 if (TREE_OPERAND (t, 3))
3229 CHECK_OP (3, "invalid array stride");
3231 else if (TREE_CODE (t) == BIT_FIELD_REF
3232 || TREE_CODE (t) == REALPART_EXPR
3233 || TREE_CODE (t) == IMAGPART_EXPR)
3235 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3236 "REALPART_EXPR");
3237 return t;
3240 t = TREE_OPERAND (t, 0);
3243 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3245 error ("invalid reference prefix");
3246 return t;
3248 walk_tree (&t, verify_expr, data, NULL);
3249 *walk_subtrees = 0;
3250 break;
3251 case PLUS_EXPR:
3252 case MINUS_EXPR:
3253 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3254 arithmetic should be done using POINTER_PLUS_EXPR. */
3255 if (POINTER_TYPE_P (TREE_TYPE (t)))
3257 error ("invalid operand to plus/minus, type is a pointer");
3258 return t;
3260 CHECK_OP (0, "invalid operand to binary operator");
3261 CHECK_OP (1, "invalid operand to binary operator");
3262 break;
3264 case POINTER_DIFF_EXPR:
3265 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3266 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3268 error ("invalid operand to pointer diff, operand is not a pointer");
3269 return t;
3271 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3272 || TYPE_UNSIGNED (TREE_TYPE (t))
3273 || (TYPE_PRECISION (TREE_TYPE (t))
3274 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3276 error ("invalid type for pointer diff");
3277 return t;
3279 CHECK_OP (0, "invalid operand to pointer diff");
3280 CHECK_OP (1, "invalid operand to pointer diff");
3281 break;
3283 case POINTER_PLUS_EXPR:
3284 /* Check to make sure the first operand is a pointer or reference type. */
3285 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3287 error ("invalid operand to pointer plus, first operand is not a pointer");
3288 return t;
3290 /* Check to make sure the second operand is a ptrofftype. */
3291 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3293 error ("invalid operand to pointer plus, second operand is not an "
3294 "integer type of appropriate width");
3295 return t;
3297 /* FALLTHROUGH */
3298 case LT_EXPR:
3299 case LE_EXPR:
3300 case GT_EXPR:
3301 case GE_EXPR:
3302 case EQ_EXPR:
3303 case NE_EXPR:
3304 case UNORDERED_EXPR:
3305 case ORDERED_EXPR:
3306 case UNLT_EXPR:
3307 case UNLE_EXPR:
3308 case UNGT_EXPR:
3309 case UNGE_EXPR:
3310 case UNEQ_EXPR:
3311 case LTGT_EXPR:
3312 case MULT_EXPR:
3313 case TRUNC_DIV_EXPR:
3314 case CEIL_DIV_EXPR:
3315 case FLOOR_DIV_EXPR:
3316 case ROUND_DIV_EXPR:
3317 case TRUNC_MOD_EXPR:
3318 case CEIL_MOD_EXPR:
3319 case FLOOR_MOD_EXPR:
3320 case ROUND_MOD_EXPR:
3321 case RDIV_EXPR:
3322 case EXACT_DIV_EXPR:
3323 case MIN_EXPR:
3324 case MAX_EXPR:
3325 case LSHIFT_EXPR:
3326 case RSHIFT_EXPR:
3327 case LROTATE_EXPR:
3328 case RROTATE_EXPR:
3329 case BIT_IOR_EXPR:
3330 case BIT_XOR_EXPR:
3331 case BIT_AND_EXPR:
3332 CHECK_OP (0, "invalid operand to binary operator");
3333 CHECK_OP (1, "invalid operand to binary operator");
3334 break;
3336 case CONSTRUCTOR:
3337 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3338 *walk_subtrees = 0;
3339 break;
3341 case CASE_LABEL_EXPR:
3342 if (CASE_CHAIN (t))
3344 error ("invalid CASE_CHAIN");
3345 return t;
3347 break;
3349 default:
3350 break;
3352 return NULL;
3354 #undef CHECK_OP
3358 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3359 Returns true if there is an error, otherwise false. */
3361 static bool
3362 verify_types_in_gimple_min_lval (tree expr)
3364 tree op;
3366 if (is_gimple_id (expr))
3367 return false;
3369 if (TREE_CODE (expr) != TARGET_MEM_REF
3370 && TREE_CODE (expr) != MEM_REF)
3372 error ("invalid expression for min lvalue");
3373 return true;
3376 /* TARGET_MEM_REFs are strange beasts. */
3377 if (TREE_CODE (expr) == TARGET_MEM_REF)
3378 return false;
3380 op = TREE_OPERAND (expr, 0);
3381 if (!is_gimple_val (op))
3383 error ("invalid operand in indirect reference");
3384 debug_generic_stmt (op);
3385 return true;
3387 /* Memory references now generally can involve a value conversion. */
3389 return false;
3392 /* Verify if EXPR is a valid GIMPLE reference expression. If
3393 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3394 if there is an error, otherwise false. */
3396 static bool
3397 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3399 while (handled_component_p (expr))
3401 tree op = TREE_OPERAND (expr, 0);
3403 if (TREE_CODE (expr) == ARRAY_REF
3404 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3406 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3407 || (TREE_OPERAND (expr, 2)
3408 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3409 || (TREE_OPERAND (expr, 3)
3410 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3412 error ("invalid operands to array reference");
3413 debug_generic_stmt (expr);
3414 return true;
3418 /* Verify if the reference array element types are compatible. */
3419 if (TREE_CODE (expr) == ARRAY_REF
3420 && !useless_type_conversion_p (TREE_TYPE (expr),
3421 TREE_TYPE (TREE_TYPE (op))))
3423 error ("type mismatch in array reference");
3424 debug_generic_stmt (TREE_TYPE (expr));
3425 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3426 return true;
3428 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3429 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3430 TREE_TYPE (TREE_TYPE (op))))
3432 error ("type mismatch in array range reference");
3433 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3434 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3435 return true;
3438 if ((TREE_CODE (expr) == REALPART_EXPR
3439 || TREE_CODE (expr) == IMAGPART_EXPR)
3440 && !useless_type_conversion_p (TREE_TYPE (expr),
3441 TREE_TYPE (TREE_TYPE (op))))
3443 error ("type mismatch in real/imagpart reference");
3444 debug_generic_stmt (TREE_TYPE (expr));
3445 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3446 return true;
3449 if (TREE_CODE (expr) == COMPONENT_REF
3450 && !useless_type_conversion_p (TREE_TYPE (expr),
3451 TREE_TYPE (TREE_OPERAND (expr, 1))))
3453 error ("type mismatch in component reference");
3454 debug_generic_stmt (TREE_TYPE (expr));
3455 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3456 return true;
3459 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3461 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3462 that their operand is not an SSA name or an invariant when
3463 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3464 bug). Otherwise there is nothing to verify, gross mismatches at
3465 most invoke undefined behavior. */
3466 if (require_lvalue
3467 && (TREE_CODE (op) == SSA_NAME
3468 || is_gimple_min_invariant (op)))
3470 error ("conversion of an SSA_NAME on the left hand side");
3471 debug_generic_stmt (expr);
3472 return true;
3474 else if (TREE_CODE (op) == SSA_NAME
3475 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3477 error ("conversion of register to a different size");
3478 debug_generic_stmt (expr);
3479 return true;
3481 else if (!handled_component_p (op))
3482 return false;
3485 expr = op;
3488 if (TREE_CODE (expr) == MEM_REF)
3490 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3492 error ("invalid address operand in MEM_REF");
3493 debug_generic_stmt (expr);
3494 return true;
3496 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3497 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3499 error ("invalid offset operand in MEM_REF");
3500 debug_generic_stmt (expr);
3501 return true;
3504 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3506 if (!TMR_BASE (expr)
3507 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3509 error ("invalid address operand in TARGET_MEM_REF");
3510 return true;
3512 if (!TMR_OFFSET (expr)
3513 || !poly_int_tree_p (TMR_OFFSET (expr))
3514 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3516 error ("invalid offset operand in TARGET_MEM_REF");
3517 debug_generic_stmt (expr);
3518 return true;
3522 return ((require_lvalue || !is_gimple_min_invariant (expr))
3523 && verify_types_in_gimple_min_lval (expr));
3526 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3527 list of pointer-to types that is trivially convertible to DEST. */
3529 static bool
3530 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3532 tree src;
3534 if (!TYPE_POINTER_TO (src_obj))
3535 return true;
3537 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3538 if (useless_type_conversion_p (dest, src))
3539 return true;
3541 return false;
3544 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3545 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3547 static bool
3548 valid_fixed_convert_types_p (tree type1, tree type2)
3550 return (FIXED_POINT_TYPE_P (type1)
3551 && (INTEGRAL_TYPE_P (type2)
3552 || SCALAR_FLOAT_TYPE_P (type2)
3553 || FIXED_POINT_TYPE_P (type2)));
3556 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3557 is a problem, otherwise false. */
3559 static bool
3560 verify_gimple_call (gcall *stmt)
3562 tree fn = gimple_call_fn (stmt);
3563 tree fntype, fndecl;
3564 unsigned i;
3566 if (gimple_call_internal_p (stmt))
3568 if (fn)
3570 error ("gimple call has two targets");
3571 debug_generic_stmt (fn);
3572 return true;
3574 /* FIXME: for passing label as arg in internal fn PHI from GIMPLE FE. */
3575 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3577 return false;
3580 else
3582 if (!fn)
3584 error ("gimple call has no target");
3585 return true;
3589 if (fn && !is_gimple_call_addr (fn))
3591 error ("invalid function in gimple call");
3592 debug_generic_stmt (fn);
3593 return true;
3596 if (fn
3597 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3598 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3599 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3601 error ("non-function in gimple call");
3602 return true;
3605 fndecl = gimple_call_fndecl (stmt);
3606 if (fndecl
3607 && TREE_CODE (fndecl) == FUNCTION_DECL
3608 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3609 && !DECL_PURE_P (fndecl)
3610 && !TREE_READONLY (fndecl))
3612 error ("invalid pure const state for function");
3613 return true;
3616 tree lhs = gimple_call_lhs (stmt);
3617 if (lhs
3618 && (!is_gimple_lvalue (lhs)
3619 || verify_types_in_gimple_reference (lhs, true)))
3621 error ("invalid LHS in gimple call");
3622 return true;
3625 if (gimple_call_ctrl_altering_p (stmt)
3626 && gimple_call_noreturn_p (stmt)
3627 && should_remove_lhs_p (lhs))
3629 error ("LHS in noreturn call");
3630 return true;
3633 fntype = gimple_call_fntype (stmt);
3634 if (fntype
3635 && lhs
3636 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3637 /* ??? At least C++ misses conversions at assignments from
3638 void * call results.
3639 For now simply allow arbitrary pointer type conversions. */
3640 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3641 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3643 error ("invalid conversion in gimple call");
3644 debug_generic_stmt (TREE_TYPE (lhs));
3645 debug_generic_stmt (TREE_TYPE (fntype));
3646 return true;
3649 if (gimple_call_chain (stmt)
3650 && !is_gimple_val (gimple_call_chain (stmt)))
3652 error ("invalid static chain in gimple call");
3653 debug_generic_stmt (gimple_call_chain (stmt));
3654 return true;
3657 /* If there is a static chain argument, the call should either be
3658 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3659 if (gimple_call_chain (stmt)
3660 && fndecl
3661 && !DECL_STATIC_CHAIN (fndecl))
3663 error ("static chain with function that doesn%'t use one");
3664 return true;
3667 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3669 switch (DECL_FUNCTION_CODE (fndecl))
3671 case BUILT_IN_UNREACHABLE:
3672 case BUILT_IN_TRAP:
3673 if (gimple_call_num_args (stmt) > 0)
3675 /* A built-in unreachable with parameters might not be caught by
3676 the undefined behavior sanitizer. Front ends do check that users
3677 do not call them that way, but we also produce calls to
3678 __builtin_unreachable internally, for example when IPA figures
3679 out a call cannot happen in a legal program. In such cases,
3680 we must make sure arguments are stripped off. */
3681 error ("__builtin_unreachable or __builtin_trap call with "
3682 "arguments");
3683 return true;
3685 break;
3686 default:
3687 break;
3691 /* ??? The C frontend passes unpromoted arguments in case it
3692 didn't see a function declaration before the call. So for now
3693 leave the call arguments mostly unverified. Once we gimplify
3694 unit-at-a-time we have a chance to fix this. */
3696 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3698 tree arg = gimple_call_arg (stmt, i);
3699 if ((is_gimple_reg_type (TREE_TYPE (arg))
3700 && !is_gimple_val (arg))
3701 || (!is_gimple_reg_type (TREE_TYPE (arg))
3702 && !is_gimple_lvalue (arg)))
3704 error ("invalid argument to gimple call");
3705 debug_generic_expr (arg);
3706 return true;
3710 return false;
3713 /* Verifies the gimple comparison with the result type TYPE, the
3714 operands OP0 and OP1, and comparison code CODE. */
3716 static bool
3717 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3719 tree op0_type = TREE_TYPE (op0);
3720 tree op1_type = TREE_TYPE (op1);
3722 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3724 error ("invalid operands in gimple comparison");
3725 return true;
3728 /* For comparisons we do not have the operations type as the
3729 effective type the comparison is carried out in. Instead
3730 we require that either the first operand is trivially
3731 convertible into the second, or the other way around.
3732 Because we special-case pointers to void we allow
3733 comparisons of pointers with the same mode as well. */
3734 if (!useless_type_conversion_p (op0_type, op1_type)
3735 && !useless_type_conversion_p (op1_type, op0_type)
3736 && (!POINTER_TYPE_P (op0_type)
3737 || !POINTER_TYPE_P (op1_type)
3738 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3740 error ("mismatching comparison operand types");
3741 debug_generic_expr (op0_type);
3742 debug_generic_expr (op1_type);
3743 return true;
3746 /* The resulting type of a comparison may be an effective boolean type. */
3747 if (INTEGRAL_TYPE_P (type)
3748 && (TREE_CODE (type) == BOOLEAN_TYPE
3749 || TYPE_PRECISION (type) == 1))
3751 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3752 || TREE_CODE (op1_type) == VECTOR_TYPE)
3753 && code != EQ_EXPR && code != NE_EXPR
3754 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3755 && !VECTOR_INTEGER_TYPE_P (op0_type))
3757 error ("unsupported operation or type for vector comparison"
3758 " returning a boolean");
3759 debug_generic_expr (op0_type);
3760 debug_generic_expr (op1_type);
3761 return true;
3764 /* Or a boolean vector type with the same element count
3765 as the comparison operand types. */
3766 else if (TREE_CODE (type) == VECTOR_TYPE
3767 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3769 if (TREE_CODE (op0_type) != VECTOR_TYPE
3770 || TREE_CODE (op1_type) != VECTOR_TYPE)
3772 error ("non-vector operands in vector comparison");
3773 debug_generic_expr (op0_type);
3774 debug_generic_expr (op1_type);
3775 return true;
3778 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3780 error ("invalid vector comparison resulting type");
3781 debug_generic_expr (type);
3782 return true;
3785 else
3787 error ("bogus comparison result type");
3788 debug_generic_expr (type);
3789 return true;
3792 return false;
3795 /* Verify a gimple assignment statement STMT with an unary rhs.
3796 Returns true if anything is wrong. */
3798 static bool
3799 verify_gimple_assign_unary (gassign *stmt)
3801 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3802 tree lhs = gimple_assign_lhs (stmt);
3803 tree lhs_type = TREE_TYPE (lhs);
3804 tree rhs1 = gimple_assign_rhs1 (stmt);
3805 tree rhs1_type = TREE_TYPE (rhs1);
3807 if (!is_gimple_reg (lhs))
3809 error ("non-register as LHS of unary operation");
3810 return true;
3813 if (!is_gimple_val (rhs1))
3815 error ("invalid operand in unary operation");
3816 return true;
3819 /* First handle conversions. */
3820 switch (rhs_code)
3822 CASE_CONVERT:
3824 /* Allow conversions from pointer type to integral type only if
3825 there is no sign or zero extension involved.
3826 For targets where the precision of ptrofftype doesn't match that
3827 of pointers we need to allow arbitrary conversions to ptrofftype. */
3828 if ((POINTER_TYPE_P (lhs_type)
3829 && INTEGRAL_TYPE_P (rhs1_type))
3830 || (POINTER_TYPE_P (rhs1_type)
3831 && INTEGRAL_TYPE_P (lhs_type)
3832 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3833 || ptrofftype_p (sizetype))))
3834 return false;
3836 /* Allow conversion from integral to offset type and vice versa. */
3837 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3838 && INTEGRAL_TYPE_P (rhs1_type))
3839 || (INTEGRAL_TYPE_P (lhs_type)
3840 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3841 return false;
3843 /* Otherwise assert we are converting between types of the
3844 same kind. */
3845 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3847 error ("invalid types in nop conversion");
3848 debug_generic_expr (lhs_type);
3849 debug_generic_expr (rhs1_type);
3850 return true;
3853 return false;
3856 case ADDR_SPACE_CONVERT_EXPR:
3858 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3859 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3860 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3862 error ("invalid types in address space conversion");
3863 debug_generic_expr (lhs_type);
3864 debug_generic_expr (rhs1_type);
3865 return true;
3868 return false;
3871 case FIXED_CONVERT_EXPR:
3873 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3874 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3876 error ("invalid types in fixed-point conversion");
3877 debug_generic_expr (lhs_type);
3878 debug_generic_expr (rhs1_type);
3879 return true;
3882 return false;
3885 case FLOAT_EXPR:
3887 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3888 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3889 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3891 error ("invalid types in conversion to floating point");
3892 debug_generic_expr (lhs_type);
3893 debug_generic_expr (rhs1_type);
3894 return true;
3897 return false;
3900 case FIX_TRUNC_EXPR:
3902 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3903 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3904 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3906 error ("invalid types in conversion to integer");
3907 debug_generic_expr (lhs_type);
3908 debug_generic_expr (rhs1_type);
3909 return true;
3912 return false;
3915 case VEC_UNPACK_HI_EXPR:
3916 case VEC_UNPACK_LO_EXPR:
3917 case VEC_UNPACK_FLOAT_HI_EXPR:
3918 case VEC_UNPACK_FLOAT_LO_EXPR:
3919 /* FIXME. */
3920 return false;
3922 case NEGATE_EXPR:
3923 case ABS_EXPR:
3924 case BIT_NOT_EXPR:
3925 case PAREN_EXPR:
3926 case CONJ_EXPR:
3927 break;
3929 case VEC_DUPLICATE_EXPR:
3930 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3931 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3933 error ("vec_duplicate should be from a scalar to a like vector");
3934 debug_generic_expr (lhs_type);
3935 debug_generic_expr (rhs1_type);
3936 return true;
3938 return false;
3940 default:
3941 gcc_unreachable ();
3944 /* For the remaining codes assert there is no conversion involved. */
3945 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3947 error ("non-trivial conversion in unary operation");
3948 debug_generic_expr (lhs_type);
3949 debug_generic_expr (rhs1_type);
3950 return true;
3953 return false;
3956 /* Verify a gimple assignment statement STMT with a binary rhs.
3957 Returns true if anything is wrong. */
3959 static bool
3960 verify_gimple_assign_binary (gassign *stmt)
3962 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3963 tree lhs = gimple_assign_lhs (stmt);
3964 tree lhs_type = TREE_TYPE (lhs);
3965 tree rhs1 = gimple_assign_rhs1 (stmt);
3966 tree rhs1_type = TREE_TYPE (rhs1);
3967 tree rhs2 = gimple_assign_rhs2 (stmt);
3968 tree rhs2_type = TREE_TYPE (rhs2);
3970 if (!is_gimple_reg (lhs))
3972 error ("non-register as LHS of binary operation");
3973 return true;
3976 if (!is_gimple_val (rhs1)
3977 || !is_gimple_val (rhs2))
3979 error ("invalid operands in binary operation");
3980 return true;
3983 /* First handle operations that involve different types. */
3984 switch (rhs_code)
3986 case COMPLEX_EXPR:
3988 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3989 || !(INTEGRAL_TYPE_P (rhs1_type)
3990 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3991 || !(INTEGRAL_TYPE_P (rhs2_type)
3992 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3994 error ("type mismatch in complex expression");
3995 debug_generic_expr (lhs_type);
3996 debug_generic_expr (rhs1_type);
3997 debug_generic_expr (rhs2_type);
3998 return true;
4001 return false;
4004 case LSHIFT_EXPR:
4005 case RSHIFT_EXPR:
4006 case LROTATE_EXPR:
4007 case RROTATE_EXPR:
4009 /* Shifts and rotates are ok on integral types, fixed point
4010 types and integer vector types. */
4011 if ((!INTEGRAL_TYPE_P (rhs1_type)
4012 && !FIXED_POINT_TYPE_P (rhs1_type)
4013 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
4014 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
4015 || (!INTEGRAL_TYPE_P (rhs2_type)
4016 /* Vector shifts of vectors are also ok. */
4017 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
4018 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4019 && TREE_CODE (rhs2_type) == VECTOR_TYPE
4020 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
4021 || !useless_type_conversion_p (lhs_type, rhs1_type))
4023 error ("type mismatch in shift expression");
4024 debug_generic_expr (lhs_type);
4025 debug_generic_expr (rhs1_type);
4026 debug_generic_expr (rhs2_type);
4027 return true;
4030 return false;
4033 case WIDEN_LSHIFT_EXPR:
4035 if (!INTEGRAL_TYPE_P (lhs_type)
4036 || !INTEGRAL_TYPE_P (rhs1_type)
4037 || TREE_CODE (rhs2) != INTEGER_CST
4038 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
4040 error ("type mismatch in widening vector shift expression");
4041 debug_generic_expr (lhs_type);
4042 debug_generic_expr (rhs1_type);
4043 debug_generic_expr (rhs2_type);
4044 return true;
4047 return false;
4050 case VEC_WIDEN_LSHIFT_HI_EXPR:
4051 case VEC_WIDEN_LSHIFT_LO_EXPR:
4053 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4054 || TREE_CODE (lhs_type) != VECTOR_TYPE
4055 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4056 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4057 || TREE_CODE (rhs2) != INTEGER_CST
4058 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4059 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4061 error ("type mismatch in widening vector shift expression");
4062 debug_generic_expr (lhs_type);
4063 debug_generic_expr (rhs1_type);
4064 debug_generic_expr (rhs2_type);
4065 return true;
4068 return false;
4071 case PLUS_EXPR:
4072 case MINUS_EXPR:
4074 tree lhs_etype = lhs_type;
4075 tree rhs1_etype = rhs1_type;
4076 tree rhs2_etype = rhs2_type;
4077 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
4079 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4080 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4082 error ("invalid non-vector operands to vector valued plus");
4083 return true;
4085 lhs_etype = TREE_TYPE (lhs_type);
4086 rhs1_etype = TREE_TYPE (rhs1_type);
4087 rhs2_etype = TREE_TYPE (rhs2_type);
4089 if (POINTER_TYPE_P (lhs_etype)
4090 || POINTER_TYPE_P (rhs1_etype)
4091 || POINTER_TYPE_P (rhs2_etype))
4093 error ("invalid (pointer) operands to plus/minus");
4094 return true;
4097 /* Continue with generic binary expression handling. */
4098 break;
4101 case POINTER_PLUS_EXPR:
4103 if (!POINTER_TYPE_P (rhs1_type)
4104 || !useless_type_conversion_p (lhs_type, rhs1_type)
4105 || !ptrofftype_p (rhs2_type))
4107 error ("type mismatch in pointer plus expression");
4108 debug_generic_stmt (lhs_type);
4109 debug_generic_stmt (rhs1_type);
4110 debug_generic_stmt (rhs2_type);
4111 return true;
4114 return false;
4117 case POINTER_DIFF_EXPR:
4119 if (!POINTER_TYPE_P (rhs1_type)
4120 || !POINTER_TYPE_P (rhs2_type)
4121 /* Because we special-case pointers to void, we allow the difference
4122 of arbitrary pointers with the same mode. */
4123 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4124 || TREE_CODE (lhs_type) != INTEGER_TYPE
4125 || TYPE_UNSIGNED (lhs_type)
4126 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4128 error ("type mismatch in pointer diff expression");
4129 debug_generic_stmt (lhs_type);
4130 debug_generic_stmt (rhs1_type);
4131 debug_generic_stmt (rhs2_type);
4132 return true;
4135 return false;
4138 case TRUTH_ANDIF_EXPR:
4139 case TRUTH_ORIF_EXPR:
4140 case TRUTH_AND_EXPR:
4141 case TRUTH_OR_EXPR:
4142 case TRUTH_XOR_EXPR:
4144 gcc_unreachable ();
4146 case LT_EXPR:
4147 case LE_EXPR:
4148 case GT_EXPR:
4149 case GE_EXPR:
4150 case EQ_EXPR:
4151 case NE_EXPR:
4152 case UNORDERED_EXPR:
4153 case ORDERED_EXPR:
4154 case UNLT_EXPR:
4155 case UNLE_EXPR:
4156 case UNGT_EXPR:
4157 case UNGE_EXPR:
4158 case UNEQ_EXPR:
4159 case LTGT_EXPR:
4160 /* Comparisons are also binary, but the result type is not
4161 connected to the operand types. */
4162 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4164 case WIDEN_MULT_EXPR:
4165 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4166 return true;
4167 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4168 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4170 case WIDEN_SUM_EXPR:
4172 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4173 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4174 && ((!INTEGRAL_TYPE_P (rhs1_type)
4175 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4176 || (!INTEGRAL_TYPE_P (lhs_type)
4177 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4178 || !useless_type_conversion_p (lhs_type, rhs2_type)
4179 || (GET_MODE_SIZE (element_mode (rhs2_type))
4180 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4182 error ("type mismatch in widening sum reduction");
4183 debug_generic_expr (lhs_type);
4184 debug_generic_expr (rhs1_type);
4185 debug_generic_expr (rhs2_type);
4186 return true;
4188 return false;
4191 case VEC_WIDEN_MULT_HI_EXPR:
4192 case VEC_WIDEN_MULT_LO_EXPR:
4193 case VEC_WIDEN_MULT_EVEN_EXPR:
4194 case VEC_WIDEN_MULT_ODD_EXPR:
4196 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4197 || TREE_CODE (lhs_type) != VECTOR_TYPE
4198 || !types_compatible_p (rhs1_type, rhs2_type)
4199 || (GET_MODE_SIZE (element_mode (lhs_type))
4200 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4202 error ("type mismatch in vector widening multiplication");
4203 debug_generic_expr (lhs_type);
4204 debug_generic_expr (rhs1_type);
4205 debug_generic_expr (rhs2_type);
4206 return true;
4208 return false;
4211 case VEC_PACK_TRUNC_EXPR:
4212 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4213 vector boolean types. */
4214 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4215 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4216 && types_compatible_p (rhs1_type, rhs2_type)
4217 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4218 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4219 return false;
4221 /* Fallthru. */
4222 case VEC_PACK_SAT_EXPR:
4223 case VEC_PACK_FIX_TRUNC_EXPR:
4225 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4226 || TREE_CODE (lhs_type) != VECTOR_TYPE
4227 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4228 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4229 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4230 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4231 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4232 || !types_compatible_p (rhs1_type, rhs2_type)
4233 || (GET_MODE_SIZE (element_mode (rhs1_type))
4234 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4236 error ("type mismatch in vector pack expression");
4237 debug_generic_expr (lhs_type);
4238 debug_generic_expr (rhs1_type);
4239 debug_generic_expr (rhs2_type);
4240 return true;
4243 return false;
4246 case MULT_EXPR:
4247 case MULT_HIGHPART_EXPR:
4248 case TRUNC_DIV_EXPR:
4249 case CEIL_DIV_EXPR:
4250 case FLOOR_DIV_EXPR:
4251 case ROUND_DIV_EXPR:
4252 case TRUNC_MOD_EXPR:
4253 case CEIL_MOD_EXPR:
4254 case FLOOR_MOD_EXPR:
4255 case ROUND_MOD_EXPR:
4256 case RDIV_EXPR:
4257 case EXACT_DIV_EXPR:
4258 case MIN_EXPR:
4259 case MAX_EXPR:
4260 case BIT_IOR_EXPR:
4261 case BIT_XOR_EXPR:
4262 case BIT_AND_EXPR:
4263 /* Continue with generic binary expression handling. */
4264 break;
4266 case VEC_SERIES_EXPR:
4267 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4269 error ("type mismatch in series expression");
4270 debug_generic_expr (rhs1_type);
4271 debug_generic_expr (rhs2_type);
4272 return true;
4274 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4275 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4277 error ("vector type expected in series expression");
4278 debug_generic_expr (lhs_type);
4279 return true;
4281 return false;
4283 default:
4284 gcc_unreachable ();
4287 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4288 || !useless_type_conversion_p (lhs_type, rhs2_type))
4290 error ("type mismatch in binary expression");
4291 debug_generic_stmt (lhs_type);
4292 debug_generic_stmt (rhs1_type);
4293 debug_generic_stmt (rhs2_type);
4294 return true;
4297 return false;
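/* A hedged illustration (not part of the original file): ordinary C
   pointer arithmetic exercises two of the binary checks above.  The
   addition below gimplifies to a POINTER_PLUS_EXPR, whose offset must
   have pointer-offset type (ptrofftype_p), and the subtraction to a
   POINTER_DIFF_EXPR, whose result must be a signed integer type with
   the same precision as the pointers.  */
#include <stddef.h>

char *advance (char *p, size_t n) { return p + n; }      /* POINTER_PLUS_EXPR */
ptrdiff_t distance (char *a, char *b) { return a - b; }  /* POINTER_DIFF_EXPR */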
4300 /* Verify a gimple assignment statement STMT with a ternary rhs.
4301 Returns true if anything is wrong. */
4303 static bool
4304 verify_gimple_assign_ternary (gassign *stmt)
4306 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4307 tree lhs = gimple_assign_lhs (stmt);
4308 tree lhs_type = TREE_TYPE (lhs);
4309 tree rhs1 = gimple_assign_rhs1 (stmt);
4310 tree rhs1_type = TREE_TYPE (rhs1);
4311 tree rhs2 = gimple_assign_rhs2 (stmt);
4312 tree rhs2_type = TREE_TYPE (rhs2);
4313 tree rhs3 = gimple_assign_rhs3 (stmt);
4314 tree rhs3_type = TREE_TYPE (rhs3);
4316 if (!is_gimple_reg (lhs))
4318 error ("non-register as LHS of ternary operation");
4319 return true;
4322 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4323 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4324 || !is_gimple_val (rhs2)
4325 || !is_gimple_val (rhs3))
4327 error ("invalid operands in ternary operation");
4328 return true;
4331 /* First handle operations that involve different types. */
4332 switch (rhs_code)
4334 case WIDEN_MULT_PLUS_EXPR:
4335 case WIDEN_MULT_MINUS_EXPR:
4336 if ((!INTEGRAL_TYPE_P (rhs1_type)
4337 && !FIXED_POINT_TYPE_P (rhs1_type))
4338 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4339 || !useless_type_conversion_p (lhs_type, rhs3_type)
4340 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4341 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4343 error ("type mismatch in widening multiply-accumulate expression");
4344 debug_generic_expr (lhs_type);
4345 debug_generic_expr (rhs1_type);
4346 debug_generic_expr (rhs2_type);
4347 debug_generic_expr (rhs3_type);
4348 return true;
4350 break;
4352 case FMA_EXPR:
4353 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4354 || !useless_type_conversion_p (lhs_type, rhs2_type)
4355 || !useless_type_conversion_p (lhs_type, rhs3_type))
4357 error ("type mismatch in fused multiply-add expression");
4358 debug_generic_expr (lhs_type);
4359 debug_generic_expr (rhs1_type);
4360 debug_generic_expr (rhs2_type);
4361 debug_generic_expr (rhs3_type);
4362 return true;
4364 break;
4366 case VEC_COND_EXPR:
4367 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4368 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4369 != TYPE_VECTOR_SUBPARTS (lhs_type))
4371 error ("the first argument of a VEC_COND_EXPR must be of a "
4372 "boolean vector type of the same number of elements "
4373 "as the result");
4374 debug_generic_expr (lhs_type);
4375 debug_generic_expr (rhs1_type);
4376 return true;
4378 /* Fallthrough. */
4379 case COND_EXPR:
4380 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4381 || !useless_type_conversion_p (lhs_type, rhs3_type))
4383 error ("type mismatch in conditional expression");
4384 debug_generic_expr (lhs_type);
4385 debug_generic_expr (rhs2_type);
4386 debug_generic_expr (rhs3_type);
4387 return true;
4389 break;
4391 case VEC_PERM_EXPR:
4392 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4393 || !useless_type_conversion_p (lhs_type, rhs2_type))
4395 error ("type mismatch in vector permute expression");
4396 debug_generic_expr (lhs_type);
4397 debug_generic_expr (rhs1_type);
4398 debug_generic_expr (rhs2_type);
4399 debug_generic_expr (rhs3_type);
4400 return true;
4403 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4404 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4405 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4407 error ("vector types expected in vector permute expression");
4408 debug_generic_expr (lhs_type);
4409 debug_generic_expr (rhs1_type);
4410 debug_generic_expr (rhs2_type);
4411 debug_generic_expr (rhs3_type);
4412 return true;
4415 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4416 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4417 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4418 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4419 != TYPE_VECTOR_SUBPARTS (lhs_type))
4421 error ("vectors with different element number found "
4422 "in vector permute expression");
4423 debug_generic_expr (lhs_type);
4424 debug_generic_expr (rhs1_type);
4425 debug_generic_expr (rhs2_type);
4426 debug_generic_expr (rhs3_type);
4427 return true;
4430 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4431 || (TREE_CODE (rhs3) != VECTOR_CST
4432 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4433 (TREE_TYPE (rhs3_type)))
4434 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4435 (TREE_TYPE (rhs1_type))))))
4437 error ("invalid mask type in vector permute expression");
4438 debug_generic_expr (lhs_type);
4439 debug_generic_expr (rhs1_type);
4440 debug_generic_expr (rhs2_type);
4441 debug_generic_expr (rhs3_type);
4442 return true;
4445 return false;
4447 case SAD_EXPR:
4448 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4449 || !useless_type_conversion_p (lhs_type, rhs3_type)
4450 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4451 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4453 error ("type mismatch in sad expression");
4454 debug_generic_expr (lhs_type);
4455 debug_generic_expr (rhs1_type);
4456 debug_generic_expr (rhs2_type);
4457 debug_generic_expr (rhs3_type);
4458 return true;
4461 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4462 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4463 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4465 error ("vector types expected in sad expression");
4466 debug_generic_expr (lhs_type);
4467 debug_generic_expr (rhs1_type);
4468 debug_generic_expr (rhs2_type);
4469 debug_generic_expr (rhs3_type);
4470 return true;
4473 return false;
4475 case BIT_INSERT_EXPR:
4476 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4478 error ("type mismatch in BIT_INSERT_EXPR");
4479 debug_generic_expr (lhs_type);
4480 debug_generic_expr (rhs1_type);
4481 return true;
4483 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4484 && INTEGRAL_TYPE_P (rhs2_type))
4485 || (VECTOR_TYPE_P (rhs1_type)
4486 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4488 error ("not allowed type combination in BIT_INSERT_EXPR");
4489 debug_generic_expr (rhs1_type);
4490 debug_generic_expr (rhs2_type);
4491 return true;
4493 if (! tree_fits_uhwi_p (rhs3)
4494 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4495 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4497 error ("invalid position or size in BIT_INSERT_EXPR");
4498 return true;
4500 if (INTEGRAL_TYPE_P (rhs1_type))
4502 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4503 if (bitpos >= TYPE_PRECISION (rhs1_type)
4504 || (bitpos + TYPE_PRECISION (rhs2_type)
4505 > TYPE_PRECISION (rhs1_type)))
4507 error ("insertion out of range in BIT_INSERT_EXPR");
4508 return true;
4511 else if (VECTOR_TYPE_P (rhs1_type))
4513 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4514 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4515 if (bitpos % bitsize != 0)
4517 error ("vector insertion not at element boundary");
4518 return true;
4521 return false;
4523 case DOT_PROD_EXPR:
4525 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4526 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4527 && ((!INTEGRAL_TYPE_P (rhs1_type)
4528 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4529 || (!INTEGRAL_TYPE_P (lhs_type)
4530 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4531 || !types_compatible_p (rhs1_type, rhs2_type)
4532 || !useless_type_conversion_p (lhs_type, rhs3_type)
4533 || (GET_MODE_SIZE (element_mode (rhs3_type))
4534 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4536 error ("type mismatch in dot product reduction");
4537 debug_generic_expr (lhs_type);
4538 debug_generic_expr (rhs1_type);
4539 debug_generic_expr (rhs2_type);
4540 return true;
4542 return false;
4545 case REALIGN_LOAD_EXPR:
4546 /* FIXME. */
4547 return false;
4549 default:
4550 gcc_unreachable ();
4552 return false;
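/* A hedged illustration (not part of the original file): on targets
   with a fused multiply-add instruction, the fmaf call below may be
   represented as an FMA_EXPR, a ternary rhs whose three operand types
   must all match the LHS type per the check above.  COND_EXPR and
   VEC_COND_EXPR ternaries typically arise later, e.g. when
   if-conversion replaces branchy selects.  */
#include <math.h>

float muladd (float x, float y, float z) { return fmaf (x, y, z); }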
4555 /* Verify a gimple assignment statement STMT with a single rhs.
4556 Returns true if anything is wrong. */
4558 static bool
4559 verify_gimple_assign_single (gassign *stmt)
4561 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4562 tree lhs = gimple_assign_lhs (stmt);
4563 tree lhs_type = TREE_TYPE (lhs);
4564 tree rhs1 = gimple_assign_rhs1 (stmt);
4565 tree rhs1_type = TREE_TYPE (rhs1);
4566 bool res = false;
4568 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4570 error ("non-trivial conversion at assignment");
4571 debug_generic_expr (lhs_type);
4572 debug_generic_expr (rhs1_type);
4573 return true;
4576 if (gimple_clobber_p (stmt)
4577 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4579 error ("non-decl/MEM_REF LHS in clobber statement");
4580 debug_generic_expr (lhs);
4581 return true;
4584 if (handled_component_p (lhs)
4585 || TREE_CODE (lhs) == MEM_REF
4586 || TREE_CODE (lhs) == TARGET_MEM_REF)
4587 res |= verify_types_in_gimple_reference (lhs, true);
4589 /* Special codes we cannot handle via their class. */
4590 switch (rhs_code)
4592 case ADDR_EXPR:
4594 tree op = TREE_OPERAND (rhs1, 0);
4595 if (!is_gimple_addressable (op))
4597 error ("invalid operand in unary expression");
4598 return true;
4601 /* Technically there is no longer a need for matching types, but
4602 gimple hygiene asks for this check. In LTO we can end up
4603 combining incompatible units and thus end up with addresses
4604 of globals that change their type to a common one. */
4605 if (!in_lto_p
4606 && !types_compatible_p (TREE_TYPE (op),
4607 TREE_TYPE (TREE_TYPE (rhs1)))
4608 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4609 TREE_TYPE (op)))
4611 error ("type mismatch in address expression");
4612 debug_generic_stmt (TREE_TYPE (rhs1));
4613 debug_generic_stmt (TREE_TYPE (op));
4614 return true;
4617 return verify_types_in_gimple_reference (op, true);
4620 /* tcc_reference */
4621 case INDIRECT_REF:
4622 error ("INDIRECT_REF in gimple IL");
4623 return true;
4625 case COMPONENT_REF:
4626 case BIT_FIELD_REF:
4627 case ARRAY_REF:
4628 case ARRAY_RANGE_REF:
4629 case VIEW_CONVERT_EXPR:
4630 case REALPART_EXPR:
4631 case IMAGPART_EXPR:
4632 case TARGET_MEM_REF:
4633 case MEM_REF:
4634 if (!is_gimple_reg (lhs)
4635 && is_gimple_reg_type (TREE_TYPE (lhs)))
4637 error ("invalid rhs for gimple memory store");
4638 debug_generic_stmt (lhs);
4639 debug_generic_stmt (rhs1);
4640 return true;
4642 return res || verify_types_in_gimple_reference (rhs1, false);
4644 /* tcc_constant */
4645 case SSA_NAME:
4646 case INTEGER_CST:
4647 case REAL_CST:
4648 case FIXED_CST:
4649 case COMPLEX_CST:
4650 case VECTOR_CST:
4651 case STRING_CST:
4652 return res;
4654 /* tcc_declaration */
4655 case CONST_DECL:
4656 return res;
4657 case VAR_DECL:
4658 case PARM_DECL:
4659 if (!is_gimple_reg (lhs)
4660 && !is_gimple_reg (rhs1)
4661 && is_gimple_reg_type (TREE_TYPE (lhs)))
4663 error ("invalid rhs for gimple memory store");
4664 debug_generic_stmt (lhs);
4665 debug_generic_stmt (rhs1);
4666 return true;
4668 return res;
4670 case CONSTRUCTOR:
4671 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4673 unsigned int i;
4674 tree elt_i, elt_v, elt_t = NULL_TREE;
4676 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4677 return res;
4678 /* For vector CONSTRUCTORs we require that either it is an empty
4679 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4680 (then the element count must be correct to cover the whole
4681 outer vector and the index must be NULL on all elements), or it
4682 is a CONSTRUCTOR of scalar elements, where as an exception we
4683 allow a smaller number of elements (assuming zero filling) and
4684 consecutive indexes as compared to NULL indexes (such
4685 CONSTRUCTORs can appear in the IL from FEs). */
4686 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4688 if (elt_t == NULL_TREE)
4690 elt_t = TREE_TYPE (elt_v);
4691 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4693 tree elt_t = TREE_TYPE (elt_v);
4694 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4695 TREE_TYPE (elt_t)))
4697 error ("incorrect type of vector CONSTRUCTOR"
4698 " elements");
4699 debug_generic_stmt (rhs1);
4700 return true;
4702 else if (CONSTRUCTOR_NELTS (rhs1)
4703 * TYPE_VECTOR_SUBPARTS (elt_t)
4704 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4706 error ("incorrect number of vector CONSTRUCTOR"
4707 " elements");
4708 debug_generic_stmt (rhs1);
4709 return true;
4712 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4713 elt_t))
4715 error ("incorrect type of vector CONSTRUCTOR elements");
4716 debug_generic_stmt (rhs1);
4717 return true;
4719 else if (CONSTRUCTOR_NELTS (rhs1)
4720 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4722 error ("incorrect number of vector CONSTRUCTOR elements");
4723 debug_generic_stmt (rhs1);
4724 return true;
4727 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4729 error ("incorrect type of vector CONSTRUCTOR elements");
4730 debug_generic_stmt (rhs1);
4731 return true;
4733 if (elt_i != NULL_TREE
4734 && (TREE_CODE (elt_t) == VECTOR_TYPE
4735 || TREE_CODE (elt_i) != INTEGER_CST
4736 || compare_tree_int (elt_i, i) != 0))
4738 error ("vector CONSTRUCTOR with non-NULL element index");
4739 debug_generic_stmt (rhs1);
4740 return true;
4742 if (!is_gimple_val (elt_v))
4744 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4745 debug_generic_stmt (rhs1);
4746 return true;
4750 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4752 error ("non-vector CONSTRUCTOR with elements");
4753 debug_generic_stmt (rhs1);
4754 return true;
4756 return res;
4757 case OBJ_TYPE_REF:
4758 case ASSERT_EXPR:
4759 case WITH_SIZE_EXPR:
4760 /* FIXME. */
4761 return res;
4763 default:;
4766 return res;
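/* A hedged illustration (not part of the original file): a store
   through a pointer is a single-rhs assignment whose LHS is a MEM_REF.
   Since int is a register type, the stored value must itself be a
   GIMPLE register (here an SSA name); a memory-to-memory copy of a
   register-type value would trigger the "invalid rhs for gimple
   memory store" error above.  */
void store (int *p, int v) { *p = v; }  /* roughly: *p_1(D) = v_2(D); */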
4769 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4770 is a problem, otherwise false. */
4772 static bool
4773 verify_gimple_assign (gassign *stmt)
4775 switch (gimple_assign_rhs_class (stmt))
4777 case GIMPLE_SINGLE_RHS:
4778 return verify_gimple_assign_single (stmt);
4780 case GIMPLE_UNARY_RHS:
4781 return verify_gimple_assign_unary (stmt);
4783 case GIMPLE_BINARY_RHS:
4784 return verify_gimple_assign_binary (stmt);
4786 case GIMPLE_TERNARY_RHS:
4787 return verify_gimple_assign_ternary (stmt);
4789 default:
4790 gcc_unreachable ();
4794 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4795 is a problem, otherwise false. */
4797 static bool
4798 verify_gimple_return (greturn *stmt)
4800 tree op = gimple_return_retval (stmt);
4801 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4803 /* We cannot test for present return values as we do not fix up missing
4804 return values from the original source. */
4805 if (op == NULL)
4806 return false;
4808 if (!is_gimple_val (op)
4809 && TREE_CODE (op) != RESULT_DECL)
4811 error ("invalid operand in return statement");
4812 debug_generic_stmt (op);
4813 return true;
4816 if ((TREE_CODE (op) == RESULT_DECL
4817 && DECL_BY_REFERENCE (op))
4818 || (TREE_CODE (op) == SSA_NAME
4819 && SSA_NAME_VAR (op)
4820 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4821 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4822 op = TREE_TYPE (op);
4824 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4826 error ("invalid conversion in return statement");
4827 debug_generic_stmt (restype);
4828 debug_generic_stmt (TREE_TYPE (op));
4829 return true;
4832 return false;
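/* A hedged, ABI-dependent illustration (not part of the original
   file): when a large aggregate is returned through an invisible
   reference, the RESULT_DECL may be marked DECL_BY_REFERENCE; the
   verifier above then looks through the implicit pointer before
   comparing against the declared return type.  */
struct big { int a[64]; };
struct big make_big (void) { struct big b = {{0}}; return b; }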
4836 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4837 is a problem, otherwise false. */
4839 static bool
4840 verify_gimple_goto (ggoto *stmt)
4842 tree dest = gimple_goto_dest (stmt);
4844 /* ??? We have two canonical forms of direct goto destinations, a
4845 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4846 if (TREE_CODE (dest) != LABEL_DECL
4847 && (!is_gimple_val (dest)
4848 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4850 error ("goto destination is neither a label nor a pointer");
4851 return true;
4854 return false;
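/* A hedged illustration (not part of the original file, GNU C): a
   computed goto keeps its destination as a pointer value, the second
   canonical form accepted by the check above.  */
void dispatch (int i)
{
  static void *tab[] = { &&l0, &&l1 };
  goto *tab[i & 1];
 l0: return;
 l1: return;
}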
4857 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4858 is a problem, otherwise false. */
4860 static bool
4861 verify_gimple_switch (gswitch *stmt)
4863 unsigned int i, n;
4864 tree elt, prev_upper_bound = NULL_TREE;
4865 tree index_type, elt_type = NULL_TREE;
4867 if (!is_gimple_val (gimple_switch_index (stmt)))
4869 error ("invalid operand to switch statement");
4870 debug_generic_stmt (gimple_switch_index (stmt));
4871 return true;
4874 index_type = TREE_TYPE (gimple_switch_index (stmt));
4875 if (! INTEGRAL_TYPE_P (index_type))
4877 error ("non-integral type switch statement");
4878 debug_generic_expr (index_type);
4879 return true;
4882 elt = gimple_switch_label (stmt, 0);
4883 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4885 error ("invalid default case label in switch statement");
4886 debug_generic_expr (elt);
4887 return true;
4890 n = gimple_switch_num_labels (stmt);
4891 for (i = 1; i < n; i++)
4893 elt = gimple_switch_label (stmt, i);
4895 if (! CASE_LOW (elt))
4897 error ("invalid case label in switch statement");
4898 debug_generic_expr (elt);
4899 return true;
4901 if (CASE_HIGH (elt)
4902 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4904 error ("invalid case range in switch statement");
4905 debug_generic_expr (elt);
4906 return true;
4909 if (elt_type)
4911 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4912 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4914 error ("type mismatch for case label in switch statement");
4915 debug_generic_expr (elt);
4916 return true;
4919 else
4921 elt_type = TREE_TYPE (CASE_LOW (elt));
4922 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4924 error ("type precision mismatch in switch statement");
4925 return true;
4929 if (prev_upper_bound)
4931 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4933 error ("case labels not sorted in switch statement");
4934 return true;
4938 prev_upper_bound = CASE_HIGH (elt);
4939 if (! prev_upper_bound)
4940 prev_upper_bound = CASE_LOW (elt);
4943 return false;
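/* A hedged illustration (not part of the original file, GNU C): case
   ranges exercise the CASE_LOW/CASE_HIGH checks above; CASE_LOW must
   be strictly below CASE_HIGH, all labels must share one type, and
   the non-default labels must be sorted in increasing order.  */
int classify (int c)
{
  switch (c)
    {
    case 0 ... 9:     return 1;   /* CASE_LOW = 0, CASE_HIGH = 9 */
    case 'a' ... 'z': return 2;
    default:          return 0;   /* default: CASE_LOW and CASE_HIGH are NULL */
    }
}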
4946 /* Verify a gimple debug statement STMT.
4947 Returns true if anything is wrong. */
4949 static bool
4950 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4952 /* There isn't much that could be wrong in a gimple debug stmt. A
4953 gimple debug bind stmt, for example, maps a tree (usually a
4954 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4955 or member of an aggregate type) to another tree, which can be an
4956 arbitrary expression. These stmts expand into debug insns and
4957 are converted to debug notes by var-tracking.c. */
4958 return false;
4961 /* Verify a gimple label statement STMT.
4962 Returns true if anything is wrong. */
4964 static bool
4965 verify_gimple_label (glabel *stmt)
4967 tree decl = gimple_label_label (stmt);
4968 int uid;
4969 bool err = false;
4971 if (TREE_CODE (decl) != LABEL_DECL)
4972 return true;
4973 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4974 && DECL_CONTEXT (decl) != current_function_decl)
4976 error ("label's context is not the current function decl");
4977 err |= true;
4980 uid = LABEL_DECL_UID (decl);
4981 if (cfun->cfg
4982 && (uid == -1
4983 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4985 error ("incorrect entry in label_to_block_map");
4986 err |= true;
4989 uid = EH_LANDING_PAD_NR (decl);
4990 if (uid)
4992 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4993 if (decl != lp->post_landing_pad)
4995 error ("incorrect setting of landing pad number");
4996 err |= true;
5000 return err;
5003 /* Verify a gimple cond statement STMT.
5004 Returns true if anything is wrong. */
5006 static bool
5007 verify_gimple_cond (gcond *stmt)
5009 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5011 error ("invalid comparison code in gimple cond");
5012 return true;
5014 if (!(!gimple_cond_true_label (stmt)
5015 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5016 || !(!gimple_cond_false_label (stmt)
5017 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5019 error ("invalid labels in gimple cond");
5020 return true;
5023 return verify_gimple_comparison (boolean_type_node,
5024 gimple_cond_lhs (stmt),
5025 gimple_cond_rhs (stmt),
5026 gimple_cond_code (stmt));
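/* A hedged illustration (not part of the original file): the C
   statement below becomes a GIMPLE_COND roughly of the form
   "if (a_1(D) < b_2(D)) goto <then>; else goto <else>;", so its code
   is a tcc_comparison and its operands are verified like a comparison
   producing boolean_type_node.  */
int less_than (int a, int b) { if (a < b) return 1; return 0; }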
5029 /* Verify the GIMPLE statement STMT. Returns true if there is an
5030 error, otherwise false. */
5032 static bool
5033 verify_gimple_stmt (gimple *stmt)
5035 switch (gimple_code (stmt))
5037 case GIMPLE_ASSIGN:
5038 return verify_gimple_assign (as_a <gassign *> (stmt));
5040 case GIMPLE_LABEL:
5041 return verify_gimple_label (as_a <glabel *> (stmt));
5043 case GIMPLE_CALL:
5044 return verify_gimple_call (as_a <gcall *> (stmt));
5046 case GIMPLE_COND:
5047 return verify_gimple_cond (as_a <gcond *> (stmt));
5049 case GIMPLE_GOTO:
5050 return verify_gimple_goto (as_a <ggoto *> (stmt));
5052 case GIMPLE_SWITCH:
5053 return verify_gimple_switch (as_a <gswitch *> (stmt));
5055 case GIMPLE_RETURN:
5056 return verify_gimple_return (as_a <greturn *> (stmt));
5058 case GIMPLE_ASM:
5059 return false;
5061 case GIMPLE_TRANSACTION:
5062 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5064 /* Tuples that do not have tree operands. */
5065 case GIMPLE_NOP:
5066 case GIMPLE_PREDICT:
5067 case GIMPLE_RESX:
5068 case GIMPLE_EH_DISPATCH:
5069 case GIMPLE_EH_MUST_NOT_THROW:
5070 return false;
5072 CASE_GIMPLE_OMP:
5073 /* OpenMP directives are validated by the FE and never operated
5074 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5075 non-gimple expressions when the main index variable has had
5076 its address taken. This does not affect the loop itself
5077 because the header of a GIMPLE_OMP_FOR is merely used to determine
5078 how to set up the parallel iteration. */
5079 return false;
5081 case GIMPLE_DEBUG:
5082 return verify_gimple_debug (stmt);
5084 default:
5085 gcc_unreachable ();
5089 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5090 and false otherwise. */
5092 static bool
5093 verify_gimple_phi (gimple *phi)
5095 bool err = false;
5096 unsigned i;
5097 tree phi_result = gimple_phi_result (phi);
5098 bool virtual_p;
5100 if (!phi_result)
5102 error ("invalid PHI result");
5103 return true;
5106 virtual_p = virtual_operand_p (phi_result);
5107 if (TREE_CODE (phi_result) != SSA_NAME
5108 || (virtual_p
5109 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5111 error ("invalid PHI result");
5112 err = true;
5115 for (i = 0; i < gimple_phi_num_args (phi); i++)
5117 tree t = gimple_phi_arg_def (phi, i);
5119 if (!t)
5121 error ("missing PHI def");
5122 err |= true;
5123 continue;
5125 /* Addressable variables do have SSA_NAMEs but they
5126 are not considered gimple values. */
5127 else if ((TREE_CODE (t) == SSA_NAME
5128 && virtual_p != virtual_operand_p (t))
5129 || (virtual_p
5130 && (TREE_CODE (t) != SSA_NAME
5131 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5132 || (!virtual_p
5133 && !is_gimple_val (t)))
5135 error ("invalid PHI argument");
5136 debug_generic_expr (t);
5137 err |= true;
5139 #ifdef ENABLE_TYPES_CHECKING
5140 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5142 error ("incompatible types in PHI argument %u", i);
5143 debug_generic_stmt (TREE_TYPE (phi_result));
5144 debug_generic_stmt (TREE_TYPE (t));
5145 err |= true;
5147 #endif
5150 return err;
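/* A hedged illustration (not part of the original file): in SSA form
   the join point of the function below carries a PHI node roughly of
   the form "x_3 = PHI <1(then-bb), 2(else-bb)>"; each argument must
   be a GIMPLE value whose virtual/non-virtual kind matches the PHI
   result, per the checks above.  */
int pick (int c) { int x; if (c) x = 1; else x = 2; return x; }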
5153 /* Verify the GIMPLE statements inside the sequence STMTS. */
5155 static bool
5156 verify_gimple_in_seq_2 (gimple_seq stmts)
5158 gimple_stmt_iterator ittr;
5159 bool err = false;
5161 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5163 gimple *stmt = gsi_stmt (ittr);
5165 switch (gimple_code (stmt))
5167 case GIMPLE_BIND:
5168 err |= verify_gimple_in_seq_2 (
5169 gimple_bind_body (as_a <gbind *> (stmt)));
5170 break;
5172 case GIMPLE_TRY:
5173 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5174 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5175 break;
5177 case GIMPLE_EH_FILTER:
5178 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5179 break;
5181 case GIMPLE_EH_ELSE:
5183 geh_else *eh_else = as_a <geh_else *> (stmt);
5184 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5185 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5187 break;
5189 case GIMPLE_CATCH:
5190 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5191 as_a <gcatch *> (stmt)));
5192 break;
5194 case GIMPLE_TRANSACTION:
5195 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5196 break;
5198 default:
5200 bool err2 = verify_gimple_stmt (stmt);
5201 if (err2)
5202 debug_gimple_stmt (stmt);
5203 err |= err2;
5208 return err;
5211 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5212 is a problem, otherwise false. */
5214 static bool
5215 verify_gimple_transaction (gtransaction *stmt)
5217 tree lab;
5219 lab = gimple_transaction_label_norm (stmt);
5220 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5221 return true;
5222 lab = gimple_transaction_label_uninst (stmt);
5223 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5224 return true;
5225 lab = gimple_transaction_label_over (stmt);
5226 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5227 return true;
5229 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5233 /* Verify the GIMPLE statements inside the statement list STMTS. */
5235 DEBUG_FUNCTION void
5236 verify_gimple_in_seq (gimple_seq stmts)
5238 timevar_push (TV_TREE_STMT_VERIFY);
5239 if (verify_gimple_in_seq_2 (stmts))
5240 internal_error ("verify_gimple failed");
5241 timevar_pop (TV_TREE_STMT_VERIFY);
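/* A hedged usage sketch (check_fn_body is a hypothetical helper, not
   part of this file): before a CFG exists, a front end or early pass
   could verify a function's whole statement list this way; on any
   failure the verifier stops with internal_error.  */
static void
check_fn_body (tree fndecl)
{
  gimple_seq body = gimple_body (fndecl);
  if (body)
    verify_gimple_in_seq (body);
}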
5244 /* Return true when T can be shared. */
5246 static bool
5247 tree_node_can_be_shared (tree t)
5249 if (IS_TYPE_OR_DECL_P (t)
5250 || is_gimple_min_invariant (t)
5251 || TREE_CODE (t) == SSA_NAME
5252 || t == error_mark_node
5253 || TREE_CODE (t) == IDENTIFIER_NODE)
5254 return true;
5256 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5257 return true;
5259 if (DECL_P (t))
5260 return true;
5262 return false;
5265 /* Called via walk_tree. Verify tree sharing. */
5267 static tree
5268 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5270 hash_set<void *> *visited = (hash_set<void *> *) data;
5272 if (tree_node_can_be_shared (*tp))
5274 *walk_subtrees = false;
5275 return NULL;
5278 if (visited->add (*tp))
5279 return *tp;
5281 return NULL;
5284 /* Called via walk_gimple_stmt. Verify tree sharing. */
5286 static tree
5287 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5289 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5290 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5293 static bool eh_error_found;
5294 bool
5295 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5296 hash_set<gimple *> *visited)
5298 if (!visited->contains (stmt))
5300 error ("dead STMT in EH table");
5301 debug_gimple_stmt (stmt);
5302 eh_error_found = true;
5304 return true;
5307 /* Verify that the block of location LOC is in BLOCKS. */
5309 static bool
5310 verify_location (hash_set<tree> *blocks, location_t loc)
5312 tree block = LOCATION_BLOCK (loc);
5313 if (block != NULL_TREE
5314 && !blocks->contains (block))
5316 error ("location references block not in block tree");
5317 return true;
5319 if (block != NULL_TREE)
5320 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5321 return false;
5324 /* Called via walk_tree. Verify that expressions have no blocks. */
5326 static tree
5327 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5329 if (!EXPR_P (*tp))
5331 *walk_subtrees = false;
5332 return NULL;
5335 location_t loc = EXPR_LOCATION (*tp);
5336 if (LOCATION_BLOCK (loc) != NULL)
5337 return *tp;
5339 return NULL;
5342 /* Called via walk_tree. Verify locations of expressions. */
5344 static tree
5345 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5347 hash_set<tree> *blocks = (hash_set<tree> *) data;
5349 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5351 tree t = DECL_DEBUG_EXPR (*tp);
5352 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5353 if (addr)
5354 return addr;
5356 if ((VAR_P (*tp)
5357 || TREE_CODE (*tp) == PARM_DECL
5358 || TREE_CODE (*tp) == RESULT_DECL)
5359 && DECL_HAS_VALUE_EXPR_P (*tp))
5361 tree t = DECL_VALUE_EXPR (*tp);
5362 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5363 if (addr)
5364 return addr;
5367 if (!EXPR_P (*tp))
5369 *walk_subtrees = false;
5370 return NULL;
5373 location_t loc = EXPR_LOCATION (*tp);
5374 if (verify_location (blocks, loc))
5375 return *tp;
5377 return NULL;
5380 /* Called via walk_gimple_op. Verify locations of expressions. */
5382 static tree
5383 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5385 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5386 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5389 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5391 static void
5392 collect_subblocks (hash_set<tree> *blocks, tree block)
5394 tree t;
5395 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5397 blocks->add (t);
5398 collect_subblocks (blocks, t);
5402 /* Verify the GIMPLE statements in the CFG of FN. */
5404 DEBUG_FUNCTION void
5405 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5407 basic_block bb;
5408 bool err = false;
5410 timevar_push (TV_TREE_STMT_VERIFY);
5411 hash_set<void *> visited;
5412 hash_set<gimple *> visited_stmts;
5414 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5415 hash_set<tree> blocks;
5416 if (DECL_INITIAL (fn->decl))
5418 blocks.add (DECL_INITIAL (fn->decl));
5419 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5422 FOR_EACH_BB_FN (bb, fn)
5424 gimple_stmt_iterator gsi;
5426 for (gphi_iterator gpi = gsi_start_phis (bb);
5427 !gsi_end_p (gpi);
5428 gsi_next (&gpi))
5430 gphi *phi = gpi.phi ();
5431 bool err2 = false;
5432 unsigned i;
5434 visited_stmts.add (phi);
5436 if (gimple_bb (phi) != bb)
5438 error ("gimple_bb (phi) is set to a wrong basic block");
5439 err2 = true;
5442 err2 |= verify_gimple_phi (phi);
5444 /* Only PHI arguments have locations. */
5445 if (gimple_location (phi) != UNKNOWN_LOCATION)
5447 error ("PHI node with location");
5448 err2 = true;
5451 for (i = 0; i < gimple_phi_num_args (phi); i++)
5453 tree arg = gimple_phi_arg_def (phi, i);
5454 tree addr = walk_tree (&arg, verify_node_sharing_1,
5455 &visited, NULL);
5456 if (addr)
5458 error ("incorrect sharing of tree nodes");
5459 debug_generic_expr (addr);
5460 err2 |= true;
5462 location_t loc = gimple_phi_arg_location (phi, i);
5463 if (virtual_operand_p (gimple_phi_result (phi))
5464 && loc != UNKNOWN_LOCATION)
5466 error ("virtual PHI with argument locations");
5467 err2 = true;
5469 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5470 if (addr)
5472 debug_generic_expr (addr);
5473 err2 = true;
5475 err2 |= verify_location (&blocks, loc);
5478 if (err2)
5479 debug_gimple_stmt (phi);
5480 err |= err2;
5483 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5485 gimple *stmt = gsi_stmt (gsi);
5486 bool err2 = false;
5487 struct walk_stmt_info wi;
5488 tree addr;
5489 int lp_nr;
5491 visited_stmts.add (stmt);
5493 if (gimple_bb (stmt) != bb)
5495 error ("gimple_bb (stmt) is set to a wrong basic block");
5496 err2 = true;
5499 err2 |= verify_gimple_stmt (stmt);
5500 err2 |= verify_location (&blocks, gimple_location (stmt));
5502 memset (&wi, 0, sizeof (wi));
5503 wi.info = (void *) &visited;
5504 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5505 if (addr)
5507 error ("incorrect sharing of tree nodes");
5508 debug_generic_expr (addr);
5509 err2 |= true;
5512 memset (&wi, 0, sizeof (wi));
5513 wi.info = (void *) &blocks;
5514 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5515 if (addr)
5517 debug_generic_expr (addr);
5518 err2 |= true;
5521 /* ??? Instead of not checking these stmts at all the walker
5522 should know its context via wi. */
5523 if (!is_gimple_debug (stmt)
5524 && !is_gimple_omp (stmt))
5526 memset (&wi, 0, sizeof (wi));
5527 addr = walk_gimple_op (stmt, verify_expr, &wi);
5528 if (addr)
5530 debug_generic_expr (addr);
5531 inform (gimple_location (stmt), "in statement");
5532 err2 |= true;
5536 /* If the statement is marked as part of an EH region, then it is
5537 expected that the statement could throw. Verify that when we
5538 have optimizations that simplify statements such that we prove
5539 that they cannot throw, that we update other data structures
5540 to match. */
5541 lp_nr = lookup_stmt_eh_lp (stmt);
5542 if (lp_nr > 0)
5544 if (!stmt_could_throw_p (stmt))
5546 if (verify_nothrow)
5548 error ("statement marked for throw, but doesn%'t");
5549 err2 |= true;
5552 else if (!gsi_one_before_end_p (gsi))
5554 error ("statement marked for throw in middle of block");
5555 err2 |= true;
5559 if (err2)
5560 debug_gimple_stmt (stmt);
5561 err |= err2;
5565 eh_error_found = false;
5566 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5567 if (eh_table)
5568 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5569 (&visited_stmts);
5571 if (err || eh_error_found)
5572 internal_error ("verify_gimple failed");
5574 verify_histograms ();
5575 timevar_pop (TV_TREE_STMT_VERIFY);
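/* A hedged usage sketch (checking_verify_cfg is a hypothetical
   helper, not part of this file): once the CFG exists, checking
   builds typically re-run this verifier after a transformation;
   passing verify_nothrow = true additionally insists that statements
   still in the EH table can in fact throw.  */
static void
checking_verify_cfg (struct function *fn)
{
  if (flag_checking)
    verify_gimple_in_cfg (fn, /*verify_nothrow=*/true);
}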
5579 /* Verifies that the flow information is OK. */
5581 static int
5582 gimple_verify_flow_info (void)
5584 int err = 0;
5585 basic_block bb;
5586 gimple_stmt_iterator gsi;
5587 gimple *stmt;
5588 edge e;
5589 edge_iterator ei;
5591 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5592 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5594 error ("ENTRY_BLOCK has IL associated with it");
5595 err = 1;
5598 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5599 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5601 error ("EXIT_BLOCK has IL associated with it");
5602 err = 1;
5605 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5606 if (e->flags & EDGE_FALLTHRU)
5608 error ("fallthru to exit from bb %d", e->src->index);
5609 err = 1;
5612 FOR_EACH_BB_FN (bb, cfun)
5614 bool found_ctrl_stmt = false;
5616 stmt = NULL;
5618 /* Skip the labels at the start of the basic block. */
5619 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5621 tree label;
5622 gimple *prev_stmt = stmt;
5624 stmt = gsi_stmt (gsi);
5626 if (gimple_code (stmt) != GIMPLE_LABEL)
5627 break;
5629 label = gimple_label_label (as_a <glabel *> (stmt));
5630 if (prev_stmt && DECL_NONLOCAL (label))
5632 error ("nonlocal label ");
5633 print_generic_expr (stderr, label);
5634 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5635 bb->index);
5636 err = 1;
5639 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5641 error ("EH landing pad label ");
5642 print_generic_expr (stderr, label);
5643 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5644 bb->index);
5645 err = 1;
5648 if (label_to_block (label) != bb)
5650 error ("label ");
5651 print_generic_expr (stderr, label);
5652 fprintf (stderr, " to block does not match in bb %d",
5653 bb->index);
5654 err = 1;
5657 if (decl_function_context (label) != current_function_decl)
5659 error ("label ");
5660 print_generic_expr (stderr, label);
5661 fprintf (stderr, " has incorrect context in bb %d",
5662 bb->index);
5663 err = 1;
5667 /* Verify that the body of basic block BB is free of control flow. */
5668 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5670 gimple *stmt = gsi_stmt (gsi);
5672 if (found_ctrl_stmt)
5674 error ("control flow in the middle of basic block %d",
5675 bb->index);
5676 err = 1;
5679 if (stmt_ends_bb_p (stmt))
5680 found_ctrl_stmt = true;
5682 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5684 error ("label ");
5685 print_generic_expr (stderr, gimple_label_label (label_stmt));
5686 fprintf (stderr, " in the middle of basic block %d", bb->index);
5687 err = 1;
5691 gsi = gsi_last_nondebug_bb (bb);
5692 if (gsi_end_p (gsi))
5693 continue;
5695 stmt = gsi_stmt (gsi);
5697 if (gimple_code (stmt) == GIMPLE_LABEL)
5698 continue;
5700 err |= verify_eh_edges (stmt);
5702 if (is_ctrl_stmt (stmt))
5704 FOR_EACH_EDGE (e, ei, bb->succs)
5705 if (e->flags & EDGE_FALLTHRU)
5707 error ("fallthru edge after a control statement in bb %d",
5708 bb->index);
5709 err = 1;
5713 if (gimple_code (stmt) != GIMPLE_COND)
5715 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5716 after anything other than a GIMPLE_COND statement. */
5717 FOR_EACH_EDGE (e, ei, bb->succs)
5718 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5720 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5721 bb->index);
5722 err = 1;
5726 switch (gimple_code (stmt))
5728 case GIMPLE_COND:
5730 edge true_edge;
5731 edge false_edge;
5733 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5735 if (!true_edge
5736 || !false_edge
5737 || !(true_edge->flags & EDGE_TRUE_VALUE)
5738 || !(false_edge->flags & EDGE_FALSE_VALUE)
5739 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5740 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5741 || EDGE_COUNT (bb->succs) >= 3)
5743 error ("wrong outgoing edge flags at end of bb %d",
5744 bb->index);
5745 err = 1;
5748 break;
5750 case GIMPLE_GOTO:
5751 if (simple_goto_p (stmt))
5753 error ("explicit goto at end of bb %d", bb->index);
5754 err = 1;
5756 else
5758 /* FIXME. We should double-check that the labels in the
5759 destination blocks have their address taken. */
5760 FOR_EACH_EDGE (e, ei, bb->succs)
5761 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5762 | EDGE_FALSE_VALUE))
5763 || !(e->flags & EDGE_ABNORMAL))
5765 error ("wrong outgoing edge flags at end of bb %d",
5766 bb->index);
5767 err = 1;
5770 break;
5772 case GIMPLE_CALL:
5773 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5774 break;
5775 /* fallthru */
5776 case GIMPLE_RETURN:
5777 if (!single_succ_p (bb)
5778 || (single_succ_edge (bb)->flags
5779 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5780 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5782 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5783 err = 1;
5785 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5787 error ("return edge does not point to exit in bb %d",
5788 bb->index);
5789 err = 1;
5791 break;
5793 case GIMPLE_SWITCH:
5795 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5796 tree prev;
5797 edge e;
5798 size_t i, n;
5800 n = gimple_switch_num_labels (switch_stmt);
5802 /* Mark all the destination basic blocks. */
5803 for (i = 0; i < n; ++i)
5805 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5806 basic_block label_bb = label_to_block (lab);
5807 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5808 label_bb->aux = (void *)1;
5811 /* Verify that the case labels are sorted. */
5812 prev = gimple_switch_label (switch_stmt, 0);
5813 for (i = 1; i < n; ++i)
5815 tree c = gimple_switch_label (switch_stmt, i);
5816 if (!CASE_LOW (c))
5818 error ("found default case not at the start of "
5819 "case vector");
5820 err = 1;
5821 continue;
5823 if (CASE_LOW (prev)
5824 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5826 error ("case labels not sorted: ");
5827 print_generic_expr (stderr, prev);
5828 fprintf (stderr," is greater than ");
5829 print_generic_expr (stderr, c);
5830 fprintf (stderr," but comes before it.\n");
5831 err = 1;
5833 prev = c;
5835 /* VRP will remove the default case if it can prove it will
5836 never be executed. So do not verify there always exists
5837 a default case here. */
5839 FOR_EACH_EDGE (e, ei, bb->succs)
5841 if (!e->dest->aux)
5843 error ("extra outgoing edge %d->%d",
5844 bb->index, e->dest->index);
5845 err = 1;
5848 e->dest->aux = (void *)2;
5849 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5850 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5852 error ("wrong outgoing edge flags at end of bb %d",
5853 bb->index);
5854 err = 1;
5858 /* Check that we have all of them. */
5859 for (i = 0; i < n; ++i)
5861 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5862 basic_block label_bb = label_to_block (lab);
5864 if (label_bb->aux != (void *)2)
5866 error ("missing edge %i->%i", bb->index, label_bb->index);
5867 err = 1;
5871 FOR_EACH_EDGE (e, ei, bb->succs)
5872 e->dest->aux = (void *)0;
5874 break;
5876 case GIMPLE_EH_DISPATCH:
5877 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5878 break;
5880 default:
5881 break;
5885 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5886 verify_dominators (CDI_DOMINATORS);
5888 return err;
5892 /* Updates phi nodes after creating a forwarder block joined
5893 by edge FALLTHRU. */
5895 static void
5896 gimple_make_forwarder_block (edge fallthru)
5898 edge e;
5899 edge_iterator ei;
5900 basic_block dummy, bb;
5901 tree var;
5902 gphi_iterator gsi;
5904 dummy = fallthru->src;
5905 bb = fallthru->dest;
5907 if (single_pred_p (bb))
5908 return;
5910 /* If we redirected a branch we must create new PHI nodes at the
5911 start of BB. */
5912 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5914 gphi *phi, *new_phi;
5916 phi = gsi.phi ();
5917 var = gimple_phi_result (phi);
5918 new_phi = create_phi_node (var, bb);
5919 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5920 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5921 UNKNOWN_LOCATION);
5924 /* Add the arguments we have stored on edges. */
5925 FOR_EACH_EDGE (e, ei, bb->preds)
5927 if (e == fallthru)
5928 continue;
5930 flush_pending_stmts (e);
5935 /* Return a non-special label at the head of basic block BB.
5936 Create one if it doesn't exist. */
5938 tree
5939 gimple_block_label (basic_block bb)
5941 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5942 bool first = true;
5943 tree label;
5944 glabel *stmt;
5946 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5948 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5949 if (!stmt)
5950 break;
5951 label = gimple_label_label (stmt);
5952 if (!DECL_NONLOCAL (label))
5954 if (!first)
5955 gsi_move_before (&i, &s);
5956 return label;
5960 label = create_artificial_label (UNKNOWN_LOCATION);
5961 stmt = gimple_build_label (label);
5962 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5963 return label;
5967 /* Attempt to perform edge redirection by replacing a possibly complex
5968 jump instruction by a goto or by removing the jump completely.
5969 This can apply only if all edges now point to the same block. The
5970 parameters and return values are equivalent to
5971 redirect_edge_and_branch. */
5973 static edge
5974 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5976 basic_block src = e->src;
5977 gimple_stmt_iterator i;
5978 gimple *stmt;
5980 /* We can replace or remove a complex jump only when we have exactly
5981 two edges. */
5982 if (EDGE_COUNT (src->succs) != 2
5983 /* Verify that all targets will be TARGET. Specifically, the
5984 edge that is not E must also go to TARGET. */
5985 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5986 return NULL;
5988 i = gsi_last_bb (src);
5989 if (gsi_end_p (i))
5990 return NULL;
5992 stmt = gsi_stmt (i);
5994 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5996 gsi_remove (&i, true);
5997 e = ssa_redirect_edge (e, target);
5998 e->flags = EDGE_FALLTHRU;
5999 return e;
6002 return NULL;
6006 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6007 edge representing the redirected branch. */
6009 static edge
6010 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6012 basic_block bb = e->src;
6013 gimple_stmt_iterator gsi;
6014 edge ret;
6015 gimple *stmt;
6017 if (e->flags & EDGE_ABNORMAL)
6018 return NULL;
6020 if (e->dest == dest)
6021 return NULL;
6023 if (e->flags & EDGE_EH)
6024 return redirect_eh_edge (e, dest);
6026 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6028 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6029 if (ret)
6030 return ret;
6033 gsi = gsi_last_nondebug_bb (bb);
6034 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6036 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6038 case GIMPLE_COND:
6039 /* For COND_EXPR, we only need to redirect the edge. */
6040 break;
6042 case GIMPLE_GOTO:
6043 /* No non-abnormal edges should lead from a non-simple goto, and
6044 simple ones should be represented implicitly. */
6045 gcc_unreachable ();
6047 case GIMPLE_SWITCH:
6049 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6050 tree label = gimple_block_label (dest);
6051 tree cases = get_cases_for_edge (e, switch_stmt);
6053 /* If we have a list of cases associated with E, then use it
6054 as it's a lot faster than walking the entire case vector. */
6055 if (cases)
6057 edge e2 = find_edge (e->src, dest);
6058 tree last, first;
6060 first = cases;
6061 while (cases)
6063 last = cases;
6064 CASE_LABEL (cases) = label;
6065 cases = CASE_CHAIN (cases);
6068 /* If there was already an edge in the CFG, then we need
6069 to move all the cases associated with E to E2. */
6070 if (e2)
6072 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6074 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6075 CASE_CHAIN (cases2) = first;
6077 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6079 else
6081 size_t i, n = gimple_switch_num_labels (switch_stmt);
6083 for (i = 0; i < n; i++)
6085 tree elt = gimple_switch_label (switch_stmt, i);
6086 if (label_to_block (CASE_LABEL (elt)) == e->dest)
6087 CASE_LABEL (elt) = label;
6091 break;
6093 case GIMPLE_ASM:
6095 gasm *asm_stmt = as_a <gasm *> (stmt);
6096 int i, n = gimple_asm_nlabels (asm_stmt);
6097 tree label = NULL;
6099 for (i = 0; i < n; ++i)
6101 tree cons = gimple_asm_label_op (asm_stmt, i);
6102 if (label_to_block (TREE_VALUE (cons)) == e->dest)
6104 if (!label)
6105 label = gimple_block_label (dest);
6106 TREE_VALUE (cons) = label;
6110 /* If we didn't find any label matching the former edge in the
6111 asm labels, we must be redirecting the fallthrough
6112 edge. */
6113 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6115 break;
6117 case GIMPLE_RETURN:
6118 gsi_remove (&gsi, true);
6119 e->flags |= EDGE_FALLTHRU;
6120 break;
6122 case GIMPLE_OMP_RETURN:
6123 case GIMPLE_OMP_CONTINUE:
6124 case GIMPLE_OMP_SECTIONS_SWITCH:
6125 case GIMPLE_OMP_FOR:
6126 /* The edges from OMP constructs can be simply redirected. */
6127 break;
6129 case GIMPLE_EH_DISPATCH:
6130 if (!(e->flags & EDGE_FALLTHRU))
6131 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6132 break;
6134 case GIMPLE_TRANSACTION:
6135 if (e->flags & EDGE_TM_ABORT)
6136 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6137 gimple_block_label (dest));
6138 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6139 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6140 gimple_block_label (dest));
6141 else
6142 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6143 gimple_block_label (dest));
6144 break;
6146 default:
6147 /* Otherwise it must be a fallthru edge, and we don't need to
6148 do anything besides redirecting it. */
6149 gcc_assert (e->flags & EDGE_FALLTHRU);
6150 break;
6153 /* Update/insert PHI nodes as necessary. */
6155 /* Now update the edges in the CFG. */
6156 e = ssa_redirect_edge (e, dest);
6158 return e;
6161 /* Returns true if it is possible to remove edge E by redirecting
6162 it to the destination of the other edge from E->src. */
6164 static bool
6165 gimple_can_remove_branch_p (const_edge e)
6167 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6168 return false;
6170 return true;
6173 /* Simple wrapper, as we can always redirect fallthru edges. */
6175 static basic_block
6176 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6178 e = gimple_redirect_edge_and_branch (e, dest);
6179 gcc_assert (e);
6181 return NULL;
6185 /* Splits basic block BB after statement STMT (but at least after the
6186 labels). If STMT is NULL, BB is split just after the labels. */
6188 static basic_block
6189 gimple_split_block (basic_block bb, void *stmt)
6191 gimple_stmt_iterator gsi;
6192 gimple_stmt_iterator gsi_tgt;
6193 gimple_seq list;
6194 basic_block new_bb;
6195 edge e;
6196 edge_iterator ei;
6198 new_bb = create_empty_bb (bb);
6200 /* Redirect the outgoing edges. */
6201 new_bb->succs = bb->succs;
6202 bb->succs = NULL;
6203 FOR_EACH_EDGE (e, ei, new_bb->succs)
6204 e->src = new_bb;
6206 /* Get a stmt iterator pointing to the first stmt to move. */
6207 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6208 gsi = gsi_after_labels (bb);
6209 else
6211 gsi = gsi_for_stmt ((gimple *) stmt);
6212 gsi_next (&gsi);
6215 /* Move everything from GSI to the new basic block. */
6216 if (gsi_end_p (gsi))
6217 return new_bb;
6219 /* Split the statement list - avoid re-creating new containers as this
6220 brings ugly quadratic memory consumption in the inliner.
6221 (We are still quadratic since we need to update stmt BB pointers,
6222 sadly.) */
6223 gsi_split_seq_before (&gsi, &list);
6224 set_bb_seq (new_bb, list);
6225 for (gsi_tgt = gsi_start (list);
6226 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6227 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6229 return new_bb;
6233 /* Moves basic block BB after block AFTER. */
6235 static bool
6236 gimple_move_block_after (basic_block bb, basic_block after)
6238 if (bb->prev_bb == after)
6239 return true;
6241 unlink_block (bb);
6242 link_block (bb, after);
6244 return true;
6248 /* Return TRUE if block BB has no executable statements, otherwise return
6249 FALSE. */
6251 static bool
6252 gimple_empty_block_p (basic_block bb)
6254 /* BB must have no executable statements. */
6255 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6256 if (phi_nodes (bb))
6257 return false;
6258 if (gsi_end_p (gsi))
6259 return true;
6260 if (is_gimple_debug (gsi_stmt (gsi)))
6261 gsi_next_nondebug (&gsi);
6262 return gsi_end_p (gsi);
6266 /* Split a basic block if it ends with a conditional branch and if the
6267 other part of the block is not empty. */
6269 static basic_block
6270 gimple_split_block_before_cond_jump (basic_block bb)
6272 gimple *last, *split_point;
6273 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6274 if (gsi_end_p (gsi))
6275 return NULL;
6276 last = gsi_stmt (gsi);
6277 if (gimple_code (last) != GIMPLE_COND
6278 && gimple_code (last) != GIMPLE_SWITCH)
6279 return NULL;
6280 gsi_prev (&gsi);
6281 split_point = gsi_stmt (gsi);
6282 return split_block (bb, split_point)->dest;
6286 /* Return true if basic block BB can be duplicated. */
6288 static bool
6289 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6291 return true;
6294 /* Create a duplicate of the basic block BB. NOTE: This does not
6295 preserve SSA form. */
6297 static basic_block
6298 gimple_duplicate_bb (basic_block bb)
6300 basic_block new_bb;
6301 gimple_stmt_iterator gsi_tgt;
6303 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6305 /* Copy the PHI nodes. We ignore PHI node arguments here because
6306 the incoming edges have not been set up yet. */
6307 for (gphi_iterator gpi = gsi_start_phis (bb);
6308 !gsi_end_p (gpi);
6309 gsi_next (&gpi))
6311 gphi *phi, *copy;
6312 phi = gpi.phi ();
6313 copy = create_phi_node (NULL_TREE, new_bb);
6314 create_new_def_for (gimple_phi_result (phi), copy,
6315 gimple_phi_result_ptr (copy));
6316 gimple_set_uid (copy, gimple_uid (phi));
6319 gsi_tgt = gsi_start_bb (new_bb);
6320 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6321 !gsi_end_p (gsi);
6322 gsi_next (&gsi))
6324 def_operand_p def_p;
6325 ssa_op_iter op_iter;
6326 tree lhs;
6327 gimple *stmt, *copy;
6329 stmt = gsi_stmt (gsi);
6330 if (gimple_code (stmt) == GIMPLE_LABEL)
6331 continue;
6333 /* Don't duplicate label debug stmts. */
6334 if (gimple_debug_bind_p (stmt)
6335 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6336 == LABEL_DECL)
6337 continue;
6339 /* Create a new copy of STMT and duplicate STMT's virtual
6340 operands. */
6341 copy = gimple_copy (stmt);
6342 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6344 maybe_duplicate_eh_stmt (copy, stmt);
6345 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6347 /* When copying around a stmt writing into a local non-user
6348 aggregate, make sure it won't share stack slot with other
6349 vars. */
6350 lhs = gimple_get_lhs (stmt);
6351 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6353 tree base = get_base_address (lhs);
6354 if (base
6355 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6356 && DECL_IGNORED_P (base)
6357 && !TREE_STATIC (base)
6358 && !DECL_EXTERNAL (base)
6359 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6360 DECL_NONSHAREABLE (base) = 1;
6363 /* Create new names for all the definitions created by COPY and
6364 add replacement mappings for each new name. */
6365 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6366 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6369 return new_bb;
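/* A minimal usage sketch (assumed caller pattern, for illustration):
   this hook is normally reached through the cfghooks wrapper, e.g.

     basic_block copy = duplicate_block (bb, NULL, NULL);

   after which the caller is expected to create the incoming edges, add
   PHI arguments (see add_phi_args_after_copy_bb below) and update the
   SSA web, since this function alone does not preserve SSA form.  */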
6372 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6374 static void
6375 add_phi_args_after_copy_edge (edge e_copy)
6377 basic_block bb, bb_copy = e_copy->src, dest;
6378 edge e;
6379 edge_iterator ei;
6380 gphi *phi, *phi_copy;
6381 tree def;
6382 gphi_iterator psi, psi_copy;
6384 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6385 return;
6387 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6389 if (e_copy->dest->flags & BB_DUPLICATED)
6390 dest = get_bb_original (e_copy->dest);
6391 else
6392 dest = e_copy->dest;
6394 e = find_edge (bb, dest);
6395 if (!e)
6397 /* During loop unrolling the target of the latch edge is copied.
6398 In this case we are not looking for the edge to DEST, but for
6399 the edge to the duplicated block whose original was DEST. */
6400 FOR_EACH_EDGE (e, ei, bb->succs)
6402 if ((e->dest->flags & BB_DUPLICATED)
6403 && get_bb_original (e->dest) == dest)
6404 break;
6407 gcc_assert (e != NULL);
6410 for (psi = gsi_start_phis (e->dest),
6411 psi_copy = gsi_start_phis (e_copy->dest);
6412 !gsi_end_p (psi);
6413 gsi_next (&psi), gsi_next (&psi_copy))
6415 phi = psi.phi ();
6416 phi_copy = psi_copy.phi ();
6417 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6418 add_phi_arg (phi_copy, def, e_copy,
6419 gimple_phi_arg_location_from_edge (phi, e));
6424 /* Basic block BB_COPY was created by code duplication. Add phi node
6425 arguments for edges going out of BB_COPY. The blocks that were
6426 duplicated have BB_DUPLICATED set. */
6428 void
6429 add_phi_args_after_copy_bb (basic_block bb_copy)
6431 edge e_copy;
6432 edge_iterator ei;
6434 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6436 add_phi_args_after_copy_edge (e_copy);
6440 /* Blocks in REGION_COPY array of length N_REGION were created by
6441 duplication of basic blocks. Add phi node arguments for edges
6442 going from these blocks. If E_COPY is not NULL, also add
6443 phi node arguments for its destination. */
6445 void
6446 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6447 edge e_copy)
6449 unsigned i;
6451 for (i = 0; i < n_region; i++)
6452 region_copy[i]->flags |= BB_DUPLICATED;
6454 for (i = 0; i < n_region; i++)
6455 add_phi_args_after_copy_bb (region_copy[i]);
6456 if (e_copy)
6457 add_phi_args_after_copy_edge (e_copy);
6459 for (i = 0; i < n_region; i++)
6460 region_copy[i]->flags &= ~BB_DUPLICATED;
6463 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6464 important exit edge EXIT. By important we mean that no SSA name defined
6465 inside the region is live over the other exit edges of the region. All entry
6466 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6467 to the duplicate of the region. Dominance and loop information is
6468 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6469 UPDATE_DOMINANCE is false then we assume that the caller will update the
6470 dominance information after calling this function. The new basic
6471 blocks are stored to REGION_COPY in the same order as they had in REGION,
6472 provided that REGION_COPY is not NULL.
6473 The function returns false if it is unable to copy the region,
6474 true otherwise. */
6476 bool
6477 gimple_duplicate_sese_region (edge entry, edge exit,
6478 basic_block *region, unsigned n_region,
6479 basic_block *region_copy,
6480 bool update_dominance)
6482 unsigned i;
6483 bool free_region_copy = false, copying_header = false;
6484 struct loop *loop = entry->dest->loop_father;
6485 edge exit_copy;
6486 vec<basic_block> doms = vNULL;
6487 edge redirected;
6488 profile_count total_count = profile_count::uninitialized ();
6489 profile_count entry_count = profile_count::uninitialized ();
6491 if (!can_copy_bbs_p (region, n_region))
6492 return false;
6494 /* Some sanity checking. Note that we do not check for all possible
6495 misuses of the functions. I.e. if you ask to copy something weird,
6496 it will work, but the state of structures probably will not be
6497 correct. */
6498 for (i = 0; i < n_region; i++)
6500 /* We do not handle subloops, i.e. all the blocks must belong to the
6501 same loop. */
6502 if (region[i]->loop_father != loop)
6503 return false;
6505 if (region[i] != entry->dest
6506 && region[i] == loop->header)
6507 return false;
6510 /* In case the function is used for loop header copying (which is the primary
6511 use), ensure that EXIT and its copy will be new latch and entry edges. */
6512 if (loop->header == entry->dest)
6514 copying_header = true;
6516 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6517 return false;
6519 for (i = 0; i < n_region; i++)
6520 if (region[i] != exit->src
6521 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6522 return false;
6525 initialize_original_copy_tables ();
6527 if (copying_header)
6528 set_loop_copy (loop, loop_outer (loop));
6529 else
6530 set_loop_copy (loop, loop);
6532 if (!region_copy)
6534 region_copy = XNEWVEC (basic_block, n_region);
6535 free_region_copy = true;
6538 /* Record blocks outside the region that are dominated by something
6539 inside. */
6540 if (update_dominance)
6542 doms.create (0);
6543 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6546 if (entry->dest->count.initialized_p ())
6548 total_count = entry->dest->count;
6549 entry_count = entry->count ();
6550 /* Fix up corner cases, to avoid division by zero or creation of negative
6551 frequencies. */
6552 if (entry_count > total_count)
6553 entry_count = total_count;
6556 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6557 split_edge_bb_loc (entry), update_dominance);
6558 if (total_count.initialized_p () && entry_count.initialized_p ())
6560 scale_bbs_frequencies_profile_count (region, n_region,
6561 total_count - entry_count,
6562 total_count);
6563 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6564 total_count);
6567 if (copying_header)
6569 loop->header = exit->dest;
6570 loop->latch = exit->src;
6573 /* Redirect the entry and add the phi node arguments. */
6574 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6575 gcc_assert (redirected != NULL);
6576 flush_pending_stmts (entry);
6578 /* Concerning updating of dominators: We must recount dominators
6579 for entry block and its copy. Anything that is outside of the
6580 region, but was dominated by something inside needs recounting as
6581 well. */
6582 if (update_dominance)
6584 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6585 doms.safe_push (get_bb_original (entry->dest));
6586 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6587 doms.release ();
6590 /* Add the other PHI node arguments. */
6591 add_phi_args_after_copy (region_copy, n_region, NULL);
6593 if (free_region_copy)
6594 free (region_copy);
6596 free_original_copy_tables ();
6597 return true;
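/* Illustrative use (a hedged sketch; the primary caller is loop header
   copying): with ENTRY the preheader edge and EXIT the edge towards
   the latch, duplicating the header region turns

     while (cond) body;

   into the guarded form

     if (cond) do body; while (cond);

   where the copy of the header becomes the entry test.  */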
6600 /* Checks if BB is part of the region defined by N_REGION BBS. */
6601 static bool
6602 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6604 unsigned int n;
6606 for (n = 0; n < n_region; n++)
6608 if (bb == bbs[n])
6609 return true;
6611 return false;
6614 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6615 are stored to REGION_COPY in the same order as they appear
6616 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6617 the region, EXIT an exit from it. The condition guarding EXIT
6618 is moved to ENTRY. Returns true if duplication succeeds, false
6619 otherwise.
6621 For example,
6623 some_code;
6624 if (cond)
6625 A;
6626 else
6627 B;
6629 is transformed to
6631 if (cond)
6632 {
6633 some_code;
6634 A;
6635 }
6636 else
6637 {
6638 some_code;
6639 B;
6640 }
6643 bool
6644 gimple_duplicate_sese_tail (edge entry, edge exit,
6645 basic_block *region, unsigned n_region,
6646 basic_block *region_copy)
6648 unsigned i;
6649 bool free_region_copy = false;
6650 struct loop *loop = exit->dest->loop_father;
6651 struct loop *orig_loop = entry->dest->loop_father;
6652 basic_block switch_bb, entry_bb, nentry_bb;
6653 vec<basic_block> doms;
6654 profile_count total_count = profile_count::uninitialized (),
6655 exit_count = profile_count::uninitialized ();
6656 edge exits[2], nexits[2], e;
6657 gimple_stmt_iterator gsi;
6658 gimple *cond_stmt;
6659 edge sorig, snew;
6660 basic_block exit_bb;
6661 gphi_iterator psi;
6662 gphi *phi;
6663 tree def;
6664 struct loop *target, *aloop, *cloop;
6666 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6667 exits[0] = exit;
6668 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6670 if (!can_copy_bbs_p (region, n_region))
6671 return false;
6673 initialize_original_copy_tables ();
6674 set_loop_copy (orig_loop, loop);
6676 target = loop;
6677 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6679 if (bb_part_of_region_p (aloop->header, region, n_region))
6681 cloop = duplicate_loop (aloop, target);
6682 duplicate_subloops (aloop, cloop);
6686 if (!region_copy)
6688 region_copy = XNEWVEC (basic_block, n_region);
6689 free_region_copy = true;
6692 gcc_assert (!need_ssa_update_p (cfun));
6694 /* Record blocks outside the region that are dominated by something
6695 inside. */
6696 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6698 total_count = exit->src->count;
6699 exit_count = exit->count ();
6700 /* Fix up corner cases, to avoid division by zero or creation of negative
6701 frequencies. */
6702 if (exit_count > total_count)
6703 exit_count = total_count;
6705 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6706 split_edge_bb_loc (exit), true);
6707 if (total_count.initialized_p () && exit_count.initialized_p ())
6709 scale_bbs_frequencies_profile_count (region, n_region,
6710 total_count - exit_count,
6711 total_count);
6712 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6713 total_count);
6716 /* Create the switch block, and put the exit condition to it. */
6717 entry_bb = entry->dest;
6718 nentry_bb = get_bb_copy (entry_bb);
6719 if (!last_stmt (entry->src)
6720 || !stmt_ends_bb_p (last_stmt (entry->src)))
6721 switch_bb = entry->src;
6722 else
6723 switch_bb = split_edge (entry);
6724 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6726 gsi = gsi_last_bb (switch_bb);
6727 cond_stmt = last_stmt (exit->src);
6728 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6729 cond_stmt = gimple_copy (cond_stmt);
6731 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6733 sorig = single_succ_edge (switch_bb);
6734 sorig->flags = exits[1]->flags;
6735 sorig->probability = exits[1]->probability;
6736 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6737 snew->probability = exits[0]->probability;
6740 /* Register the new edge from SWITCH_BB in loop exit lists. */
6741 rescan_loop_exit (snew, true, false);
6743 /* Add the PHI node arguments. */
6744 add_phi_args_after_copy (region_copy, n_region, snew);
6746 /* Get rid of now superfluous conditions and associated edges (and phi node
6747 arguments). */
6748 exit_bb = exit->dest;
6750 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6751 PENDING_STMT (e) = NULL;
6753 /* The latch of ORIG_LOOP was copied, and so was the backedge
6754 to the original header. We redirect this backedge to EXIT_BB. */
6755 for (i = 0; i < n_region; i++)
6756 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6758 gcc_assert (single_succ_edge (region_copy[i]));
6759 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6760 PENDING_STMT (e) = NULL;
6761 for (psi = gsi_start_phis (exit_bb);
6762 !gsi_end_p (psi);
6763 gsi_next (&psi))
6765 phi = psi.phi ();
6766 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6767 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6770 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6771 PENDING_STMT (e) = NULL;
6773 /* Anything that is outside of the region, but was dominated by something
6774 inside needs to update dominance info. */
6775 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6776 doms.release ();
6777 /* Update the SSA web. */
6778 update_ssa (TODO_update_ssa);
6780 if (free_region_copy)
6781 free (region_copy);
6783 free_original_copy_tables ();
6784 return true;
6787 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6788 adding blocks when the dominator traversal reaches EXIT. This
6789 function silently assumes that ENTRY strictly dominates EXIT. */
6791 void
6792 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6793 vec<basic_block> *bbs_p)
6795 basic_block son;
6797 for (son = first_dom_son (CDI_DOMINATORS, entry);
6798 son;
6799 son = next_dom_son (CDI_DOMINATORS, son))
6801 bbs_p->safe_push (son);
6802 if (son != exit)
6803 gather_blocks_in_sese_region (son, exit, bbs_p);
6807 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6808 The duplicates are recorded in VARS_MAP. */
6810 static void
6811 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6812 tree to_context)
6814 tree t = *tp, new_t;
6815 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6817 if (DECL_CONTEXT (t) == to_context)
6818 return;
6820 bool existed;
6821 tree &loc = vars_map->get_or_insert (t, &existed);
6823 if (!existed)
6825 if (SSA_VAR_P (t))
6827 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6828 add_local_decl (f, new_t);
6830 else
6832 gcc_assert (TREE_CODE (t) == CONST_DECL);
6833 new_t = copy_node (t);
6835 DECL_CONTEXT (new_t) = to_context;
6837 loc = new_t;
6839 else
6840 new_t = loc;
6842 *tp = new_t;
6846 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6847 VARS_MAP maps old ssa names and var_decls to the new ones. */
6849 static tree
6850 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6851 tree to_context)
6853 tree new_name;
6855 gcc_assert (!virtual_operand_p (name));
6857 tree *loc = vars_map->get (name);
6859 if (!loc)
6861 tree decl = SSA_NAME_VAR (name);
6862 if (decl)
6864 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6865 replace_by_duplicate_decl (&decl, vars_map, to_context);
6866 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6867 decl, SSA_NAME_DEF_STMT (name));
6869 else
6870 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6871 name, SSA_NAME_DEF_STMT (name));
6873 /* Now that we've used the def stmt to define new_name, make sure it
6874 doesn't define name anymore. */
6875 SSA_NAME_DEF_STMT (name) = NULL;
6877 vars_map->put (name, new_name);
6879 else
6880 new_name = *loc;
6882 return new_name;
6885 struct move_stmt_d
6887 tree orig_block;
6888 tree new_block;
6889 tree from_context;
6890 tree to_context;
6891 hash_map<tree, tree> *vars_map;
6892 htab_t new_label_map;
6893 hash_map<void *, void *> *eh_map;
6894 bool remap_decls_p;
6897 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6898 contained in *TP if it has been ORIG_BLOCK previously and change the
6899 DECL_CONTEXT of every local variable referenced in *TP. */
6901 static tree
6902 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6904 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6905 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6906 tree t = *tp;
6908 if (EXPR_P (t))
6910 tree block = TREE_BLOCK (t);
6911 if (block == NULL_TREE)
6913 else if (block == p->orig_block
6914 || p->orig_block == NULL_TREE)
6915 TREE_SET_BLOCK (t, p->new_block);
6916 else if (flag_checking)
6918 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6919 block = BLOCK_SUPERCONTEXT (block);
6920 gcc_assert (block == p->orig_block);
6923 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6925 if (TREE_CODE (t) == SSA_NAME)
6926 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6927 else if (TREE_CODE (t) == PARM_DECL
6928 && gimple_in_ssa_p (cfun))
6929 *tp = *(p->vars_map->get (t));
6930 else if (TREE_CODE (t) == LABEL_DECL)
6932 if (p->new_label_map)
6934 struct tree_map in, *out;
6935 in.base.from = t;
6936 out = (struct tree_map *)
6937 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6938 if (out)
6939 *tp = t = out->to;
6942 /* For FORCED_LABELs we can end up with references from other
6943 functions if some SESE regions are outlined. It is UB to
6944 jump in between them, but they could be used just for printing
6945 addresses etc. In that case, DECL_CONTEXT on the label should
6946 be the function containing the glabel stmt with that LABEL_DECL,
6947 rather than whichever function a reference to the label happened
6948 to be seen in last. */
6949 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6950 DECL_CONTEXT (t) = p->to_context;
6952 else if (p->remap_decls_p)
6954 /* Replace T with its duplicate. T should no longer appear in the
6955 parent function, so this looks wasteful; however, it may appear
6956 in referenced_vars, and more importantly, as virtual operands of
6957 statements, and in alias lists of other variables. It would be
6958 quite difficult to expunge it from all those places. ??? It might
6959 suffice to do this for addressable variables. */
6960 if ((VAR_P (t) && !is_global_var (t))
6961 || TREE_CODE (t) == CONST_DECL)
6962 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6964 *walk_subtrees = 0;
6966 else if (TYPE_P (t))
6967 *walk_subtrees = 0;
6969 return NULL_TREE;
6972 /* Helper for move_stmt_r. Given an EH region number for the source
6973 function, map that to the duplicate EH region number in the dest. */
6975 static int
6976 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6978 eh_region old_r, new_r;
6980 old_r = get_eh_region_from_number (old_nr);
6981 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6983 return new_r->index;
6986 /* Similar, but operate on INTEGER_CSTs. */
6988 static tree
6989 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6991 int old_nr, new_nr;
6993 old_nr = tree_to_shwi (old_t_nr);
6994 new_nr = move_stmt_eh_region_nr (old_nr, p);
6996 return build_int_cst (integer_type_node, new_nr);
6999 /* Like move_stmt_op, but for gimple statements.
7001 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7002 contained in the current statement in *GSI_P and change the
7003 DECL_CONTEXT of every local variable referenced in the current
7004 statement. */
7006 static tree
7007 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7008 struct walk_stmt_info *wi)
7010 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7011 gimple *stmt = gsi_stmt (*gsi_p);
7012 tree block = gimple_block (stmt);
7014 if (block == p->orig_block
7015 || (p->orig_block == NULL_TREE
7016 && block != NULL_TREE))
7017 gimple_set_block (stmt, p->new_block);
7019 switch (gimple_code (stmt))
7021 case GIMPLE_CALL:
7022 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7024 tree r, fndecl = gimple_call_fndecl (stmt);
7025 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
7026 switch (DECL_FUNCTION_CODE (fndecl))
7028 case BUILT_IN_EH_COPY_VALUES:
7029 r = gimple_call_arg (stmt, 1);
7030 r = move_stmt_eh_region_tree_nr (r, p);
7031 gimple_call_set_arg (stmt, 1, r);
7032 /* FALLTHRU */
7034 case BUILT_IN_EH_POINTER:
7035 case BUILT_IN_EH_FILTER:
7036 r = gimple_call_arg (stmt, 0);
7037 r = move_stmt_eh_region_tree_nr (r, p);
7038 gimple_call_set_arg (stmt, 0, r);
7039 break;
7041 default:
7042 break;
7045 break;
7047 case GIMPLE_RESX:
7049 gresx *resx_stmt = as_a <gresx *> (stmt);
7050 int r = gimple_resx_region (resx_stmt);
7051 r = move_stmt_eh_region_nr (r, p);
7052 gimple_resx_set_region (resx_stmt, r);
7054 break;
7056 case GIMPLE_EH_DISPATCH:
7058 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7059 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7060 r = move_stmt_eh_region_nr (r, p);
7061 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7063 break;
7065 case GIMPLE_OMP_RETURN:
7066 case GIMPLE_OMP_CONTINUE:
7067 break;
7069 case GIMPLE_LABEL:
7071 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7072 so that such labels can be referenced from other regions.
7073 Make sure to update it when seeing a GIMPLE_LABEL though,
7074 that is the owner of the label. */
7075 walk_gimple_op (stmt, move_stmt_op, wi);
7076 *handled_ops_p = true;
7077 tree label = gimple_label_label (as_a <glabel *> (stmt));
7078 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7079 DECL_CONTEXT (label) = p->to_context;
7081 break;
7083 default:
7084 if (is_gimple_omp (stmt))
7086 /* Do not remap variables inside OMP directives. Variables
7087 referenced in clauses and directive header belong to the
7088 parent function and should not be moved into the child
7089 function. */
7090 bool save_remap_decls_p = p->remap_decls_p;
7091 p->remap_decls_p = false;
7092 *handled_ops_p = true;
7094 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7095 move_stmt_op, wi);
7097 p->remap_decls_p = save_remap_decls_p;
7099 break;
7102 return NULL_TREE;
7105 /* Move basic block BB from function CFUN to function DEST_FN. The
7106 block is moved out of the original linked list and placed after
7107 block AFTER in the new list. Also, the block is removed from the
7108 original array of blocks and placed in DEST_FN's array of blocks.
7109 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7110 updated to reflect the moved edges.
7112 The local variables are remapped to new instances, VARS_MAP is used
7113 to record the mapping. */
7115 static void
7116 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7117 basic_block after, bool update_edge_count_p,
7118 struct move_stmt_d *d)
7120 struct control_flow_graph *cfg;
7121 edge_iterator ei;
7122 edge e;
7123 gimple_stmt_iterator si;
7124 unsigned old_len, new_len;
7126 /* Remove BB from dominance structures. */
7127 delete_from_dominance_info (CDI_DOMINATORS, bb);
7129 /* Move BB from its current loop to the copy in the new function. */
7130 if (current_loops)
7132 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7133 if (new_loop)
7134 bb->loop_father = new_loop;
7137 /* Link BB to the new linked list. */
7138 move_block_after (bb, after);
7140 /* Update the edge count in the corresponding flowgraphs. */
7141 if (update_edge_count_p)
7142 FOR_EACH_EDGE (e, ei, bb->succs)
7144 cfun->cfg->x_n_edges--;
7145 dest_cfun->cfg->x_n_edges++;
7148 /* Remove BB from the original basic block array. */
7149 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7150 cfun->cfg->x_n_basic_blocks--;
7152 /* Grow DEST_CFUN's basic block array if needed. */
7153 cfg = dest_cfun->cfg;
7154 cfg->x_n_basic_blocks++;
7155 if (bb->index >= cfg->x_last_basic_block)
7156 cfg->x_last_basic_block = bb->index + 1;
7158 old_len = vec_safe_length (cfg->x_basic_block_info);
7159 if ((unsigned) cfg->x_last_basic_block >= old_len)
7161 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7162 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7165 (*cfg->x_basic_block_info)[bb->index] = bb;
7167 /* Remap the variables in phi nodes. */
7168 for (gphi_iterator psi = gsi_start_phis (bb);
7169 !gsi_end_p (psi); )
7171 gphi *phi = psi.phi ();
7172 use_operand_p use;
7173 tree op = PHI_RESULT (phi);
7174 ssa_op_iter oi;
7175 unsigned i;
7177 if (virtual_operand_p (op))
7179 /* Remove the phi nodes for virtual operands (alias analysis will be
7180 run for the new function, anyway). */
7181 remove_phi_node (&psi, true);
7182 continue;
7185 SET_PHI_RESULT (phi,
7186 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7187 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7189 op = USE_FROM_PTR (use);
7190 if (TREE_CODE (op) == SSA_NAME)
7191 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7194 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7196 location_t locus = gimple_phi_arg_location (phi, i);
7197 tree block = LOCATION_BLOCK (locus);
7199 if (locus == UNKNOWN_LOCATION)
7200 continue;
7201 if (d->orig_block == NULL_TREE || block == d->orig_block)
7203 locus = set_block (locus, d->new_block);
7204 gimple_phi_arg_set_location (phi, i, locus);
7208 gsi_next (&psi);
7211 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7213 gimple *stmt = gsi_stmt (si);
7214 struct walk_stmt_info wi;
7216 memset (&wi, 0, sizeof (wi));
7217 wi.info = d;
7218 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7220 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7222 tree label = gimple_label_label (label_stmt);
7223 int uid = LABEL_DECL_UID (label);
7225 gcc_assert (uid > -1);
7227 old_len = vec_safe_length (cfg->x_label_to_block_map);
7228 if (old_len <= (unsigned) uid)
7230 new_len = 3 * uid / 2 + 1;
7231 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7234 (*cfg->x_label_to_block_map)[uid] = bb;
7235 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7237 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7239 if (uid >= dest_cfun->cfg->last_label_uid)
7240 dest_cfun->cfg->last_label_uid = uid + 1;
7243 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7244 remove_stmt_from_eh_lp_fn (cfun, stmt);
7246 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7247 gimple_remove_stmt_histograms (cfun, stmt);
7249 /* We cannot leave any operands allocated from the operand caches of
7250 the current function. */
7251 free_stmt_operands (cfun, stmt);
7252 push_cfun (dest_cfun);
7253 update_stmt (stmt);
7254 pop_cfun ();
7257 FOR_EACH_EDGE (e, ei, bb->succs)
7258 if (e->goto_locus != UNKNOWN_LOCATION)
7260 tree block = LOCATION_BLOCK (e->goto_locus);
7261 if (d->orig_block == NULL_TREE
7262 || block == d->orig_block)
7263 e->goto_locus = set_block (e->goto_locus, d->new_block);
7267 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7268 the outermost EH region. Use REGION as the incoming base EH region. */
7270 static eh_region
7271 find_outermost_region_in_block (struct function *src_cfun,
7272 basic_block bb, eh_region region)
7274 gimple_stmt_iterator si;
7276 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7278 gimple *stmt = gsi_stmt (si);
7279 eh_region stmt_region;
7280 int lp_nr;
7282 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7283 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7284 if (stmt_region)
7286 if (region == NULL)
7287 region = stmt_region;
7288 else if (stmt_region != region)
7290 region = eh_region_outermost (src_cfun, stmt_region, region);
7291 gcc_assert (region != NULL);
7296 return region;
7299 static tree
7300 new_label_mapper (tree decl, void *data)
7302 htab_t hash = (htab_t) data;
7303 struct tree_map *m;
7304 void **slot;
7306 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7308 m = XNEW (struct tree_map);
7309 m->hash = DECL_UID (decl);
7310 m->base.from = decl;
7311 m->to = create_artificial_label (UNKNOWN_LOCATION);
7312 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7313 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7314 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7316 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7317 gcc_assert (*slot == NULL);
7319 *slot = m;
7321 return m->to;
7324 /* Tree walker to replace the decls used inside value expressions by
7325 duplicates. */
7327 static tree
7328 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7330 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7332 switch (TREE_CODE (*tp))
7334 case VAR_DECL:
7335 case PARM_DECL:
7336 case RESULT_DECL:
7337 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7338 break;
7339 default:
7340 break;
7343 if (IS_TYPE_OR_DECL_P (*tp))
7344 *walk_subtrees = false;
7346 return NULL;
7349 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7350 subblocks. */
7352 static void
7353 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7354 tree to_context)
7356 tree *tp, t;
7358 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7360 t = *tp;
7361 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7362 continue;
7363 replace_by_duplicate_decl (&t, vars_map, to_context);
7364 if (t != *tp)
7366 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7368 tree x = DECL_VALUE_EXPR (*tp);
7369 struct replace_decls_d rd = { vars_map, to_context };
7370 x = unshare_expr (x);
7371 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7372 SET_DECL_VALUE_EXPR (t, x);
7373 DECL_HAS_VALUE_EXPR_P (t) = 1;
7375 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7376 *tp = t;
7380 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7381 replace_block_vars_by_duplicates (block, vars_map, to_context);
7384 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7385 from FN1 to FN2. */
7387 static void
7388 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7389 struct loop *loop)
7391 /* Discard it from the old loop array. */
7392 (*get_loops (fn1))[loop->num] = NULL;
7394 /* Place it in the new loop array, assigning it a new number. */
7395 loop->num = number_of_loops (fn2);
7396 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7398 /* Recurse to children. */
7399 for (loop = loop->inner; loop; loop = loop->next)
7400 fixup_loop_arrays_after_move (fn1, fn2, loop);
7403 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7404 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7406 DEBUG_FUNCTION void
7407 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7409 basic_block bb;
7410 edge_iterator ei;
7411 edge e;
7412 bitmap bbs = BITMAP_ALLOC (NULL);
7413 int i;
7415 gcc_assert (entry != NULL);
7416 gcc_assert (entry != exit);
7417 gcc_assert (bbs_p != NULL);
7419 gcc_assert (bbs_p->length () > 0);
7421 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7422 bitmap_set_bit (bbs, bb->index);
7424 gcc_assert (bitmap_bit_p (bbs, entry->index));
7425 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7427 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7429 if (bb == entry)
7431 gcc_assert (single_pred_p (entry));
7432 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7434 else
7435 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7437 e = ei_edge (ei);
7438 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7441 if (bb == exit)
7443 gcc_assert (single_succ_p (exit));
7444 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7446 else
7447 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7449 e = ei_edge (ei);
7450 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7454 BITMAP_FREE (bbs);
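/* Shape of a valid region, for intuition (illustration only):

          |                <- single edge into ENTRY from outside
        ENTRY
        /    \
      ...    ...           <- all intermediate edges stay inside
        \    /
         EXIT
          |                <- single edge out of EXIT to outside

   noreturn blocks may additionally sit inside without reaching EXIT.  */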
7457 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7459 bool
7460 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7462 bitmap release_names = (bitmap)data;
7464 if (TREE_CODE (from) != SSA_NAME)
7465 return true;
7467 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7468 return true;
7471 /* Return the LOOP_DIST_ALIAS call if present in BB, otherwise NULL. */
7473 static gimple *
7474 find_loop_dist_alias (basic_block bb)
7476 gimple *g = last_stmt (bb);
7477 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7478 return NULL;
7480 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7481 gsi_prev (&gsi);
7482 if (gsi_end_p (gsi))
7483 return NULL;
7485 g = gsi_stmt (gsi);
7486 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7487 return g;
7488 return NULL;
7491 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7492 to VALUE and update any immediate uses of its LHS. */
7494 void
7495 fold_loop_internal_call (gimple *g, tree value)
7497 tree lhs = gimple_call_lhs (g);
7498 use_operand_p use_p;
7499 imm_use_iterator iter;
7500 gimple *use_stmt;
7501 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7503 update_call_from_tree (&gsi, value);
7504 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7506 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7507 SET_USE (use_p, value);
7508 update_stmt (use_stmt);
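/* For example (assumed GIMPLE, not from the sources): folding

     _5 = .LOOP_DIST_ALIAS (3, _flag);
     if (_5 != 0) goto <bb A>; else goto <bb B>;

   with VALUE == _flag turns the call into the assignment "_5 = _flag;"
   and rewrites the immediate use so the condition reads
   "if (_flag != 0)".  */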
7512 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7513 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7514 single basic block in the original CFG and the new basic block is
7515 returned. DEST_CFUN must not have a CFG yet.
7517 Note that the region need not be a pure SESE region. Blocks inside
7518 the region may contain calls to abort/exit. The only restriction
7519 is that ENTRY_BB should be the only entry point and it must
7520 dominate EXIT_BB.
7522 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7523 function's outermost BLOCK, move all subblocks of ORIG_BLOCK
7524 to the new function.
7526 All local variables referenced in the region are assumed to be in
7527 the corresponding BLOCK_VARS and unexpanded variable lists
7528 associated with DEST_CFUN.
7530 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7531 reimplement move_sese_region_to_fn by duplicating the region rather than
7532 moving it. */
7534 basic_block
7535 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7536 basic_block exit_bb, tree orig_block)
7538 vec<basic_block> bbs, dom_bbs;
7539 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7540 basic_block after, bb, *entry_pred, *exit_succ, abb;
7541 struct function *saved_cfun = cfun;
7542 int *entry_flag, *exit_flag;
7543 profile_probability *entry_prob, *exit_prob;
7544 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7545 edge e;
7546 edge_iterator ei;
7547 htab_t new_label_map;
7548 hash_map<void *, void *> *eh_map;
7549 struct loop *loop = entry_bb->loop_father;
7550 struct loop *loop0 = get_loop (saved_cfun, 0);
7551 struct move_stmt_d d;
7553 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7554 region. */
7555 gcc_assert (entry_bb != exit_bb
7556 && (!exit_bb
7557 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7559 /* Collect all the blocks in the region. Manually add ENTRY_BB
7560 because it won't be added by dfs_enumerate_from. */
7561 bbs.create (0);
7562 bbs.safe_push (entry_bb);
7563 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7565 if (flag_checking)
7566 verify_sese (entry_bb, exit_bb, &bbs);
7568 /* The blocks that used to be dominated by something in BBS will now be
7569 dominated by the new block. */
7570 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7571 bbs.address (),
7572 bbs.length ());
7574 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7575 the predecessor edges to ENTRY_BB and the successor edges to
7576 EXIT_BB so that we can re-attach them to the new basic block that
7577 will replace the region. */
7578 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7579 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7580 entry_flag = XNEWVEC (int, num_entry_edges);
7581 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7582 i = 0;
7583 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7585 entry_prob[i] = e->probability;
7586 entry_flag[i] = e->flags;
7587 entry_pred[i++] = e->src;
7588 remove_edge (e);
7591 if (exit_bb)
7593 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7594 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7595 exit_flag = XNEWVEC (int, num_exit_edges);
7596 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7597 i = 0;
7598 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7600 exit_prob[i] = e->probability;
7601 exit_flag[i] = e->flags;
7602 exit_succ[i++] = e->dest;
7603 remove_edge (e);
7606 else
7608 num_exit_edges = 0;
7609 exit_succ = NULL;
7610 exit_flag = NULL;
7611 exit_prob = NULL;
7614 /* Switch context to the child function to initialize DEST_FN's CFG. */
7615 gcc_assert (dest_cfun->cfg == NULL);
7616 push_cfun (dest_cfun);
7618 init_empty_tree_cfg ();
7620 /* Initialize EH information for the new function. */
7621 eh_map = NULL;
7622 new_label_map = NULL;
7623 if (saved_cfun->eh)
7625 eh_region region = NULL;
7627 FOR_EACH_VEC_ELT (bbs, i, bb)
7628 region = find_outermost_region_in_block (saved_cfun, bb, region);
7630 init_eh_for_function ();
7631 if (region != NULL)
7633 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7634 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7635 new_label_mapper, new_label_map);
7639 /* Initialize an empty loop tree. */
7640 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7641 init_loops_structure (dest_cfun, loops, 1);
7642 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7643 set_loops_for_fn (dest_cfun, loops);
7645 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7647 /* Move the outlined loop tree part. */
7648 num_nodes = bbs.length ();
7649 FOR_EACH_VEC_ELT (bbs, i, bb)
7651 if (bb->loop_father->header == bb)
7653 struct loop *this_loop = bb->loop_father;
7654 struct loop *outer = loop_outer (this_loop);
7655 if (outer == loop
7656 /* If the SESE region contains some bbs ending with
7657 a noreturn call, those are considered to belong
7658 to the outermost loop in saved_cfun, rather than
7659 the entry_bb's loop_father. */
7660 || outer == loop0)
7662 if (outer != loop)
7663 num_nodes -= this_loop->num_nodes;
7664 flow_loop_tree_node_remove (bb->loop_father);
7665 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7666 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7669 else if (bb->loop_father == loop0 && loop0 != loop)
7670 num_nodes--;
7672 /* Remove loop exits from the outlined region. */
7673 if (loops_for_fn (saved_cfun)->exits)
7674 FOR_EACH_EDGE (e, ei, bb->succs)
7676 struct loops *l = loops_for_fn (saved_cfun);
7677 loop_exit **slot
7678 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7679 NO_INSERT);
7680 if (slot)
7681 l->exits->clear_slot (slot);
7685 /* Adjust the number of blocks in the tree root of the outlined part. */
7686 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7688 /* Setup a mapping to be used by move_block_to_fn. */
7689 loop->aux = current_loops->tree_root;
7690 loop0->aux = current_loops->tree_root;
7692 /* Fix up orig_loop_num. If the block referenced in it has been moved
7693 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7694 struct loop *dloop;
7695 signed char *moved_orig_loop_num = NULL;
7696 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7697 if (dloop->orig_loop_num)
7699 if (moved_orig_loop_num == NULL)
7700 moved_orig_loop_num
7701 = XCNEWVEC (signed char, vec_safe_length (larray));
7702 if ((*larray)[dloop->orig_loop_num] != NULL
7703 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7705 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7706 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7707 moved_orig_loop_num[dloop->orig_loop_num]++;
7708 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7710 else
7712 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7713 dloop->orig_loop_num = 0;
7716 pop_cfun ();
7718 if (moved_orig_loop_num)
7720 FOR_EACH_VEC_ELT (bbs, i, bb)
7722 gimple *g = find_loop_dist_alias (bb);
7723 if (g == NULL)
7724 continue;
7726 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7727 gcc_assert (orig_loop_num
7728 && (unsigned) orig_loop_num < vec_safe_length (larray));
7729 if (moved_orig_loop_num[orig_loop_num] == 2)
7731 /* If we have moved both loops with this orig_loop_num into
7732 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7733 too, update the first argument. */
7734 gcc_assert ((*larray)[orig_loop_num] != NULL
7735 && (get_loop (saved_cfun, orig_loop_num)
7736 == NULL));
7737 tree t = build_int_cst (integer_type_node,
7738 (*larray)[orig_loop_num]->num);
7739 gimple_call_set_arg (g, 0, t);
7740 update_stmt (g);
7741 /* Make sure the following loop will not update it. */
7742 moved_orig_loop_num[orig_loop_num] = 0;
7744 else
7745 /* Otherwise at least one of the loops stayed in saved_cfun.
7746 Remove the LOOP_DIST_ALIAS call. */
7747 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7749 FOR_EACH_BB_FN (bb, saved_cfun)
7751 gimple *g = find_loop_dist_alias (bb);
7752 if (g == NULL)
7753 continue;
7754 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7755 gcc_assert (orig_loop_num
7756 && (unsigned) orig_loop_num < vec_safe_length (larray));
7757 if (moved_orig_loop_num[orig_loop_num])
7758 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7759 of the corresponding loops was moved, remove it. */
7760 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7762 XDELETEVEC (moved_orig_loop_num);
7764 ggc_free (larray);
7766 /* Move blocks from BBS into DEST_CFUN. */
7767 gcc_assert (bbs.length () >= 2);
7768 after = dest_cfun->cfg->x_entry_block_ptr;
7769 hash_map<tree, tree> vars_map;
7771 memset (&d, 0, sizeof (d));
7772 d.orig_block = orig_block;
7773 d.new_block = DECL_INITIAL (dest_cfun->decl);
7774 d.from_context = cfun->decl;
7775 d.to_context = dest_cfun->decl;
7776 d.vars_map = &vars_map;
7777 d.new_label_map = new_label_map;
7778 d.eh_map = eh_map;
7779 d.remap_decls_p = true;
7781 if (gimple_in_ssa_p (cfun))
7782 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7784 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7785 set_ssa_default_def (dest_cfun, arg, narg);
7786 vars_map.put (arg, narg);
7789 FOR_EACH_VEC_ELT (bbs, i, bb)
7791 /* No need to update edge counts on the last block. It has
7792 already been updated earlier when we detached the region from
7793 the original CFG. */
7794 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7795 after = bb;
7798 loop->aux = NULL;
7799 loop0->aux = NULL;
7800 /* Loop sizes are no longer correct, fix them up. */
7801 loop->num_nodes -= num_nodes;
7802 for (struct loop *outer = loop_outer (loop);
7803 outer; outer = loop_outer (outer))
7804 outer->num_nodes -= num_nodes;
7805 loop0->num_nodes -= bbs.length () - num_nodes;
7807 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7809 struct loop *aloop;
7810 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7811 if (aloop != NULL)
7813 if (aloop->simduid)
7815 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7816 d.to_context);
7817 dest_cfun->has_simduid_loops = true;
7819 if (aloop->force_vectorize)
7820 dest_cfun->has_force_vectorize_loops = true;
7824 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7825 if (orig_block)
7827 tree block;
7828 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7829 == NULL_TREE);
7830 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7831 = BLOCK_SUBBLOCKS (orig_block);
7832 for (block = BLOCK_SUBBLOCKS (orig_block);
7833 block; block = BLOCK_CHAIN (block))
7834 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7835 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7838 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7839 &vars_map, dest_cfun->decl);
7841 if (new_label_map)
7842 htab_delete (new_label_map);
7843 if (eh_map)
7844 delete eh_map;
7846 if (gimple_in_ssa_p (cfun))
7848 /* We need to release ssa-names in a defined order, so first find them,
7849 and then iterate in ascending version order. */
7850 bitmap release_names = BITMAP_ALLOC (NULL);
7851 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7852 bitmap_iterator bi;
7853 unsigned i;
7854 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7855 release_ssa_name (ssa_name (i));
7856 BITMAP_FREE (release_names);
7859 /* Rewire the entry and exit blocks. The successor to the entry
7860 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7861 the child function. Similarly, the predecessor of DEST_FN's
7862 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7863 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7864 various CFG manipulation functions get to the right CFG.
7866 FIXME, this is silly. The CFG ought to become a parameter to
7867 these helpers. */
7868 push_cfun (dest_cfun);
7869 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7870 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7871 if (exit_bb)
7873 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7874 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7876 else
7877 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7878 pop_cfun ();
7880 /* Back in the original function, the SESE region has disappeared,
7881 create a new basic block in its place. */
7882 bb = create_empty_bb (entry_pred[0]);
7883 if (current_loops)
7884 add_bb_to_loop (bb, loop);
7885 for (i = 0; i < num_entry_edges; i++)
7887 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7888 e->probability = entry_prob[i];
7891 for (i = 0; i < num_exit_edges; i++)
7893 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7894 e->probability = exit_prob[i];
7897 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7898 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7899 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7900 dom_bbs.release ();
7902 if (exit_bb)
7904 free (exit_prob);
7905 free (exit_flag);
7906 free (exit_succ);
7908 free (entry_prob);
7909 free (entry_flag);
7910 free (entry_pred);
7911 bbs.release ();
7913 return bb;
7916 /* Dump default def DEF to file FILE using FLAGS and indentation
7917 SPC. */
7919 static void
7920 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7922 for (int i = 0; i < spc; ++i)
7923 fprintf (file, " ");
7924 dump_ssaname_info_to_file (file, def, spc);
7926 print_generic_expr (file, TREE_TYPE (def), flags);
7927 fprintf (file, " ");
7928 print_generic_expr (file, def, flags);
7929 fprintf (file, " = ");
7930 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7931 fprintf (file, ";\n");
7934 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7936 static void
7937 print_no_sanitize_attr_value (FILE *file, tree value)
7939 unsigned int flags = tree_to_uhwi (value);
7940 bool first = true;
7941 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7943 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7945 if (!first)
7946 fprintf (file, " | ");
7947 fprintf (file, "%s", sanitizer_opts[i].name);
7948 first = false;
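/* E.g. (illustration only): a value covering just the address
   sanitizer prints "address"; multiple matching entries are joined
   with " | ", giving something like

     address | thread

   using the option names recorded in sanitizer_opts.  */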
7953 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7956 void
7957 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7959 tree arg, var, old_current_fndecl = current_function_decl;
7960 struct function *dsf;
7961 bool ignore_topmost_bind = false, any_var = false;
7962 basic_block bb;
7963 tree chain;
7964 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7965 && decl_is_tm_clone (fndecl));
7966 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7968 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7970 fprintf (file, "__attribute__((");
7972 bool first = true;
7973 tree chain;
7974 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7975 first = false, chain = TREE_CHAIN (chain))
7977 if (!first)
7978 fprintf (file, ", ");
7980 tree name = get_attribute_name (chain);
7981 print_generic_expr (file, name, dump_flags);
7982 if (TREE_VALUE (chain) != NULL_TREE)
7984 fprintf (file, " (");
7986 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7987 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7988 else
7989 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7990 fprintf (file, ")");
7994 fprintf (file, "))\n");
7997 current_function_decl = fndecl;
7998 if (flags & TDF_GIMPLE)
8000 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8001 dump_flags | TDF_SLIM);
8002 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
8004 else
8005 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8007 arg = DECL_ARGUMENTS (fndecl);
8008 while (arg)
8010 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8011 fprintf (file, " ");
8012 print_generic_expr (file, arg, dump_flags);
8013 if (DECL_CHAIN (arg))
8014 fprintf (file, ", ");
8015 arg = DECL_CHAIN (arg);
8017 fprintf (file, ")\n");
8019 dsf = DECL_STRUCT_FUNCTION (fndecl);
8020 if (dsf && (flags & TDF_EH))
8021 dump_eh_tree (file, dsf);
8023 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8025 dump_node (fndecl, TDF_SLIM | flags, file);
8026 current_function_decl = old_current_fndecl;
8027 return;
8030 /* When GIMPLE is lowered, the variables are no longer available in
8031 BIND_EXPRs, so display them separately. */
8032 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8034 unsigned ix;
8035 ignore_topmost_bind = true;
8037 fprintf (file, "{\n");
8038 if (gimple_in_ssa_p (fun)
8039 && (flags & TDF_ALIAS))
8041 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8042 arg = DECL_CHAIN (arg))
8044 tree def = ssa_default_def (fun, arg);
8045 if (def)
8046 dump_default_def (file, def, 2, flags);
8049 tree res = DECL_RESULT (fun->decl);
8050 if (res != NULL_TREE
8051 && DECL_BY_REFERENCE (res))
8053 tree def = ssa_default_def (fun, res);
8054 if (def)
8055 dump_default_def (file, def, 2, flags);
8058 tree static_chain = fun->static_chain_decl;
8059 if (static_chain != NULL_TREE)
8061 tree def = ssa_default_def (fun, static_chain);
8062 if (def)
8063 dump_default_def (file, def, 2, flags);
8067 if (!vec_safe_is_empty (fun->local_decls))
8068 FOR_EACH_LOCAL_DECL (fun, ix, var)
8070 print_generic_decl (file, var, flags);
8071 fprintf (file, "\n");
8073 any_var = true;
8076 tree name;
8078 if (gimple_in_ssa_p (cfun))
8079 FOR_EACH_SSA_NAME (ix, name, cfun)
8081 if (!SSA_NAME_VAR (name))
8083 fprintf (file, " ");
8084 print_generic_expr (file, TREE_TYPE (name), flags);
8085 fprintf (file, " ");
8086 print_generic_expr (file, name, flags);
8087 fprintf (file, ";\n");
8089 any_var = true;
8094 if (fun && fun->decl == fndecl
8095 && fun->cfg
8096 && basic_block_info_for_fn (fun))
8098 /* If the CFG has been built, emit a CFG-based dump. */
8099 if (!ignore_topmost_bind)
8100 fprintf (file, "{\n");
8102 if (any_var && n_basic_blocks_for_fn (fun))
8103 fprintf (file, "\n");
8105 FOR_EACH_BB_FN (bb, fun)
8106 dump_bb (file, bb, 2, flags);
8108 fprintf (file, "}\n");
8110 else if (fun->curr_properties & PROP_gimple_any)
8112 /* The function is now in GIMPLE form but the CFG has not been
8113 built yet. Emit the single sequence of GIMPLE statements
8114 that make up its body. */
8115 gimple_seq body = gimple_body (fndecl);
8117 if (gimple_seq_first_stmt (body)
8118 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8119 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8120 print_gimple_seq (file, body, 0, flags);
8121 else
8123 if (!ignore_topmost_bind)
8124 fprintf (file, "{\n");
8126 if (any_var)
8127 fprintf (file, "\n");
8129 print_gimple_seq (file, body, 2, flags);
8130 fprintf (file, "}\n");
8133 else
8135 int indent;
8137 /* Make a tree based dump. */
8138 chain = DECL_SAVED_TREE (fndecl);
8139 if (chain && TREE_CODE (chain) == BIND_EXPR)
8141 if (ignore_topmost_bind)
8143 chain = BIND_EXPR_BODY (chain);
8144 indent = 2;
8146 else
8147 indent = 0;
8149 else
8151 if (!ignore_topmost_bind)
8153 fprintf (file, "{\n");
8154 /* No topmost bind, pretend it's ignored for later. */
8155 ignore_topmost_bind = true;
8157 indent = 2;
8160 if (any_var)
8161 fprintf (file, "\n");
8163 print_generic_stmt_indented (file, chain, flags, indent);
8164 if (ignore_topmost_bind)
8165 fprintf (file, "}\n");
8168 if (flags & TDF_ENUMERATE_LOCALS)
8169 dump_enumerated_decls (file, flags);
8170 fprintf (file, "\n\n");
8172 current_function_decl = old_current_fndecl;
8175 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8177 DEBUG_FUNCTION void
8178 debug_function (tree fn, dump_flags_t flags)
8180 dump_function_to_file (fn, stderr, flags);
8184 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8186 static void
8187 print_pred_bbs (FILE *file, basic_block bb)
8189 edge e;
8190 edge_iterator ei;
8192 FOR_EACH_EDGE (e, ei, bb->preds)
8193 fprintf (file, "bb_%d ", e->src->index);
8197 /* Print on FILE the indexes for the successors of basic_block BB. */
8199 static void
8200 print_succ_bbs (FILE *file, basic_block bb)
8202 edge e;
8203 edge_iterator ei;
8205 FOR_EACH_EDGE (e, ei, bb->succs)
8206 fprintf (file, "bb_%d ", e->dest->index);
8209 /* Print to FILE the basic block BB following the VERBOSITY level. */
8211 void
8212 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8214 char *s_indent = (char *) alloca ((size_t) indent + 1);
8215 memset ((void *) s_indent, ' ', (size_t) indent);
8216 s_indent[indent] = '\0';
8218 /* Print basic_block's header. */
8219 if (verbosity >= 2)
8221 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8222 print_pred_bbs (file, bb);
8223 fprintf (file, "}, succs = {");
8224 print_succ_bbs (file, bb);
8225 fprintf (file, "})\n");
8228 /* Print basic_block's body. */
8229 if (verbosity >= 3)
8231 fprintf (file, "%s {\n", s_indent);
8232 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8233 fprintf (file, "%s }\n", s_indent);
8237 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8239 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8240 VERBOSITY level this outputs the contents of the loop, or just its
8241 structure. */
8243 static void
8244 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8246 char *s_indent;
8247 basic_block bb;
8249 if (loop == NULL)
8250 return;
8252 s_indent = (char *) alloca ((size_t) indent + 1);
8253 memset ((void *) s_indent, ' ', (size_t) indent);
8254 s_indent[indent] = '\0';
8256 /* Print loop's header. */
8257 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8258 if (loop->header)
8259 fprintf (file, "header = %d", loop->header->index);
8260 else
8262 fprintf (file, "deleted)\n");
8263 return;
8265 if (loop->latch)
8266 fprintf (file, ", latch = %d", loop->latch->index);
8267 else
8268 fprintf (file, ", multiple latches");
8269 fprintf (file, ", niter = ");
8270 print_generic_expr (file, loop->nb_iterations);
8272 if (loop->any_upper_bound)
8274 fprintf (file, ", upper_bound = ");
8275 print_decu (loop->nb_iterations_upper_bound, file);
8277 if (loop->any_likely_upper_bound)
8279 fprintf (file, ", likely_upper_bound = ");
8280 print_decu (loop->nb_iterations_likely_upper_bound, file);
8283 if (loop->any_estimate)
8285 fprintf (file, ", estimate = ");
8286 print_decu (loop->nb_iterations_estimate, file);
8288 if (loop->unroll)
8289 fprintf (file, ", unroll = %d", loop->unroll);
8290 fprintf (file, ")\n");
8292 /* Print loop's body. */
8293 if (verbosity >= 1)
8295 fprintf (file, "%s{\n", s_indent);
8296 FOR_EACH_BB_FN (bb, cfun)
8297 if (bb->loop_father == loop)
8298 print_loops_bb (file, bb, indent, verbosity);
8300 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8301 fprintf (file, "%s}\n", s_indent);
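/* Sample output at verbosity 0 (illustrative only), something like:

     loop_1 (header = 3, latch = 4, niter = _7, upper_bound = 99)

   at verbosity 1 and above the loop's blocks follow between braces,
   formatted by print_loops_bb.  */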
8305 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8306 spaces. Following VERBOSITY level this outputs the contents of the
8307 loop, or just its structure. */
8309 static void
8310 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8311 int verbosity)
8313 if (loop == NULL)
8314 return;
8316 print_loop (file, loop, indent, verbosity);
8317 print_loop_and_siblings (file, loop->next, indent, verbosity);
8320 /* Follow a CFG edge from the entry point of the program, and on entry
8321 of a loop, pretty print the loop structure on FILE. */
8323 void
8324 print_loops (FILE *file, int verbosity)
8326 basic_block bb;
8328 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8329 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8330 if (bb && bb->loop_father)
8331 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8334 /* Dump a loop. */
8336 DEBUG_FUNCTION void
8337 debug (struct loop &ref)
8339 print_loop (stderr, &ref, 0, /*verbosity*/0);
8342 DEBUG_FUNCTION void
8343 debug (struct loop *ptr)
8345 if (ptr)
8346 debug (*ptr);
8347 else
8348 fprintf (stderr, "<nil>\n");
8351 /* Dump a loop verbosely. */
8353 DEBUG_FUNCTION void
8354 debug_verbose (struct loop &ref)
8356 print_loop (stderr, &ref, 0, /*verbosity*/3);
8359 DEBUG_FUNCTION void
8360 debug_verbose (struct loop *ptr)
8362 if (ptr)
8363 debug_verbose (*ptr);
8364 else
8365 fprintf (stderr, "<nil>\n");
8369 /* Debug the loops structure at tree level, at some VERBOSITY level. */
8371 DEBUG_FUNCTION void
8372 debug_loops (int verbosity)
8374 print_loops (stderr, verbosity);
8377 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8379 DEBUG_FUNCTION void
8380 debug_loop (struct loop *loop, int verbosity)
8382 print_loop (stderr, loop, 0, verbosity);
8385 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8386 level. */
8388 DEBUG_FUNCTION void
8389 debug_loop_num (unsigned num, int verbosity)
8391 debug_loop (get_loop (cfun, num), verbosity);
8394 /* Return true if BB ends with a call, possibly followed by some
8395 instructions that must stay with the call. Return false
8396 otherwise. */
8398 static bool
8399 gimple_block_ends_with_call_p (basic_block bb)
8401 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8402 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8406 /* Return true if BB ends with a conditional branch. Return false
8407 otherwise. */
8409 static bool
8410 gimple_block_ends_with_condjump_p (const_basic_block bb)
8412 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8413 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8417 /* Return true if statement T may terminate execution of BB in ways not
8418 explicitly represented in the CFG. */
8420 bool
8421 stmt_can_terminate_bb_p (gimple *t)
8423 tree fndecl = NULL_TREE;
8424 int call_flags = 0;
8426 /* An EH exception not handled internally terminates execution of the
8427 whole function. */
8428 if (stmt_can_throw_external (t))
8429 return true;
8431 /* NORETURN and LONGJMP calls already have an edge to exit.
8432 CONST and PURE calls do not need one.
8433 We don't currently check for CONST and PURE here, although
8434 it would be a good idea, because those attributes are
8435 figured out from the RTL in mark_constant_function, and
8436 the counter incrementation code from -fprofile-arcs
8437 leads to different results from -fbranch-probabilities. */
8438 if (is_gimple_call (t))
8440 fndecl = gimple_call_fndecl (t);
8441 call_flags = gimple_call_flags (t);
8444 if (is_gimple_call (t)
8445 && fndecl
8446 && DECL_BUILT_IN (fndecl)
8447 && (call_flags & ECF_NOTHROW)
8448 && !(call_flags & ECF_RETURNS_TWICE)
8449 /* fork() doesn't really return twice, but the effect of
8450 wrapping it in __gcov_fork() which calls __gcov_flush()
8451 and clears the counters before forking has the same
8452 effect as returning twice. Force a fake edge. */
8453 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8454 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8455 return false;
8457 if (is_gimple_call (t))
8459 edge_iterator ei;
8460 edge e;
8461 basic_block bb;
8463 if (call_flags & (ECF_PURE | ECF_CONST)
8464 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8465 return false;
8467 /* A function call may do a longjmp, terminate the program, or do other things.
8468 Special-case noreturn calls that have non-abnormal edges out, as in this
8469 case the fact is sufficiently represented by the lack of edges out of T. */
8470 if (!(call_flags & ECF_NORETURN))
8471 return true;
8473 bb = gimple_bb (t);
8474 FOR_EACH_EDGE (e, ei, bb->succs)
8475 if ((e->flags & EDGE_FAKE) == 0)
8476 return true;
8479 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8480 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8481 return true;
8483 return false;
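/* An illustrative sketch (not from the original file): for a block ending in

     foo ();        <- plain call; it may call exit () or longjmp ()
     a = b + 1;

   the call to foo makes stmt_can_terminate_bb_p return true, since
   execution may never reach the assignment.  A call known to be
   const/pure and non-looping instead makes it return false.  */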
8487 /* Add fake edges to the function exit for any non-constant and
8488 non-noreturn calls (or noreturn calls with EH/abnormal edges) and for
8489 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8490 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8491 that were split.
8493 The goal is to expose cases in which entering a basic block does
8494 not imply that all subsequent instructions must be executed. */
8496 static int
8497 gimple_flow_call_edges_add (sbitmap blocks)
8499 int i;
8500 int blocks_split = 0;
8501 int last_bb = last_basic_block_for_fn (cfun);
8502 bool check_last_block = false;
8504 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8505 return 0;
8507 if (! blocks)
8508 check_last_block = true;
8509 else
8510 check_last_block = bitmap_bit_p (blocks,
8511 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8513 /* In the last basic block, before epilogue generation, there will be
8514 a fallthru edge to EXIT. Special care is required if the last insn
8515 of the last basic block is a call because make_edge folds duplicate
8516 edges, which would result in the fallthru edge also being marked
8517 fake, which would result in the fallthru edge being removed by
8518 remove_fake_edges, which would result in an invalid CFG.
8520 Moreover, we can't elide the outgoing fake edge, since the block
8521 profiler needs to take this into account in order to solve the minimal
8522 spanning tree in the case that the call doesn't return.
8524 Handle this by adding a dummy instruction in a new last basic block. */
8525 if (check_last_block)
8527 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8528 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8529 gimple *t = NULL;
8531 if (!gsi_end_p (gsi))
8532 t = gsi_stmt (gsi);
8534 if (t && stmt_can_terminate_bb_p (t))
8536 edge e;
8538 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8539 if (e)
8541 gsi_insert_on_edge (e, gimple_build_nop ());
8542 gsi_commit_edge_inserts ();
8547 /* Now add fake edges to the function exit for any non-constant
8548 calls since there is no way that we can determine if they will
8549 return or not... */
8550 for (i = 0; i < last_bb; i++)
8552 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8553 gimple_stmt_iterator gsi;
8554 gimple *stmt, *last_stmt;
8556 if (!bb)
8557 continue;
8559 if (blocks && !bitmap_bit_p (blocks, i))
8560 continue;
8562 gsi = gsi_last_nondebug_bb (bb);
8563 if (!gsi_end_p (gsi))
8565 last_stmt = gsi_stmt (gsi);
8568 stmt = gsi_stmt (gsi);
8569 if (stmt_can_terminate_bb_p (stmt))
8571 edge e;
8573 /* The handling above of the final block before the
8574 epilogue should be enough to verify that there is
8575 no edge to the exit block in the CFG already.
8576 Calling make_edge in such a case would cause us to
8577 mark that edge as fake and remove it later. */
8578 if (flag_checking && stmt == last_stmt)
8580 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8581 gcc_assert (e == NULL);
8584 /* Note that the following may create a new basic block
8585 and renumber the existing basic blocks. */
8586 if (stmt != last_stmt)
8588 e = split_block (bb, stmt);
8589 if (e)
8590 blocks_split++;
8592 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8593 e->probability = profile_probability::guessed_never ();
8595 gsi_prev (&gsi);
8597 while (!gsi_end_p (gsi));
8601 if (blocks_split)
8602 checking_verify_flow_info ();
8604 return blocks_split;
8607 /* Removes edge E and all the blocks dominated by it, and updates dominance
8608 information. The IL in E->src needs to be updated separately.
8609 If dominance info is not available, only the edge E is removed. */
8611 void
8612 remove_edge_and_dominated_blocks (edge e)
8614 vec<basic_block> bbs_to_remove = vNULL;
8615 vec<basic_block> bbs_to_fix_dom = vNULL;
8616 edge f;
8617 edge_iterator ei;
8618 bool none_removed = false;
8619 unsigned i;
8620 basic_block bb, dbb;
8621 bitmap_iterator bi;
8623 /* If we are removing a path inside a non-root loop, that may change the
8624 loop ownership of blocks or remove loops. Mark loops for fixup. */
8625 if (current_loops
8626 && loop_outer (e->src->loop_father) != NULL
8627 && e->src->loop_father == e->dest->loop_father)
8628 loops_state_set (LOOPS_NEED_FIXUP);
8630 if (!dom_info_available_p (CDI_DOMINATORS))
8632 remove_edge (e);
8633 return;
8636 /* No updating is needed for edges to exit. */
8637 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8639 if (cfgcleanup_altered_bbs)
8640 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8641 remove_edge (e);
8642 return;
8645 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8646 that is not dominated by E->dest, then this set is empty. Otherwise,
8647 all the basic blocks dominated by E->dest are removed.
8649 Also, to DF_IDOM we store the immediate dominators of the blocks in
8650 the dominance frontier of E (i.e., of the successors of the
8651 removed blocks, if there are any, and of E->dest otherwise). */
8652 FOR_EACH_EDGE (f, ei, e->dest->preds)
8654 if (f == e)
8655 continue;
8657 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8659 none_removed = true;
8660 break;
8664 auto_bitmap df, df_idom;
8665 if (none_removed)
8666 bitmap_set_bit (df_idom,
8667 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8668 else
8670 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8671 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8673 FOR_EACH_EDGE (f, ei, bb->succs)
8675 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8676 bitmap_set_bit (df, f->dest->index);
8679 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8680 bitmap_clear_bit (df, bb->index);
8682 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8684 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8685 bitmap_set_bit (df_idom,
8686 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8690 if (cfgcleanup_altered_bbs)
8692 /* Record the set of the altered basic blocks. */
8693 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8694 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8697 /* Remove E and the cancelled blocks. */
8698 if (none_removed)
8699 remove_edge (e);
8700 else
8702 /* Walk backwards so as to get a chance to substitute all
8703 released DEFs into debug stmts. See
8704 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8705 details. */
8706 for (i = bbs_to_remove.length (); i-- > 0; )
8707 delete_basic_block (bbs_to_remove[i]);
8710 /* Update the dominance information. The immediate dominator may change only
8711 for blocks whose immediate dominator belongs to DF_IDOM:
8713 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8714 removal. Let Z be a block such that idom(Z) = Y and
8715 Z dominates X after the removal. Before removal, there exists a path P
8716 from Y to X that avoids Z. Let F be the last edge on P that is
8717 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8718 dominates W, and because of P, Z does not dominate W), and W belongs to
8719 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8720 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8722 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8723 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8724 dbb;
8725 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8726 bbs_to_fix_dom.safe_push (dbb);
8729 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8731 bbs_to_remove.release ();
8732 bbs_to_fix_dom.release ();
8735 /* Purge dead EH edges from basic block BB. */
8737 bool
8738 gimple_purge_dead_eh_edges (basic_block bb)
8740 bool changed = false;
8741 edge e;
8742 edge_iterator ei;
8743 gimple *stmt = last_stmt (bb);
8745 if (stmt && stmt_can_throw_internal (stmt))
8746 return false;
8748 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8750 if (e->flags & EDGE_EH)
8752 remove_edge_and_dominated_blocks (e);
8753 changed = true;
8755 else
8756 ei_next (&ei);
8759 return changed;
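/* A typical use of this purging helper, as in execute_fixup_cfg later in
   this file: once an IPA pass has marked a callee nothrow,
   maybe_clean_eh_stmt drops the statement from the EH tables and the
   now-dead EH edges are removed:

     if (maybe_clean_eh_stmt (stmt)
         && gimple_purge_dead_eh_edges (bb))
       todo |= TODO_cleanup_cfg;  */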
8762 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8764 bool
8765 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8767 bool changed = false;
8768 unsigned i;
8769 bitmap_iterator bi;
8771 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8773 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8775 /* Earlier gimple_purge_dead_eh_edges could have removed
8776 this basic block already. */
8777 gcc_assert (bb || changed);
8778 if (bb != NULL)
8779 changed |= gimple_purge_dead_eh_edges (bb);
8782 return changed;
8785 /* Purge dead abnormal call edges from basic block BB. */
8787 bool
8788 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8790 bool changed = false;
8791 edge e;
8792 edge_iterator ei;
8793 gimple *stmt = last_stmt (bb);
8795 if (!cfun->has_nonlocal_label
8796 && !cfun->calls_setjmp)
8797 return false;
8799 if (stmt && stmt_can_make_abnormal_goto (stmt))
8800 return false;
8802 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8804 if (e->flags & EDGE_ABNORMAL)
8806 if (e->flags & EDGE_FALLTHRU)
8807 e->flags &= ~EDGE_ABNORMAL;
8808 else
8809 remove_edge_and_dominated_blocks (e);
8810 changed = true;
8812 else
8813 ei_next (&ei);
8816 return changed;
8819 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8821 bool
8822 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8824 bool changed = false;
8825 unsigned i;
8826 bitmap_iterator bi;
8828 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8830 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8832 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8833 this basic block already. */
8834 gcc_assert (bb || changed);
8835 if (bb != NULL)
8836 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8839 return changed;
8842 /* This function is called whenever a new edge is created or
8843 redirected. */
8845 static void
8846 gimple_execute_on_growing_pred (edge e)
8848 basic_block bb = e->dest;
8850 if (!gimple_seq_empty_p (phi_nodes (bb)))
8851 reserve_phi_args_for_new_edge (bb);
8854 /* This function is called immediately before edge E is removed from
8855 the edge vector E->dest->preds. */
8857 static void
8858 gimple_execute_on_shrinking_pred (edge e)
8860 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8861 remove_phi_args (e);
8864 /*---------------------------------------------------------------------------
8865 Helper functions for Loop versioning
8866 ---------------------------------------------------------------------------*/
8868 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8869 of 'first'. Both of them are dominated by 'new_head' basic block. When
8870 'new_head' was created by splitting 'second's incoming edge, it received
8871 phi arguments on that edge from split_edge(). Later, an additional edge
8872 'e' was created to connect 'new_head' and 'first'. This routine now adds
8873 on this additional edge 'e' the phi args that the new_head-to-second edge
8874 received as part of the edge splitting. */
8876 static void
8877 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8878 basic_block new_head, edge e)
8880 gphi *phi1, *phi2;
8881 gphi_iterator psi1, psi2;
8882 tree def;
8883 edge e2 = find_edge (new_head, second);
8885 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8886 edge, we should always have an edge from NEW_HEAD to SECOND. */
8887 gcc_assert (e2 != NULL);
8889 /* Browse all 'second' basic block phi nodes and add phi args to
8890 edge 'e' for 'first' head. PHI args are always in correct order. */
8892 for (psi2 = gsi_start_phis (second),
8893 psi1 = gsi_start_phis (first);
8894 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8895 gsi_next (&psi2), gsi_next (&psi1))
8897 phi1 = psi1.phi ();
8898 phi2 = psi2.phi ();
8899 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8900 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8905 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8906 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8907 the destination of the ELSE part. */
8909 static void
8910 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8911 basic_block second_head ATTRIBUTE_UNUSED,
8912 basic_block cond_bb, void *cond_e)
8914 gimple_stmt_iterator gsi;
8915 gimple *new_cond_expr;
8916 tree cond_expr = (tree) cond_e;
8917 edge e0;
8919 /* Build new conditional expr */
8920 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8921 NULL_TREE, NULL_TREE);
8923 /* Add new cond in cond_bb. */
8924 gsi = gsi_last_bb (cond_bb);
8925 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8927 /* Adjust edges appropriately to connect new head with first head
8928 as well as second head. */
8929 e0 = single_succ_edge (cond_bb);
8930 e0->flags &= ~EDGE_FALLTHRU;
8931 e0->flags |= EDGE_FALSE_VALUE;
8935 /* Do book-keeping of basic block BB for the profile consistency checker.
8936 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8937 do post-pass accounting. Store the counts in RECORD. */
8938 static void
8939 gimple_account_profile_record (basic_block bb, int after_pass,
8940 struct profile_record *record)
8942 gimple_stmt_iterator i;
8943 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8945 record->size[after_pass]
8946 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8947 if (bb->count.initialized_p ())
8948 record->time[after_pass]
8949 += estimate_num_insns (gsi_stmt (i),
8950 &eni_time_weights) * bb->count.to_gcov_type ();
8951 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8952 record->time[after_pass]
8953 += estimate_num_insns (gsi_stmt (i),
8954 &eni_time_weights) * bb->count.to_frequency (cfun);
8958 struct cfg_hooks gimple_cfg_hooks = {
8959 "gimple",
8960 gimple_verify_flow_info,
8961 gimple_dump_bb, /* dump_bb */
8962 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8963 create_bb, /* create_basic_block */
8964 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8965 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8966 gimple_can_remove_branch_p, /* can_remove_branch_p */
8967 remove_bb, /* delete_basic_block */
8968 gimple_split_block, /* split_block */
8969 gimple_move_block_after, /* move_block_after */
8970 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8971 gimple_merge_blocks, /* merge_blocks */
8972 gimple_predict_edge, /* predict_edge */
8973 gimple_predicted_by_p, /* predicted_by_p */
8974 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8975 gimple_duplicate_bb, /* duplicate_block */
8976 gimple_split_edge, /* split_edge */
8977 gimple_make_forwarder_block, /* make_forwarder_block */
8978 NULL, /* tidy_fallthru_edge */
8979 NULL, /* force_nonfallthru */
8980 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8981 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8982 gimple_flow_call_edges_add, /* flow_call_edges_add */
8983 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8984 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8985 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8986 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8987 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8988 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8989 flush_pending_stmts, /* flush_pending_stmts */
8990 gimple_empty_block_p, /* block_empty_p */
8991 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8992 gimple_account_profile_record,
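/* This table is installed by gimple_register_cfg_hooks (used by the
   selftests at the end of this file); IL-independent CFG routines then
   dispatch through it, roughly as follows (a sketch of the cfghooks.c
   mechanism, not a verbatim quote):

     basic_block
     split_edge (edge e)
     {
       ...
       ret = cfg_hooks->split_edge (e);
       ...
     }  */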
8996 /* Split all critical edges. */
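/* An edge is critical when its source block has more than one successor
   and its destination block has more than one predecessor, e.g. the
   A->D edge in:

        A   B
       / \ /
      C   D

   No statement can be inserted on such an edge without affecting one of
   the other paths, so the edge is split by interposing a new empty
   block.  */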
8998 unsigned int
8999 split_critical_edges (void)
9001 basic_block bb;
9002 edge e;
9003 edge_iterator ei;
9005 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9006 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9007 mappings around the calls to split_edge. */
9008 start_recording_case_labels ();
9009 FOR_ALL_BB_FN (bb, cfun)
9011 FOR_EACH_EDGE (e, ei, bb->succs)
9013 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9014 split_edge (e);
9015 /* PRE inserts statements on edges and expects that,
9016 since split_critical_edges was done beforehand, committing edge
9017 insertions will not split more edges. In addition to critical
9018 edges, we must split edges coming out of blocks that have multiple
9019 successors and end in control flow statements, such as RESX.
9020 Go ahead and split them too. This matches the logic in
9021 gimple_find_edge_insert_loc. */
9022 else if ((!single_pred_p (e->dest)
9023 || !gimple_seq_empty_p (phi_nodes (e->dest))
9024 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9025 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9026 && !(e->flags & EDGE_ABNORMAL))
9028 gimple_stmt_iterator gsi;
9030 gsi = gsi_last_bb (e->src);
9031 if (!gsi_end_p (gsi)
9032 && stmt_ends_bb_p (gsi_stmt (gsi))
9033 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9034 && !gimple_call_builtin_p (gsi_stmt (gsi),
9035 BUILT_IN_RETURN)))
9036 split_edge (e);
9040 end_recording_case_labels ();
9041 return 0;
9044 namespace {
9046 const pass_data pass_data_split_crit_edges =
9048 GIMPLE_PASS, /* type */
9049 "crited", /* name */
9050 OPTGROUP_NONE, /* optinfo_flags */
9051 TV_TREE_SPLIT_EDGES, /* tv_id */
9052 PROP_cfg, /* properties_required */
9053 PROP_no_crit_edges, /* properties_provided */
9054 0, /* properties_destroyed */
9055 0, /* todo_flags_start */
9056 0, /* todo_flags_finish */
9059 class pass_split_crit_edges : public gimple_opt_pass
9061 public:
9062 pass_split_crit_edges (gcc::context *ctxt)
9063 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9066 /* opt_pass methods: */
9067 virtual unsigned int execute (function *) { return split_critical_edges (); }
9069 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9070 }; // class pass_split_crit_edges
9072 } // anon namespace
9074 gimple_opt_pass *
9075 make_pass_split_crit_edges (gcc::context *ctxt)
9077 return new pass_split_crit_edges (ctxt);
9081 /* Insert COND expression, which is a GIMPLE_COND, after STMT
9082 in basic block BB, splitting the block as appropriate and
9083 creating a new conditionally executed basic block.
9084 Update the profile so the new bb is visited with probability PROB.
9085 Return the created basic block. */
9086 basic_block
9087 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9088 profile_probability prob)
9090 edge fall = split_block (bb, stmt);
9091 gimple_stmt_iterator iter = gsi_last_bb (bb);
9092 basic_block new_bb;
9094 /* Insert cond statement. */
9095 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9096 if (gsi_end_p (iter))
9097 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9098 else
9099 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9101 /* Create conditionally executed block. */
9102 new_bb = create_empty_bb (bb);
9103 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9104 e->probability = prob;
9105 new_bb->count = e->count ();
9106 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9108 /* Fix edge for split bb. */
9109 fall->flags = EDGE_FALSE_VALUE;
9110 fall->probability -= e->probability;
9112 /* Update dominance info. */
9113 if (dom_info_available_p (CDI_DOMINATORS))
9115 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9116 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9119 /* Update loop info. */
9120 if (current_loops)
9121 add_bb_to_loop (new_bb, bb->loop_father);
9123 return new_bb;
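/* A usage sketch (the names val, zero, bb and stmt are hypothetical):
   to make check_bb execute only when VAL is nonzero, with a low
   estimated probability:

     gcond *cond = gimple_build_cond (NE_EXPR, val, zero,
                                      NULL_TREE, NULL_TREE);
     basic_block check_bb
       = insert_cond_bb (bb, stmt, cond,
                         profile_probability::very_unlikely ());

   Instrumentation that emits rarely-taken checks is the typical
   caller.  */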
9126 /* Build a ternary operation and gimplify it. Emit code before GSI.
9127 Return the gimple_val holding the result. */
9129 tree
9130 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9131 tree type, tree a, tree b, tree c)
9133 tree ret;
9134 location_t loc = gimple_location (gsi_stmt (*gsi));
9136 ret = fold_build3_loc (loc, code, type, a, b, c);
9137 STRIP_NOPS (ret);
9139 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9140 GSI_SAME_STMT);
9143 /* Build a binary operation and gimplify it. Emit code before GSI.
9144 Return the gimple_val holding the result. */
9146 tree
9147 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9148 tree type, tree a, tree b)
9150 tree ret;
9152 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9153 STRIP_NOPS (ret);
9155 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9156 GSI_SAME_STMT);
9159 /* Build a unary operation and gimplify it. Emit code before GSI.
9160 Return the gimple_val holding the result. */
9162 tree
9163 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9164 tree a)
9166 tree ret;
9168 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9169 STRIP_NOPS (ret);
9171 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9172 GSI_SAME_STMT);
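/* For example (a sketch): to emit d = (a + b) * c in front of *GSI using
   the helpers above:

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression and forces the result into a GIMPLE
   value, emitting any needed statements before the iterator.  */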
9177 /* Given a basic block B which ends with a conditional and has
9178 precisely two successors, determine which of the edges is taken if
9179 the conditional is true and which is taken if the conditional is
9180 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9182 void
9183 extract_true_false_edges_from_block (basic_block b,
9184 edge *true_edge,
9185 edge *false_edge)
9187 edge e = EDGE_SUCC (b, 0);
9189 if (e->flags & EDGE_TRUE_VALUE)
9191 *true_edge = e;
9192 *false_edge = EDGE_SUCC (b, 1);
9194 else
9196 *false_edge = e;
9197 *true_edge = EDGE_SUCC (b, 1);
9202 /* From a controlling predicate in the immediate dominator DOM of
9203 PHIBLOCK, determine the edges into PHIBLOCK that are chosen if the
9204 predicate evaluates to true and to false, and store them to
9205 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9206 they are non-NULL. Return true if the edges can be determined,
9207 otherwise return false. */
9209 bool
9210 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9211 edge *true_controlled_edge,
9212 edge *false_controlled_edge)
9214 basic_block bb = phiblock;
9215 edge true_edge, false_edge, tem;
9216 edge e0 = NULL, e1 = NULL;
9218 /* We have to verify that one edge into the PHI node is dominated
9219 by the true edge of the predicate block and the other edge
9220 dominated by the false edge. This ensures that the PHI argument
9221 we are going to take is completely determined by the path we
9222 take from the predicate block.
9223 We can only use BB dominance checks below if the destination of
9224 the true/false edges are dominated by their edge, thus only
9225 have a single predecessor. */
9226 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9227 tem = EDGE_PRED (bb, 0);
9228 if (tem == true_edge
9229 || (single_pred_p (true_edge->dest)
9230 && (tem->src == true_edge->dest
9231 || dominated_by_p (CDI_DOMINATORS,
9232 tem->src, true_edge->dest))))
9233 e0 = tem;
9234 else if (tem == false_edge
9235 || (single_pred_p (false_edge->dest)
9236 && (tem->src == false_edge->dest
9237 || dominated_by_p (CDI_DOMINATORS,
9238 tem->src, false_edge->dest))))
9239 e1 = tem;
9240 else
9241 return false;
9242 tem = EDGE_PRED (bb, 1);
9243 if (tem == true_edge
9244 || (single_pred_p (true_edge->dest)
9245 && (tem->src == true_edge->dest
9246 || dominated_by_p (CDI_DOMINATORS,
9247 tem->src, true_edge->dest))))
9248 e0 = tem;
9249 else if (tem == false_edge
9250 || (single_pred_p (false_edge->dest)
9251 && (tem->src == false_edge->dest
9252 || dominated_by_p (CDI_DOMINATORS,
9253 tem->src, false_edge->dest))))
9254 e1 = tem;
9255 else
9256 return false;
9257 if (!e0 || !e1)
9258 return false;
9260 if (true_controlled_edge)
9261 *true_controlled_edge = e0;
9262 if (false_controlled_edge)
9263 *false_controlled_edge = e1;
9265 return true;
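/* A usage sketch: given a PHI node in PHIBLOCK whose immediate dominator
   DOM ends in a condition, a pass can pick the PHI argument that flows
   in when the predicate is true:

     edge te, fe;
     if (extract_true_false_controlled_edges (dom, phiblock, &te, &fe))
       arg_if_true = PHI_ARG_DEF (phi, te->dest_idx);  */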
9268 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9269 range [low, high]. Place the associated stmts before the last stmt of BB. */
9271 void
9272 generate_range_test (basic_block bb, tree index, tree low, tree high,
9273 tree *lhs, tree *rhs)
9275 tree type = TREE_TYPE (index);
9276 tree utype = unsigned_type_for (type);
9278 low = fold_convert (type, low);
9279 high = fold_convert (type, high);
9281 tree tmp = make_ssa_name (type);
9282 gassign *sub1
9283 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
9285 *lhs = make_ssa_name (utype);
9286 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
9288 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
9289 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9290 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
9291 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
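/* The emitted test relies on the standard unsigned-wraparound trick:
   LOW <= INDEX && INDEX <= HIGH is equivalent to the single comparison
   (unsigned) (INDEX - LOW) <= (unsigned) (HIGH - LOW), because an INDEX
   below LOW wraps around to a large unsigned value.  E.g. for
   [LOW, HIGH] = [3, 7] and INDEX = 2, (unsigned) (2 - 3) is huge, so
   the test correctly fails.  */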
9294 /* Emit return warnings. */
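/* Examples of what the pass below diagnoses (sketches):

     __attribute__ ((noreturn)) void f (void) { return; }
       -> "'noreturn' function does return"

     int g (int x) { if (x) return 1; }
       -> "control reaches end of non-void function" (-Wreturn-type)  */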
9296 namespace {
9298 const pass_data pass_data_warn_function_return =
9300 GIMPLE_PASS, /* type */
9301 "*warn_function_return", /* name */
9302 OPTGROUP_NONE, /* optinfo_flags */
9303 TV_NONE, /* tv_id */
9304 PROP_cfg, /* properties_required */
9305 0, /* properties_provided */
9306 0, /* properties_destroyed */
9307 0, /* todo_flags_start */
9308 0, /* todo_flags_finish */
9311 class pass_warn_function_return : public gimple_opt_pass
9313 public:
9314 pass_warn_function_return (gcc::context *ctxt)
9315 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9318 /* opt_pass methods: */
9319 virtual unsigned int execute (function *);
9321 }; // class pass_warn_function_return
9323 unsigned int
9324 pass_warn_function_return::execute (function *fun)
9326 source_location location;
9327 gimple *last;
9328 edge e;
9329 edge_iterator ei;
9331 if (!targetm.warn_func_return (fun->decl))
9332 return 0;
9334 /* If we have a path to EXIT, then we do return. */
9335 if (TREE_THIS_VOLATILE (fun->decl)
9336 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9338 location = UNKNOWN_LOCATION;
9339 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9340 (e = ei_safe_edge (ei)); )
9342 last = last_stmt (e->src);
9343 if ((gimple_code (last) == GIMPLE_RETURN
9344 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9345 && location == UNKNOWN_LOCATION
9346 && ((location = LOCATION_LOCUS (gimple_location (last)))
9347 != UNKNOWN_LOCATION)
9348 && !optimize)
9349 break;
9350 /* When optimizing, replace return stmts in noreturn functions
9351 with __builtin_unreachable () call. */
9352 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9354 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9355 gimple *new_stmt = gimple_build_call (fndecl, 0);
9356 gimple_set_location (new_stmt, gimple_location (last));
9357 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9358 gsi_replace (&gsi, new_stmt, true);
9359 remove_edge (e);
9361 else
9362 ei_next (&ei);
9364 if (location == UNKNOWN_LOCATION)
9365 location = cfun->function_end_locus;
9366 warning_at (location, 0, "%<noreturn%> function does return");
9369 /* If we see "return;" in some basic block, then we do reach the end
9370 without returning a value. */
9371 else if (warn_return_type > 0
9372 && !TREE_NO_WARNING (fun->decl)
9373 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9375 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9377 gimple *last = last_stmt (e->src);
9378 greturn *return_stmt = dyn_cast <greturn *> (last);
9379 if (return_stmt
9380 && gimple_return_retval (return_stmt) == NULL
9381 && !gimple_no_warning_p (last))
9383 location = gimple_location (last);
9384 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9385 location = fun->function_end_locus;
9386 warning_at (location, OPT_Wreturn_type,
9387 "control reaches end of non-void function");
9388 TREE_NO_WARNING (fun->decl) = 1;
9389 break;
9392 /* The C++ FE turns fallthrough from the end of a non-void function
9393 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9394 Recognize those too. */
9395 basic_block bb;
9396 if (!TREE_NO_WARNING (fun->decl))
9397 FOR_EACH_BB_FN (bb, fun)
9398 if (EDGE_COUNT (bb->succs) == 0)
9400 gimple *last = last_stmt (bb);
9401 const enum built_in_function ubsan_missing_ret
9402 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9403 if (last
9404 && ((LOCATION_LOCUS (gimple_location (last))
9405 == BUILTINS_LOCATION
9406 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9407 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9409 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9410 gsi_prev_nondebug (&gsi);
9411 gimple *prev = gsi_stmt (gsi);
9412 if (prev == NULL)
9413 location = UNKNOWN_LOCATION;
9414 else
9415 location = gimple_location (prev);
9416 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9417 location = fun->function_end_locus;
9418 warning_at (location, OPT_Wreturn_type,
9419 "control reaches end of non-void function");
9420 TREE_NO_WARNING (fun->decl) = 1;
9421 break;
9425 return 0;
9428 } // anon namespace
9430 gimple_opt_pass *
9431 make_pass_warn_function_return (gcc::context *ctxt)
9433 return new pass_warn_function_return (ctxt);
9436 /* Walk a gimplified function and warn about calls whose return value is
9437 ignored although the function type has attribute ((warn_unused_result)).
9438 This is done before inlining, so we don't have to worry about that. */
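/* For example (a sketch):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }
       -> "ignoring return value of 'f', declared with attribute
          warn_unused_result" (-Wunused-result)  */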
9440 static void
9441 do_warn_unused_result (gimple_seq seq)
9443 tree fdecl, ftype;
9444 gimple_stmt_iterator i;
9446 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9448 gimple *g = gsi_stmt (i);
9450 switch (gimple_code (g))
9452 case GIMPLE_BIND:
9453 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9454 break;
9455 case GIMPLE_TRY:
9456 do_warn_unused_result (gimple_try_eval (g));
9457 do_warn_unused_result (gimple_try_cleanup (g));
9458 break;
9459 case GIMPLE_CATCH:
9460 do_warn_unused_result (gimple_catch_handler (
9461 as_a <gcatch *> (g)));
9462 break;
9463 case GIMPLE_EH_FILTER:
9464 do_warn_unused_result (gimple_eh_filter_failure (g));
9465 break;
9467 case GIMPLE_CALL:
9468 if (gimple_call_lhs (g))
9469 break;
9470 if (gimple_call_internal_p (g))
9471 break;
9473 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9474 LHS. All calls whose value is ignored should be
9475 represented like this. Look for the attribute. */
9476 fdecl = gimple_call_fndecl (g);
9477 ftype = gimple_call_fntype (g);
9479 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9481 location_t loc = gimple_location (g);
9483 if (fdecl)
9484 warning_at (loc, OPT_Wunused_result,
9485 "ignoring return value of %qD, "
9486 "declared with attribute warn_unused_result",
9487 fdecl);
9488 else
9489 warning_at (loc, OPT_Wunused_result,
9490 "ignoring return value of function "
9491 "declared with attribute warn_unused_result");
9493 break;
9495 default:
9496 /* Not a container, not a call, or a call whose value is used. */
9497 break;
9502 namespace {
9504 const pass_data pass_data_warn_unused_result =
9506 GIMPLE_PASS, /* type */
9507 "*warn_unused_result", /* name */
9508 OPTGROUP_NONE, /* optinfo_flags */
9509 TV_NONE, /* tv_id */
9510 PROP_gimple_any, /* properties_required */
9511 0, /* properties_provided */
9512 0, /* properties_destroyed */
9513 0, /* todo_flags_start */
9514 0, /* todo_flags_finish */
9517 class pass_warn_unused_result : public gimple_opt_pass
9519 public:
9520 pass_warn_unused_result (gcc::context *ctxt)
9521 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9524 /* opt_pass methods: */
9525 virtual bool gate (function *) { return flag_warn_unused_result; }
9526 virtual unsigned int execute (function *)
9528 do_warn_unused_result (gimple_body (current_function_decl));
9529 return 0;
9532 }; // class pass_warn_unused_result
9534 } // anon namespace
9536 gimple_opt_pass *
9537 make_pass_warn_unused_result (gcc::context *ctxt)
9539 return new pass_warn_unused_result (ctxt);
9542 /* IPA passes, compilation of earlier functions or inlining
9543 might have changed some properties, such as having marked functions
9544 nothrow, pure, const or noreturn.
9545 Remove redundant edges and basic blocks, and create new ones if necessary.
9547 This pass can't be executed as a standalone pass from the pass manager,
9548 because in between inlining and this fixup, verify_flow_info would fail. */
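/* For instance (a sketch): if ipa-pure-const proves a callee const or
   pure, its calls can no longer make abnormal gotos, so the abnormal
   call edges out of the calling blocks become dead and are purged
   below.  */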
9550 unsigned int
9551 execute_fixup_cfg (void)
9553 basic_block bb;
9554 gimple_stmt_iterator gsi;
9555 int todo = 0;
9556 cgraph_node *node = cgraph_node::get (current_function_decl);
9557 profile_count num = node->count;
9558 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9559 bool scale = num.initialized_p () && !(num == den);
9561 if (scale)
9563 profile_count::adjust_for_ipa_scaling (&num, &den);
9564 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9565 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9566 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9569 FOR_EACH_BB_FN (bb, cfun)
9571 if (scale)
9572 bb->count = bb->count.apply_scale (num, den);
9573 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9575 gimple *stmt = gsi_stmt (gsi);
9576 tree decl = is_gimple_call (stmt)
9577 ? gimple_call_fndecl (stmt)
9578 : NULL;
9579 if (decl)
9581 int flags = gimple_call_flags (stmt);
9582 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9584 if (gimple_purge_dead_abnormal_call_edges (bb))
9585 todo |= TODO_cleanup_cfg;
9587 if (gimple_in_ssa_p (cfun))
9589 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9590 update_stmt (stmt);
9594 if (flags & ECF_NORETURN
9595 && fixup_noreturn_call (stmt))
9596 todo |= TODO_cleanup_cfg;
9599 /* Remove stores to variables we marked write-only.
9600 Keep the access when the store has a side effect, i.e. when the
9601 source is volatile. */
9602 if (gimple_store_p (stmt)
9603 && !gimple_has_side_effects (stmt))
9605 tree lhs = get_base_address (gimple_get_lhs (stmt));
9607 if (VAR_P (lhs)
9608 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9609 && varpool_node::get (lhs)->writeonly)
9611 unlink_stmt_vdef (stmt);
9612 gsi_remove (&gsi, true);
9613 release_defs (stmt);
9614 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9615 continue;
9618 /* For calls we can simply remove LHS when it is known
9619 to be write-only. */
9620 if (is_gimple_call (stmt)
9621 && gimple_get_lhs (stmt))
9623 tree lhs = get_base_address (gimple_get_lhs (stmt));
9625 if (VAR_P (lhs)
9626 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9627 && varpool_node::get (lhs)->writeonly)
9629 gimple_call_set_lhs (stmt, NULL);
9630 update_stmt (stmt);
9631 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9635 if (maybe_clean_eh_stmt (stmt)
9636 && gimple_purge_dead_eh_edges (bb))
9637 todo |= TODO_cleanup_cfg;
9638 gsi_next (&gsi);
9641 /* If we have a basic block with no successors that does not
9642 end with a control statement or a noreturn call, end it with
9643 a call to __builtin_unreachable. This situation can occur
9644 when inlining a noreturn call that does in fact return. */
9645 if (EDGE_COUNT (bb->succs) == 0)
9647 gimple *stmt = last_stmt (bb);
9648 if (!stmt
9649 || (!is_ctrl_stmt (stmt)
9650 && (!is_gimple_call (stmt)
9651 || !gimple_call_noreturn_p (stmt))))
9653 if (stmt && is_gimple_call (stmt))
9654 gimple_call_set_ctrl_altering (stmt, false);
9655 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9656 stmt = gimple_build_call (fndecl, 0);
9657 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9658 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9659 if (!cfun->after_inlining)
9661 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9662 node->create_edge (cgraph_node::get_create (fndecl),
9663 call_stmt, bb->count);
9668 if (scale)
9669 compute_function_frequency ();
9671 if (current_loops
9672 && (todo & TODO_cleanup_cfg))
9673 loops_state_set (LOOPS_NEED_FIXUP);
9675 return todo;
9678 namespace {
9680 const pass_data pass_data_fixup_cfg =
9682 GIMPLE_PASS, /* type */
9683 "fixup_cfg", /* name */
9684 OPTGROUP_NONE, /* optinfo_flags */
9685 TV_NONE, /* tv_id */
9686 PROP_cfg, /* properties_required */
9687 0, /* properties_provided */
9688 0, /* properties_destroyed */
9689 0, /* todo_flags_start */
9690 0, /* todo_flags_finish */
9693 class pass_fixup_cfg : public gimple_opt_pass
9695 public:
9696 pass_fixup_cfg (gcc::context *ctxt)
9697 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9700 /* opt_pass methods: */
9701 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9702 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9704 }; // class pass_fixup_cfg
9706 } // anon namespace
9708 gimple_opt_pass *
9709 make_pass_fixup_cfg (gcc::context *ctxt)
9711 return new pass_fixup_cfg (ctxt);
9714 /* Garbage collection support for edge_def. */
9716 extern void gt_ggc_mx (tree&);
9717 extern void gt_ggc_mx (gimple *&);
9718 extern void gt_ggc_mx (rtx&);
9719 extern void gt_ggc_mx (basic_block&);
9721 static void
9722 gt_ggc_mx (rtx_insn *& x)
9724 if (x)
9725 gt_ggc_mx_rtx_def ((void *) x);
9728 void
9729 gt_ggc_mx (edge_def *e)
9731 tree block = LOCATION_BLOCK (e->goto_locus);
9732 gt_ggc_mx (e->src);
9733 gt_ggc_mx (e->dest);
9734 if (current_ir_type () == IR_GIMPLE)
9735 gt_ggc_mx (e->insns.g);
9736 else
9737 gt_ggc_mx (e->insns.r);
9738 gt_ggc_mx (block);
9741 /* PCH support for edge_def. */
9743 extern void gt_pch_nx (tree&);
9744 extern void gt_pch_nx (gimple *&);
9745 extern void gt_pch_nx (rtx&);
9746 extern void gt_pch_nx (basic_block&);
9748 static void
9749 gt_pch_nx (rtx_insn *& x)
9751 if (x)
9752 gt_pch_nx_rtx_def ((void *) x);
9755 void
9756 gt_pch_nx (edge_def *e)
9758 tree block = LOCATION_BLOCK (e->goto_locus);
9759 gt_pch_nx (e->src);
9760 gt_pch_nx (e->dest);
9761 if (current_ir_type () == IR_GIMPLE)
9762 gt_pch_nx (e->insns.g);
9763 else
9764 gt_pch_nx (e->insns.r);
9765 gt_pch_nx (block);
9768 void
9769 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9771 tree block = LOCATION_BLOCK (e->goto_locus);
9772 op (&(e->src), cookie);
9773 op (&(e->dest), cookie);
9774 if (current_ir_type () == IR_GIMPLE)
9775 op (&(e->insns.g), cookie);
9776 else
9777 op (&(e->insns.r), cookie);
9778 op (&(block), cookie);
9781 #if CHECKING_P
9783 namespace selftest {
9785 /* Helper function for CFG selftests: create a dummy function decl
9786 and push it as cfun. */
9788 static tree
9789 push_fndecl (const char *name)
9791 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9792 /* FIXME: this uses input_location: */
9793 tree fndecl = build_fn_decl (name, fn_type);
9794 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9795 NULL_TREE, integer_type_node);
9796 DECL_RESULT (fndecl) = retval;
9797 push_struct_function (fndecl);
9798 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9799 ASSERT_TRUE (fun != NULL);
9800 init_empty_tree_cfg_for_function (fun);
9801 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9802 ASSERT_EQ (0, n_edges_for_fn (fun));
9803 return fndecl;
9806 /* These tests directly create CFGs.
9807 Compare with the static fns within tree-cfg.c:
9808 - build_gimple_cfg
9809 - make_blocks: calls create_basic_block (seq, bb);
9810 - make_edges. */
9812 /* Verify a simple CFG of the form:
9813 ENTRY -> A -> B -> C -> EXIT. */
9815 static void
9816 test_linear_chain ()
9818 gimple_register_cfg_hooks ();
9820 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9821 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9823 /* Create some empty blocks. */
9824 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9825 basic_block bb_b = create_empty_bb (bb_a);
9826 basic_block bb_c = create_empty_bb (bb_b);
9828 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9829 ASSERT_EQ (0, n_edges_for_fn (fun));
9831 /* Create some edges: a simple linear chain of BBs. */
9832 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9833 make_edge (bb_a, bb_b, 0);
9834 make_edge (bb_b, bb_c, 0);
9835 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9837 /* Verify the edges. */
9838 ASSERT_EQ (4, n_edges_for_fn (fun));
9839 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9840 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9841 ASSERT_EQ (1, bb_a->preds->length ());
9842 ASSERT_EQ (1, bb_a->succs->length ());
9843 ASSERT_EQ (1, bb_b->preds->length ());
9844 ASSERT_EQ (1, bb_b->succs->length ());
9845 ASSERT_EQ (1, bb_c->preds->length ());
9846 ASSERT_EQ (1, bb_c->succs->length ());
9847 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9848 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9850 /* Verify the dominance information.
9851 Each BB in our simple chain should be dominated by the one before
9852 it. */
9853 calculate_dominance_info (CDI_DOMINATORS);
9854 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9855 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9856 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9857 ASSERT_EQ (1, dom_by_b.length ());
9858 ASSERT_EQ (bb_c, dom_by_b[0]);
9859 free_dominance_info (CDI_DOMINATORS);
9860 dom_by_b.release ();
9862 /* Similarly for post-dominance: each BB in our chain is post-dominated
9863 by the one after it. */
9864 calculate_dominance_info (CDI_POST_DOMINATORS);
9865 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9866 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9867 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9868 ASSERT_EQ (1, postdom_by_b.length ());
9869 ASSERT_EQ (bb_a, postdom_by_b[0]);
9870 free_dominance_info (CDI_POST_DOMINATORS);
9871 postdom_by_b.release ();
9873 pop_cfun ();
9876 /* Verify a simple CFG of the form:
9877      ENTRY
9878        |
9879        A
9880       / \
9881      /t  \f
9882     B     C
9883      \   /
9884       \ /
9885        D
9886        |
9887      EXIT.  */
9889 static void
9890 test_diamond ()
9892 gimple_register_cfg_hooks ();
9894 tree fndecl = push_fndecl ("cfg_test_diamond");
9895 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9897 /* Create some empty blocks. */
9898 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9899 basic_block bb_b = create_empty_bb (bb_a);
9900 basic_block bb_c = create_empty_bb (bb_a);
9901 basic_block bb_d = create_empty_bb (bb_b);
9903 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9904 ASSERT_EQ (0, n_edges_for_fn (fun));
9906 /* Create the edges. */
9907 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9908 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9909 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9910 make_edge (bb_b, bb_d, 0);
9911 make_edge (bb_c, bb_d, 0);
9912 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9914 /* Verify the edges. */
9915 ASSERT_EQ (6, n_edges_for_fn (fun));
9916 ASSERT_EQ (1, bb_a->preds->length ());
9917 ASSERT_EQ (2, bb_a->succs->length ());
9918 ASSERT_EQ (1, bb_b->preds->length ());
9919 ASSERT_EQ (1, bb_b->succs->length ());
9920 ASSERT_EQ (1, bb_c->preds->length ());
9921 ASSERT_EQ (1, bb_c->succs->length ());
9922 ASSERT_EQ (2, bb_d->preds->length ());
9923 ASSERT_EQ (1, bb_d->succs->length ());
9925 /* Verify the dominance information. */
9926 calculate_dominance_info (CDI_DOMINATORS);
9927 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9928 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9929 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9930 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9931 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9932 dom_by_a.release ();
9933 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9934 ASSERT_EQ (0, dom_by_b.length ());
9935 dom_by_b.release ();
9936 free_dominance_info (CDI_DOMINATORS);
9938 /* Similarly for post-dominance. */
9939 calculate_dominance_info (CDI_POST_DOMINATORS);
9940 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9941 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9942 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9943 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9944 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9945 postdom_by_d.release ();
9946 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9947 ASSERT_EQ (0, postdom_by_b.length ());
9948 postdom_by_b.release ();
9949 free_dominance_info (CDI_POST_DOMINATORS);
9951 pop_cfun ();
9954 /* Verify that we can handle a CFG containing a "complete" aka
9955 fully-connected subgraph (where A B C D below all have edges
9956 pointing to each other node, also to themselves).
9957 e.g.:
9958      ENTRY  EXIT
9959        |     ^
9960        \     /
9964        A<--->B
9965        ^^   ^^
9966        | \ / |
9967        |  X  |
9968        | / \ |
9969        VV   VV
9970        C<--->D  */
9973 static void
9974 test_fully_connected ()
9976 gimple_register_cfg_hooks ();
9978 tree fndecl = push_fndecl ("cfg_fully_connected");
9979 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9981 const int n = 4;
9983 /* Create some empty blocks. */
9984 auto_vec <basic_block> subgraph_nodes;
9985 for (int i = 0; i < n; i++)
9986 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9988 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9989 ASSERT_EQ (0, n_edges_for_fn (fun));
9991 /* Create the edges. */
9992 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9993 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9994 for (int i = 0; i < n; i++)
9995 for (int j = 0; j < n; j++)
9996 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9998 /* Verify the edges. */
9999 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10000 /* The first one is linked to ENTRY/EXIT as well as itself and
10001 everything else. */
10002 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10003 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10004 /* The other ones in the subgraph are linked to everything in
10005 the subgraph (including themselves). */
10006 for (int i = 1; i < n; i++)
10008 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10009 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10012 /* Verify the dominance information. */
10013 calculate_dominance_info (CDI_DOMINATORS);
10014 /* The initial block in the subgraph should be dominated by ENTRY. */
10015 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10016 get_immediate_dominator (CDI_DOMINATORS,
10017 subgraph_nodes[0]));
10018 /* Every other block in the subgraph should be dominated by the
10019 initial block. */
10020 for (int i = 1; i < n; i++)
10021 ASSERT_EQ (subgraph_nodes[0],
10022 get_immediate_dominator (CDI_DOMINATORS,
10023 subgraph_nodes[i]));
10024 free_dominance_info (CDI_DOMINATORS);
10026 /* Similarly for post-dominance. */
10027 calculate_dominance_info (CDI_POST_DOMINATORS);
10028 /* The initial block in the subgraph should be postdominated by EXIT. */
10029 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10030 get_immediate_dominator (CDI_POST_DOMINATORS,
10031 subgraph_nodes[0]));
10032 /* Every other block in the subgraph should be postdominated by the
10033 initial block, since that leads to EXIT. */
10034 for (int i = 1; i < n; i++)
10035 ASSERT_EQ (subgraph_nodes[0],
10036 get_immediate_dominator (CDI_POST_DOMINATORS,
10037 subgraph_nodes[i]));
10038 free_dominance_info (CDI_POST_DOMINATORS);
10040 pop_cfun ();
10043 /* Run all of the selftests within this file. */
10045 void
10046 tree_cfg_c_tests ()
10048 test_linear_chain ();
10049 test_diamond ();
10050 test_fully_connected ();
10053 } // namespace selftest
10055 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10056 - loop
10057 - nested loops
10058 - switch statement (a block with many out-edges)
10059 - something that jumps to itself
10060 - etc */
10062 #endif /* CHECKING_P */