1 /* Control flow functions for trees.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
70 /* Local declarations. */
72 /* Initial capacity for the basic block array. */
73 static const int initial_cfg_capacity
= 20;
75 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
76 which use a particular edge. The CASE_LABEL_EXPRs are chained together
77 via their CASE_CHAIN field, which we clear after we're done with the
78 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
80 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
81 update the case vector in response to edge redirections.
83 Right now this table is set up and torn down at key points in the
84 compilation process. It would be nice if we could make the table
85 more persistent. The key is getting notification of changes to
86 the CFG (particularly edge removal, creation and redirection). */
88 static hash_map
<edge
, tree
> *edge_to_cases
;
90 /* If we record edge_to_cases, this bitmap will hold indexes
91 of basic blocks that end in a GIMPLE_SWITCH which we touched
92 due to edge manipulations. */
94 static bitmap touched_switch_bbs
;
99 long num_merged_labels
;
102 static struct cfg_stats_d cfg_stats
;
104 /* Data to pass to replace_block_vars_by_duplicates_1. */
105 struct replace_decls_d
107 hash_map
<tree
, tree
> *vars_map
;
111 /* Hash table to store last discriminator assigned for each locus. */
112 struct locus_discrim_map
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher
: free_ptr_hash
<locus_discrim_map
>
122 static inline hashval_t
hash (const locus_discrim_map
*);
123 static inline bool equal (const locus_discrim_map
*,
124 const locus_discrim_map
*);
127 /* Trivial hash function for a location_t. ITEM is a pointer to
128 a hash table entry that maps a location_t to a discriminator. */
/* The hash value is simply the recorded source line number; see equal()
   below, which also compares only the line.  */
131 locus_discrim_hasher::hash (const locus_discrim_map
*item
)
133 return item
->location_line
;
136 /* Equality function for the locus-to-discriminator map. A and B
137 point to the two hash table entries to compare. */
/* NOTE(review): only location_line is compared, so entries are keyed
   purely by line number, not by file.  */
140 locus_discrim_hasher::equal (const locus_discrim_map
*a
,
141 const locus_discrim_map
*b
)
143 return a
->location_line
== b
->location_line
;
146 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq
);
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block
);
155 static void make_gimple_switch_edges (gswitch
*, basic_block
);
156 static bool make_goto_expr_edges (basic_block
);
157 static void make_gimple_asm_edges (basic_block
);
158 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
159 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple
*, gimple
*);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge
);
165 static gimple
*first_non_label_stmt (basic_block
);
166 static bool verify_gimple_transaction (gtransaction
*);
167 static bool call_can_make_abnormal_goto (gimple
*);
169 /* Flowgraph optimization and cleanup. */
170 static void gimple_merge_blocks (basic_block
, basic_block
);
171 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
172 static void remove_bb (basic_block
);
173 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
174 static edge
find_taken_edge_cond_expr (const gcond
*, tree
);
/* Reset FN's CFG to the empty state: mark profile data absent, size the
   basic-block and label-to-block maps to the initial capacity, install
   the fixed ENTRY and EXIT blocks, and chain ENTRY directly to EXIT.  */
177 init_empty_tree_cfg_for_function (struct function
*fn
)
179 /* Initialize the basic block array. */
181 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
182 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
183 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
184 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
185 initial_cfg_capacity
, true);
187 /* Build a mapping of labels to their associated blocks. */
188 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
189 initial_cfg_capacity
, true);
191 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
192 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
/* With no real blocks yet, ENTRY and EXIT are linked to each other.  */
194 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
195 = EXIT_BLOCK_PTR_FOR_FN (fn
);
196 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
197 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
/* Convenience wrapper: initialize the empty CFG for the current
   function (cfun).  */
201 init_empty_tree_cfg (void)
203 init_empty_tree_cfg_for_function (cfun
);
206 /*---------------------------------------------------------------------------
208 ---------------------------------------------------------------------------*/
210 /* Entry point to the CFG builder for trees. SEQ is the sequence of
211 statements to be added to the flowgraph. */
/* Drives CFG construction: zero the stats, build an empty CFG, ensure at
   least one real block exists, purge dead labels, group switch cases, then
   assign discriminators over a freshly allocated per-locus hash table.
   NOTE(review): calls such as make_blocks (seq) from the original file are
   not visible in this extraction.  */
214 build_gimple_cfg (gimple_seq seq
)
216 /* Register specific gimple functions. */
217 gimple_register_cfg_hooks ();
219 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
221 init_empty_tree_cfg ();
225 /* Make sure there is always at least one block, even if it's empty. */
226 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
227 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
229 /* Adjust the size of the array. */
230 if (basic_block_info_for_fn (cfun
)->length ()
231 < (size_t) n_basic_blocks_for_fn (cfun
))
232 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
233 n_basic_blocks_for_fn (cfun
));
235 /* To speed up statement iterator walks, we first purge dead labels. */
236 cleanup_dead_labels ();
238 /* Group case nodes to reduce the number of edges.
239 We do this after cleaning up dead labels because otherwise we miss
240 a lot of obvious case merging opportunities. */
241 group_case_labels ();
243 /* Create the edges of the flowgraph. */
244 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
246 assign_discriminators ();
247 cleanup_dead_labels ();
/* The discriminator table is only needed during CFG build; free it.  */
248 delete discriminator_per_locus
;
249 discriminator_per_locus
= NULL
;
252 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
253 them and propagate the information to LOOP. We assume that the annotations
254 come immediately before the condition in BB, if any. */
/* NOTE(review): some original lines (braces, returns, breaks) are missing
   from this extraction; the switch cases below each set the corresponding
   loop flag, then the annotation call is replaced by a copy of its first
   argument into its lhs.  */
257 replace_loop_annotate_in_block (basic_block bb
, class loop
*loop
)
259 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
260 gimple
*stmt
= gsi_stmt (gsi
);
/* Only process blocks that end in a GIMPLE_COND.  */
262 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
/* Walk backwards over the stmts preceding the condition.  */
265 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
267 stmt
= gsi_stmt (gsi
);
268 if (gimple_code (stmt
) != GIMPLE_CALL
)
270 if (!gimple_call_internal_p (stmt
)
271 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
/* Dispatch on the annotation kind (second call argument).  */
274 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
276 case annot_expr_ivdep_kind
:
277 loop
->safelen
= INT_MAX
;
279 case annot_expr_unroll_kind
:
/* The requested unroll factor is the third call argument.  */
281 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt
, 2));
282 cfun
->has_unroll
= true;
284 case annot_expr_no_vector_kind
:
285 loop
->dont_vectorize
= true;
287 case annot_expr_vector_kind
:
288 loop
->force_vectorize
= true;
289 cfun
->has_force_vectorize_loops
= true;
291 case annot_expr_parallel_kind
:
292 loop
->can_be_parallel
= true;
293 loop
->safelen
= INT_MAX
;
/* Replace the consumed annotation with lhs = arg0.  */
299 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
300 gimple_call_arg (stmt
, 0));
301 gsi_replace (&gsi
, stmt
, true);
305 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
306 them and propagate the information to the loop. We assume that the
307 annotations come immediately before the condition of the loop. */
/* First pass: propagate annotations found in each loop's header and latch.
   Second pass: scan every block and strip any leftover IFN_ANNOTATE calls,
   warning that the annotation is ignored.  */
310 replace_loop_annotate (void)
314 gimple_stmt_iterator gsi
;
317 FOR_EACH_LOOP (loop
, 0)
319 /* First look into the header. */
320 replace_loop_annotate_in_block (loop
->header
, loop
);
322 /* Then look into the latch, if any. */
324 replace_loop_annotate_in_block (loop
->latch
, loop
);
326 /* Push the global flag_finite_loops state down to individual loops. */
327 loop
->finite_p
= flag_finite_loops
;
330 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
331 FOR_EACH_BB_FN (bb
, cfun
)
333 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
335 stmt
= gsi_stmt (gsi
);
336 if (gimple_code (stmt
) != GIMPLE_CALL
)
338 if (!gimple_call_internal_p (stmt
)
339 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
/* Only known loop-annotation kinds are silently dropped; anything else
   gets a warning below.  */
342 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
344 case annot_expr_ivdep_kind
:
345 case annot_expr_unroll_kind
:
346 case annot_expr_no_vector_kind
:
347 case annot_expr_vector_kind
:
348 case annot_expr_parallel_kind
:
354 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
/* Replace the annotation with lhs = arg0, as in the per-block helper.  */
355 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
356 gimple_call_arg (stmt
, 0));
357 gsi_replace (&gsi
, stmt
, true);
/* Pass entry point: build the CFG from the function body, release the
   body seq, optionally dump scope blocks, then initialize the loop
   optimizer and fold loop annotations into the loop structures.  */
363 execute_build_cfg (void)
365 gimple_seq body
= gimple_body (current_function_decl
);
367 build_gimple_cfg (body
);
/* The statements now live in basic blocks; drop the flat body.  */
368 gimple_set_body (current_function_decl
, NULL
);
369 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
371 fprintf (dump_file
, "Scope blocks:\n");
372 dump_scope_blocks (dump_file
, dump_flags
);
375 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
376 replace_loop_annotate ();
/* Pass descriptor for the CFG-building pass: requires lowered EH gimple,
   provides the CFG and loop structures.  */
382 const pass_data pass_data_build_cfg
=
384 GIMPLE_PASS
, /* type */
386 OPTGROUP_NONE
, /* optinfo_flags */
387 TV_TREE_CFG
, /* tv_id */
388 PROP_gimple_leh
, /* properties_required */
389 ( PROP_cfg
| PROP_loops
), /* properties_provided */
390 0, /* properties_destroyed */
391 0, /* todo_flags_start */
392 0, /* todo_flags_finish */
/* Thin opt_pass wrapper whose execute() simply forwards to
   execute_build_cfg().  */
395 class pass_build_cfg
: public gimple_opt_pass
398 pass_build_cfg (gcc::context
*ctxt
)
399 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
402 /* opt_pass methods: */
403 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
405 }; // class pass_build_cfg
/* Factory for the pass manager; caller owns the returned pass object.  */
410 make_pass_build_cfg (gcc::context
*ctxt
)
412 return new pass_build_cfg (ctxt
);
416 /* Return true if T is a computed goto. */
/* A goto is "computed" when its destination is not a plain LABEL_DECL.  */
419 computed_goto_p (gimple
*t
)
421 return (gimple_code (t
) == GIMPLE_GOTO
422 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
425 /* Returns true if the sequence of statements STMTS only contains
426 a call to __builtin_unreachable (). */
/* Labels, debug stmts and clobbers before the call are tolerated; any
   other statement makes the answer false.  NOTE(review): the early
   bail-out for empty STMTS and under -fsanitize=unreachable is partially
   missing from this extraction.  */
429 gimple_seq_unreachable_p (gimple_seq stmts
)
432 /* Return false if -fsanitize=unreachable, we don't want to
433 optimize away those calls, but rather turn them into
434 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
436 || sanitize_flags_p (SANITIZE_UNREACHABLE
))
439 gimple_stmt_iterator gsi
= gsi_last (stmts
);
/* The last statement must be the __builtin_unreachable () call.  */
441 if (!gimple_call_builtin_p (gsi_stmt (gsi
), BUILT_IN_UNREACHABLE
))
444 for (gsi_prev (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
446 gimple
*stmt
= gsi_stmt (gsi
);
447 if (gimple_code (stmt
) != GIMPLE_LABEL
448 && !is_gimple_debug (stmt
)
449 && !gimple_clobber_p (stmt
))
455 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
456 the other edge points to a bb with just __builtin_unreachable ().
457 I.e. return true for C->M edge in:
465 __builtin_unreachable ();
469 assert_unreachable_fallthru_edge_p (edge e
)
471 basic_block pred_bb
= e
->src
;
472 gimple
*last
= last_stmt (pred_bb
);
473 if (last
&& gimple_code (last
) == GIMPLE_COND
)
/* Pick whichever successor of the COND is NOT E's destination.  */
475 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
476 if (other_bb
== e
->dest
)
477 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
/* The other arm must be a dead end containing only __builtin_unreachable.  */
478 if (EDGE_COUNT (other_bb
->succs
) == 0)
479 return gimple_seq_unreachable_p (bb_seq (other_bb
));
485 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
486 could alter control flow except via eh. We initialize the flag at
487 CFG build time and only ever clear it later. */
490 gimple_call_initialize_ctrl_altering (gimple
*stmt
)
492 int flags
= gimple_call_flags (stmt
);
494 /* A call alters control flow if it can make an abnormal goto. */
495 if (call_can_make_abnormal_goto (stmt
)
496 /* A call also alters control flow if it does not return. */
497 || flags
& ECF_NORETURN
498 /* TM ending statements have backedges out of the transaction.
499 Return true so we split the basic block containing them.
500 Note that the TM_BUILTIN test is merely an optimization. */
501 || ((flags
& ECF_TM_BUILTIN
)
502 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
503 /* BUILT_IN_RETURN call is same as return statement. */
504 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
)
505 /* IFN_UNIQUE should be the last insn, to make checking for it
506 as cheap as possible. */
507 || (gimple_call_internal_p (stmt
)
508 && gimple_call_internal_unique_p (stmt
)))
509 gimple_call_set_ctrl_altering (stmt
, true);
/* Otherwise the call is plain: explicitly clear the flag.  */
511 gimple_call_set_ctrl_altering (stmt
, false);
515 /* Insert SEQ after BB and build a flowgraph. */
/* Walks SEQ once, splitting it into basic blocks: a new block starts when
   START_NEW_BLOCK is set (previous stmt ended a block) or when
   stmt_starts_bb_p says so.  Returns the last block created — TODO confirm
   against the original, the return stmt is not visible here.  */
518 make_blocks_1 (gimple_seq seq
, basic_block bb
)
520 gimple_stmt_iterator i
= gsi_start (seq
);
522 gimple
*prev_stmt
= NULL
;
523 bool start_new_block
= true;
524 bool first_stmt_of_seq
= true;
526 while (!gsi_end_p (i
))
528 /* PREV_STMT should only be set to a debug stmt if the debug
529 stmt is before nondebug stmts. Once stmt reaches a nondebug
530 nonlabel, prev_stmt will be set to it, so that
531 stmt_starts_bb_p will know to start a new block if a label is
532 found. However, if stmt was a label after debug stmts only,
533 keep the label in prev_stmt even if we find further debug
534 stmts, for there may be other labels after them, and they
535 should land in the same block. */
536 if (!prev_stmt
|| !stmt
|| !is_gimple_debug (stmt
))
/* Compute the control-altering flag for calls up front.  */
540 if (stmt
&& is_gimple_call (stmt
))
541 gimple_call_initialize_ctrl_altering (stmt
);
543 /* If the statement starts a new basic block or if we have determined
544 in a previous pass that we need to create a new block for STMT, do
546 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
548 if (!first_stmt_of_seq
)
549 gsi_split_seq_before (&i
, &seq
);
550 bb
= create_basic_block (seq
, bb
);
551 start_new_block
= false;
555 /* Now add STMT to BB and create the subgraphs for special statement
557 gimple_set_bb (stmt
, bb
);
559 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
561 if (stmt_ends_bb_p (stmt
))
563 /* If the stmt can make abnormal goto use a new temporary
564 for the assignment to the LHS. This makes sure the old value
565 of the LHS is available on the abnormal edge. Otherwise
566 we will end up with overlapping life-ranges for abnormal
568 if (gimple_has_lhs (stmt
)
569 && stmt_can_make_abnormal_goto (stmt
)
570 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
/* Redirect STMT's result into TMP, then copy TMP into the real LHS
   on the fallthru path only.  */
572 tree lhs
= gimple_get_lhs (stmt
);
573 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
574 gimple
*s
= gimple_build_assign (lhs
, tmp
);
575 gimple_set_location (s
, gimple_location (stmt
));
576 gimple_set_block (s
, gimple_block (stmt
));
577 gimple_set_lhs (stmt
, tmp
);
578 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
580 start_new_block
= true;
584 first_stmt_of_seq
= false;
589 /* Build a flowgraph for the sequence of stmts SEQ. */
594 /* Look for debug markers right before labels, and move the debug
595 stmts after the labels. Accepting labels among debug markers
596 adds no value, just complexity; if we wanted to annotate labels
597 with view numbers (so sequencing among markers would matter) or
598 somesuch, we're probably better off still moving the labels, but
599 adding other debug annotations in their original positions or
600 emitting nonbind or bind markers associated with the labels in
601 the original position of the labels.
603 Moving labels would probably be simpler, but we can't do that:
604 moving labels assigns label ids to them, and doing so because of
605 debug markers makes for -fcompare-debug and possibly even codegen
606 differences. So, we have to move the debug stmts instead. To
607 that end, we scan SEQ backwards, marking the position of the
608 latest (earliest we find) label, and moving debug stmts that are
609 not separated from it by nondebug nonlabel stmts after the
/* NOTE(review): the function header line for make_blocks is missing from
   this extraction; the preprocessing below only runs when debug marker
   stmts may exist.  */
611 if (MAY_HAVE_DEBUG_MARKER_STMTS
)
613 gimple_stmt_iterator label
= gsi_none ();
615 for (gimple_stmt_iterator i
= gsi_last (seq
); !gsi_end_p (i
); gsi_prev (&i
))
617 gimple
*stmt
= gsi_stmt (i
);
619 /* If this is the first label we encounter (latest in SEQ)
620 before nondebug stmts, record its position. */
621 if (is_a
<glabel
*> (stmt
))
623 if (gsi_end_p (label
))
628 /* Without a recorded label position to move debug stmts to,
629 there's nothing to do. */
630 if (gsi_end_p (label
))
633 /* Move the debug stmt at I after LABEL. */
634 if (is_gimple_debug (stmt
))
636 gcc_assert (gimple_debug_nonbind_marker_p (stmt
));
637 /* As STMT is removed, I advances to the stmt after
638 STMT, so the gsi_prev in the for "increment"
639 expression gets us to the stmt we're to visit after
640 STMT. LABEL, however, would advance to the moved
641 stmt if we passed it to gsi_move_after, so pass it a
642 copy instead, so as to keep LABEL pointing to the
644 gimple_stmt_iterator copy
= label
;
645 gsi_move_after (&i
, &copy
);
649 /* There aren't any (more?) debug stmts before label, so
650 there isn't anything else to move after it. */
/* Finally hand the (possibly reordered) sequence to the block builder,
   starting after the ENTRY block.  */
655 make_blocks_1 (seq
, ENTRY_BLOCK_PTR_FOR_FN (cfun
));
658 /* Create and return a new empty basic block after bb AFTER. */
/* CFG-hook callback; H is the (optional) gimple_seq for the block, E is
   unused here.  Grows the block array on demand and bumps both block
   counters.  */
661 create_bb (void *h
, void *e
, basic_block after
)
667 /* Create and initialize a new basic block. Since alloc_block uses
668 GC allocation that clears memory to allocate a basic block, we do
669 not have to clear the newly allocated basic block here. */
672 bb
->index
= last_basic_block_for_fn (cfun
);
674 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
676 /* Add the new block to the linked list of blocks. */
677 link_block (bb
, after
);
679 /* Grow the basic block array if needed. */
680 if ((size_t) last_basic_block_for_fn (cfun
)
681 == basic_block_info_for_fn (cfun
)->length ())
682 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
683 last_basic_block_for_fn (cfun
) + 1);
685 /* Add the newly created block to the array. */
686 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
688 n_basic_blocks_for_fn (cfun
)++;
689 last_basic_block_for_fn (cfun
)++;
695 /*---------------------------------------------------------------------------
697 ---------------------------------------------------------------------------*/
699 /* If basic block BB has an abnormal edge to a basic block
700 containing IFN_ABNORMAL_DISPATCHER internal call, return
701 that the dispatcher's basic block, otherwise return NULL. */
704 get_abnormal_succ_dispatcher (basic_block bb
)
/* Look for a successor reached by an abnormal (but not EH) edge whose
   first real stmt is the IFN_ABNORMAL_DISPATCHER call.  */
709 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
710 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
712 gimple_stmt_iterator gsi
713 = gsi_start_nondebug_after_labels_bb (e
->dest
);
714 gimple
*g
= gsi_stmt (gsi
);
715 if (g
&& gimple_call_internal_p (g
, IFN_ABNORMAL_DISPATCHER
))
721 /* Helper function for make_edges. Create a basic block with
722 with ABNORMAL_DISPATCHER internal call in it if needed, and
723 create abnormal edges from BBS to it and from it to FOR_BB
724 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
/* DISPATCHER_BBS caches already-created dispatcher blocks (two slots per
   OMP region: non-goto and goto).  BB_TO_OMP_IDX, when non-NULL, maps bb
   index to OMP region so edges never cross SESE region boundaries.  */
727 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
728 basic_block for_bb
, int *bb_to_omp_idx
,
729 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
731 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
732 unsigned int idx
= 0;
/* With per-OMP-region dispatchers, select the slot pair for FOR_BB's
   region.  */
738 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
739 if (bb_to_omp_idx
[for_bb
->index
] != 0)
743 /* If the dispatcher has been created already, then there are basic
744 blocks with abnormal edges to it, so just make a new edge to
746 if (*dispatcher
== NULL
)
748 /* Check if there are any basic blocks that need to have
749 abnormal edges to this dispatcher. If there are none, return
751 if (bb_to_omp_idx
== NULL
)
753 if (bbs
->is_empty ())
/* With OMP regions, at least one source bb must be in FOR_BB's region.  */
758 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
759 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
765 /* Create the dispatcher bb. */
766 *dispatcher
= create_basic_block (NULL
, for_bb
);
769 /* Factor computed gotos into a common computed goto site. Also
770 record the location of that site so that we can un-factor the
771 gotos after we have converted back to normal form. */
772 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
774 /* Create the destination of the factored goto. Each original
775 computed goto will put its desired destination into this
776 variable and jump to the label we create immediately below. */
777 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
779 /* Build a label for the new block which will contain the
780 factored computed goto. */
781 tree factored_label_decl
782 = create_artificial_label (UNKNOWN_LOCATION
);
783 gimple
*factored_computed_goto_label
784 = gimple_build_label (factored_label_decl
);
785 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
787 /* Build our new computed goto. */
788 gimple
*factored_computed_goto
= gimple_build_goto (var
);
789 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
/* Rewrite each original computed goto: store its destination into VAR
   and fall through to the dispatcher.  */
791 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
794 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
797 gsi
= gsi_last_bb (bb
);
798 gimple
*last
= gsi_stmt (gsi
);
800 gcc_assert (computed_goto_p (last
));
802 /* Copy the original computed goto's destination into VAR. */
804 = gimple_build_assign (var
, gimple_goto_dest (last
));
805 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
807 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
808 e
->goto_locus
= gimple_location (last
);
/* The original goto is now redundant; remove it.  */
809 gsi_remove (&gsi
, true);
/* Non-computed-goto case: emit the IFN_ABNORMAL_DISPATCHER call; ARG
   distinguishes the dispatcher flavor.  */
814 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
815 gimple
*g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
817 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
818 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
820 /* Create predecessor edges of the dispatcher. */
821 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
824 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
826 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
/* Finally, the dispatcher jumps abnormally to FOR_BB.  */
831 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
834 /* Creates outgoing edges for BB. Returns 1 when it ends with an
835 computed goto, returns 2 when it ends with a statement that
836 might return to this function via an nonlocal goto, otherwise
837 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
840 make_edges_bb (basic_block bb
, struct omp_region
**pcur_region
, int *pomp_index
)
842 gimple
*last
= last_stmt (bb
);
843 bool fallthru
= false;
/* Dispatch on the kind of statement terminating BB.  NOTE(review): case
   labels and break/return statements are partially missing from this
   extraction.  */
849 switch (gimple_code (last
))
852 if (make_goto_expr_edges (bb
))
/* GIMPLE_RETURN: edge straight to EXIT, carrying the return's locus.  */
858 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
859 e
->goto_locus
= gimple_location (last
);
864 make_cond_expr_edges (bb
);
868 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
/* GIMPLE_RESX: only EH edges.  */
872 make_eh_edges (last
);
875 case GIMPLE_EH_DISPATCH
:
876 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
880 /* If this function receives a nonlocal goto, then we need to
881 make edges from this call site to all the nonlocal goto
883 if (stmt_can_make_abnormal_goto (last
))
886 /* If this statement has reachable exception handlers, then
887 create abnormal edges to them. */
888 make_eh_edges (last
);
890 /* BUILTIN_RETURN is really a return statement. */
891 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
893 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
896 /* Some calls are known not to return. */
898 fallthru
= !gimple_call_noreturn_p (last
);
902 /* A GIMPLE_ASSIGN may throw internally and thus be considered
904 if (is_ctrl_altering_stmt (last
))
905 make_eh_edges (last
);
910 make_gimple_asm_edges (bb
);
/* OMP statements delegate to the OMP edge builder, which also tracks
   the current region.  */
915 fallthru
= omp_make_gimple_edges (bb
, pcur_region
, pomp_index
);
918 case GIMPLE_TRANSACTION
:
920 gtransaction
*txn
= as_a
<gtransaction
*> (last
);
921 tree label1
= gimple_transaction_label_norm (txn
);
922 tree label2
= gimple_transaction_label_uninst (txn
);
925 make_edge (bb
, label_to_block (cfun
, label1
), EDGE_FALLTHRU
);
927 make_edge (bb
, label_to_block (cfun
, label2
),
928 EDGE_TM_UNINSTRUMENTED
| (label1
? 0 : EDGE_FALLTHRU
));
930 tree label3
= gimple_transaction_label_over (txn
);
931 if (gimple_transaction_subcode (txn
)
932 & (GTMA_HAVE_ABORT
| GTMA_IS_OUTER
))
933 make_edge (bb
, label_to_block (cfun
, label3
), EDGE_TM_ABORT
);
/* Default: a stmt that does not end a bb simply falls through.  */
940 gcc_assert (!stmt_ends_bb_p (last
));
946 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
951 /* Join all the blocks in the flowgraph. */
/* NOTE(review): the function header line is missing from this extraction.
   Two phases: (1) per-bb edge creation via make_edges_bb, collecting bbs
   that end in computed gotos / abnormal-goto calls; (2) factoring of
   those abnormal edges through dispatcher blocks, per OMP region.  */
957 struct omp_region
*cur_region
= NULL
;
958 auto_vec
<basic_block
> ab_edge_goto
;
959 auto_vec
<basic_block
> ab_edge_call
;
960 int *bb_to_omp_idx
= NULL
;
961 int cur_omp_region_idx
= 0;
963 /* Create an edge from entry to the first block with executable
965 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
966 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
969 /* Traverse the basic block array placing edges. */
970 FOR_EACH_BB_FN (bb
, cfun
)
975 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
977 mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
/* mer == 1: computed goto; mer == 2: possible nonlocal-goto return.  */
979 ab_edge_goto
.safe_push (bb
);
981 ab_edge_call
.safe_push (bb
);
/* Lazily allocate the bb -> OMP region map on first region seen.  */
983 if (cur_region
&& bb_to_omp_idx
== NULL
)
984 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
987 /* Computed gotos are hell to deal with, especially if there are
988 lots of them with a large number of destinations. So we factor
989 them to a common computed goto location before we build the
990 edge list. After we convert back to normal form, we will un-factor
991 the computed gotos since factoring introduces an unwanted jump.
992 For non-local gotos and abnormal edges from calls to calls that return
993 twice or forced labels, factor the abnormal edges too, by having all
994 abnormal edges from the calls go to a common artificial basic block
995 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
996 basic block to all forced labels and calls returning twice.
997 We do this per-OpenMP structured block, because those regions
998 are guaranteed to be single entry single exit by the standard,
999 so it is not allowed to enter or exit such regions abnormally this way,
1000 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1001 must not transfer control across SESE region boundaries. */
1002 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
1004 gimple_stmt_iterator gsi
;
1005 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
1006 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
1007 int count
= n_basic_blocks_for_fn (cfun
);
/* With OMP regions, each bb index gets its own pair of dispatcher
   slots.  */
1010 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
1012 FOR_EACH_BB_FN (bb
, cfun
)
1014 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1016 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1022 target
= gimple_label_label (label_stmt
);
1024 /* Make an edge to every label block that has been marked as a
1025 potential target for a computed goto or a non-local goto. */
1026 if (FORCED_LABEL (target
))
1027 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1028 &ab_edge_goto
, true);
1029 if (DECL_NONLOCAL (target
))
1031 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1032 &ab_edge_call
, false);
/* Skip a debug stmt that may follow the labels.  */
1037 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
1038 gsi_next_nondebug (&gsi
);
1039 if (!gsi_end_p (gsi
))
1041 /* Make an edge to every setjmp-like call. */
1042 gimple
*call_stmt
= gsi_stmt (gsi
);
1043 if (is_gimple_call (call_stmt
)
1044 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
1045 || gimple_call_builtin_p (call_stmt
,
1046 BUILT_IN_SETJMP_RECEIVER
)))
1047 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
1048 &ab_edge_call
, false);
1053 XDELETE (dispatcher_bbs
);
1056 XDELETE (bb_to_omp_idx
);
1058 omp_free_regions ();
1061 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1062 needed. Returns true if new bbs were created.
1063 Note: This is transitional code, and should not be used for new code. We
1064 should be able to get rid of this by rewriting all target va-arg
1065 gimplification hooks to use an interface gimple_build_cond_value as described
1066 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1069 gimple_find_sub_bbs (gimple_seq seq
, gimple_stmt_iterator
*gsi
)
1071 gimple
*stmt
= gsi_stmt (*gsi
);
1072 basic_block bb
= gimple_bb (stmt
);
1073 basic_block lastbb
, afterbb
;
1074 int old_num_bbs
= n_basic_blocks_for_fn (cfun
);
1076 lastbb
= make_blocks_1 (seq
, bb
);
/* If no new bb was created there is nothing to wire up.  */
1077 if (old_num_bbs
== n_basic_blocks_for_fn (cfun
))
1079 e
= split_block (bb
, stmt
);
1080 /* Move e->dest to come after the new basic blocks. */
1082 unlink_block (afterbb
);
1083 link_block (afterbb
, lastbb
);
1084 redirect_edge_succ (e
, bb
->next_bb
);
/* Walk the freshly inserted bbs, giving each its edges, loop membership
   and (when profile info is available) edge probabilities.  */
1086 while (bb
!= afterbb
)
1088 struct omp_region
*cur_region
= NULL
;
1089 profile_count cnt
= profile_count::zero ();
1092 int cur_omp_region_idx
= 0;
1093 int mer
= make_edges_bb (bb
, &cur_region
, &cur_omp_region_idx
);
/* None of the new bbs may end in abnormal control flow or OMP stmts.  */
1094 gcc_assert (!mer
&& !cur_region
);
1095 add_bb_to_loop (bb
, afterbb
->loop_father
);
1099 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1101 if (e
->count ().initialized_p ())
1106 tree_guess_outgoing_edge_probabilities (bb
);
1107 if (all
|| profile_status_for_fn (cfun
) == PROFILE_READ
)
1115 /* Find the next available discriminator value for LOCUS. The
1116 discriminator distinguishes among several basic blocks that
1117 share a common locus, allowing for more accurate sample-based
1121 next_discriminator_for_locus (int line
)
1123 struct locus_discrim_map item
;
1124 struct locus_discrim_map
**slot
;
1126 item
.location_line
= line
;
1127 item
.discriminator
= 0;
/* Look up (or reserve a slot for) LINE in the per-locus table.  */
1128 slot
= discriminator_per_locus
->find_slot_with_hash (&item
, line
, INSERT
);
1130 if (*slot
== HTAB_EMPTY_ENTRY
)
/* First time we see this line: allocate a fresh map entry.  */
1132 *slot
= XNEW (struct locus_discrim_map
);
1134 (*slot
)->location_line
= line
;
1135 (*slot
)->discriminator
= 0;
/* Pre-increment, so the first discriminator handed out is 1.  */
1137 (*slot
)->discriminator
++;
1138 return (*slot
)->discriminator
;
1141 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
/* FROM is the pre-expanded form of LOCUS1, passed in so callers can
   amortize the expand_location cost over many comparisons.  */
1144 same_line_p (location_t locus1
, expanded_location
*from
, location_t locus2
)
1146 expanded_location to
;
/* Identical location_ts trivially match without expansion.  */
1148 if (locus1
== locus2
)
1151 to
= expand_location (locus2
);
1153 if (from
->line
!= to
.line
)
/* Same file pointer is a cheap match; otherwise fall back to comparing
   file names, treating a NULL name as a mismatch.  */
1155 if (from
->file
== to
.file
)
1157 return (from
->file
!= NULL
1159 && filename_cmp (from
->file
, to
.file
) == 0);
1162 /* Assign discriminators to each basic block. */
/* For each bb whose last stmt has a known location, give successors that
   share the same source line distinct discriminators so sample-based
   profiling can tell them apart.  */
1165 assign_discriminators (void)
1169 FOR_EACH_BB_FN (bb
, cfun
)
1173 gimple
*last
= last_stmt (bb
);
1174 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1176 if (locus
== UNKNOWN_LOCATION
)
/* Expand once per bb; same_line_p reuses this.  */
1179 expanded_location locus_e
= expand_location (locus
);
1181 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1183 gimple
*first
= first_non_label_stmt (e
->dest
);
1184 gimple
*last
= last_stmt (e
->dest
);
1185 if ((first
&& same_line_p (locus
, &locus_e
,
1186 gimple_location (first
)))
1187 || (last
&& same_line_p (locus
, &locus_e
,
1188 gimple_location (last
))))
/* Successor shares BB's line: make sure the pair is distinguishable.  */
1190 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1192 = next_discriminator_for_locus (locus_e
.line
);
1194 e
->dest
->discriminator
1195 = next_discriminator_for_locus (locus_e
.line
);
1201 /* Create the edges for a GIMPLE_COND starting at block BB. */
1204 make_cond_expr_edges (basic_block bb
)
1206 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1207 gimple
*then_stmt
, *else_stmt
;
1208 basic_block then_bb
, else_bb
;
1209 tree then_label
, else_label
;
1213 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1215 /* Entry basic blocks for each component. */
1216 then_label
= gimple_cond_true_label (entry
);
1217 else_label
= gimple_cond_false_label (entry
);
1218 then_bb
= label_to_block (cfun
, then_label
);
1219 else_bb
= label_to_block (cfun
, else_label
);
1220 then_stmt
= first_stmt (then_bb
);
1221 else_stmt
= first_stmt (else_bb
);
/* True/false edges carry the locus of the first stmt of their arm.  */
1223 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1224 e
->goto_locus
= gimple_location (then_stmt
);
1225 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1227 e
->goto_locus
= gimple_location (else_stmt
);
1229 /* We do not need the labels anymore. */
1230 gimple_cond_set_true_label (entry
, NULL_TREE
);
1231 gimple_cond_set_false_label (entry
, NULL_TREE
);
1235 /* Called for each element in the hash table (P) as we delete the
1236 edge to cases hash table.
1238 Clear all the CASE_CHAINs to prevent problems with copying of
1239 SWITCH_EXPRs and structure sharing rules, then free the hash table
/* hash_map traversal callback: walks VALUE's CASE_CHAIN list, severing
   each link.  */
1243 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1247 for (t
= value
; t
; t
= next
)
/* Save the successor before clearing the chain field.  */
1249 next
= CASE_CHAIN (t
);
1250 CASE_CHAIN (t
) = NULL
;
1256 /* Start recording information mapping edges to case labels. */
1259 start_recording_case_labels (void)
1261 gcc_assert (edge_to_cases
== NULL
);
1262 edge_to_cases
= new hash_map
<edge
, tree
>;
1263 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1266 /* Return nonzero if we are recording information for case labels. */
1269 recording_case_labels_p (void)
1271 return (edge_to_cases
!= NULL
);
1274 /* Stop recording information mapping edges to case labels and
1275 remove any information we have recorded. */
1277 end_recording_case_labels (void)
1281 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1282 delete edge_to_cases
;
1283 edge_to_cases
= NULL
;
1284 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1286 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1289 gimple
*stmt
= last_stmt (bb
);
1290 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1291 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1294 BITMAP_FREE (touched_switch_bbs
);
1297 /* If we are inside a {start,end}_recording_cases block, then return
1298 a chain of CASE_LABEL_EXPRs from T which reference E.
1300 Otherwise return NULL. */
1303 get_cases_for_edge (edge e
, gswitch
*t
)
1308 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1309 chains available. Return NULL so the caller can detect this case. */
1310 if (!recording_case_labels_p ())
1313 slot
= edge_to_cases
->get (e
);
1317 /* If we did not find E in the hash table, then this must be the first
1318 time we have been queried for information about E & T. Add all the
1319 elements from T to the hash table then perform the query again. */
1321 n
= gimple_switch_num_labels (t
);
1322 for (i
= 0; i
< n
; i
++)
1324 tree elt
= gimple_switch_label (t
, i
);
1325 tree lab
= CASE_LABEL (elt
);
1326 basic_block label_bb
= label_to_block (cfun
, lab
);
1327 edge this_edge
= find_edge (e
->src
, label_bb
);
1329 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1331 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1332 CASE_CHAIN (elt
) = s
;
1336 return *edge_to_cases
->get (e
);
1339 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1342 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1346 n
= gimple_switch_num_labels (entry
);
1348 for (i
= 0; i
< n
; ++i
)
1350 basic_block label_bb
= gimple_switch_label_bb (cfun
, entry
, i
);
1351 make_edge (bb
, label_bb
, 0);
1356 /* Return the basic block holding label DEST. */
1359 label_to_block (struct function
*ifun
, tree dest
)
1361 int uid
= LABEL_DECL_UID (dest
);
1363 /* We would die hard when faced by an undefined label. Emit a label to
1364 the very first basic block. This will hopefully make even the dataflow
1365 and undefined variable warnings quite right. */
1366 if (seen_error () && uid
< 0)
1368 gimple_stmt_iterator gsi
=
1369 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1372 stmt
= gimple_build_label (dest
);
1373 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1374 uid
= LABEL_DECL_UID (dest
);
1376 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1378 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1381 /* Create edges for a goto statement at block BB. Returns true
1382 if abnormal edges should be created. */
1385 make_goto_expr_edges (basic_block bb
)
1387 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1388 gimple
*goto_t
= gsi_stmt (last
);
1390 /* A simple GOTO creates normal edges. */
1391 if (simple_goto_p (goto_t
))
1393 tree dest
= gimple_goto_dest (goto_t
);
1394 basic_block label_bb
= label_to_block (cfun
, dest
);
1395 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1396 e
->goto_locus
= gimple_location (goto_t
);
1397 gsi_remove (&last
, true);
1401 /* A computed GOTO creates abnormal edges. */
1405 /* Create edges for an asm statement with labels at block BB. */
1408 make_gimple_asm_edges (basic_block bb
)
1410 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1411 int i
, n
= gimple_asm_nlabels (stmt
);
1413 for (i
= 0; i
< n
; ++i
)
1415 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1416 basic_block label_bb
= label_to_block (cfun
, label
);
1417 make_edge (bb
, label_bb
, 0);
1421 /*---------------------------------------------------------------------------
1423 ---------------------------------------------------------------------------*/
1425 /* Cleanup useless labels in basic blocks. This is something we wish
1426 to do early because it allows us to group case labels before creating
1427 the edges for the CFG, and it speeds up block statement iterators in
1428 all passes later on.
1429 We rerun this pass after CFG is created, to get rid of the labels that
1430 are no longer referenced. After then we do not run it any more, since
1431 (almost) no new labels should be created. */
1433 /* A map from basic block index to the leading label of that block. */
1439 /* True if the label is referenced from somewhere. */
1443 /* Given LABEL return the first label in the same basic block. */
1446 main_block_label (tree label
, label_record
*label_for_bb
)
1448 basic_block bb
= label_to_block (cfun
, label
);
1449 tree main_label
= label_for_bb
[bb
->index
].label
;
1451 /* label_to_block possibly inserted undefined label into the chain. */
1454 label_for_bb
[bb
->index
].label
= label
;
1458 label_for_bb
[bb
->index
].used
= true;
1462 /* Clean up redundant labels within the exception tree. */
1465 cleanup_dead_labels_eh (label_record
*label_for_bb
)
1472 if (cfun
->eh
== NULL
)
1475 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1476 if (lp
&& lp
->post_landing_pad
)
1478 lab
= main_block_label (lp
->post_landing_pad
, label_for_bb
);
1479 if (lab
!= lp
->post_landing_pad
)
1481 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1482 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1486 FOR_ALL_EH_REGION (r
)
1490 case ERT_MUST_NOT_THROW
:
1496 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1500 c
->label
= main_block_label (lab
, label_for_bb
);
1505 case ERT_ALLOWED_EXCEPTIONS
:
1506 lab
= r
->u
.allowed
.label
;
1508 r
->u
.allowed
.label
= main_block_label (lab
, label_for_bb
);
1514 /* Cleanup redundant labels. This is a three-step process:
1515 1) Find the leading label for each block.
1516 2) Redirect all references to labels to the leading labels.
1517 3) Cleanup all useless labels. */
1520 cleanup_dead_labels (void)
1523 label_record
*label_for_bb
= XCNEWVEC (struct label_record
,
1524 last_basic_block_for_fn (cfun
));
1526 /* Find a suitable label for each block. We use the first user-defined
1527 label if there is one, or otherwise just the first label we see. */
1528 FOR_EACH_BB_FN (bb
, cfun
)
1530 gimple_stmt_iterator i
;
1532 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1535 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1540 label
= gimple_label_label (label_stmt
);
1542 /* If we have not yet seen a label for the current block,
1543 remember this one and see if there are more labels. */
1544 if (!label_for_bb
[bb
->index
].label
)
1546 label_for_bb
[bb
->index
].label
= label
;
1550 /* If we did see a label for the current block already, but it
1551 is an artificially created label, replace it if the current
1552 label is a user defined label. */
1553 if (!DECL_ARTIFICIAL (label
)
1554 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1556 label_for_bb
[bb
->index
].label
= label
;
1562 /* Now redirect all jumps/branches to the selected label.
1563 First do so for each block ending in a control statement. */
1564 FOR_EACH_BB_FN (bb
, cfun
)
1566 gimple
*stmt
= last_stmt (bb
);
1567 tree label
, new_label
;
1572 switch (gimple_code (stmt
))
1576 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1577 label
= gimple_cond_true_label (cond_stmt
);
1580 new_label
= main_block_label (label
, label_for_bb
);
1581 if (new_label
!= label
)
1582 gimple_cond_set_true_label (cond_stmt
, new_label
);
1585 label
= gimple_cond_false_label (cond_stmt
);
1588 new_label
= main_block_label (label
, label_for_bb
);
1589 if (new_label
!= label
)
1590 gimple_cond_set_false_label (cond_stmt
, new_label
);
1597 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1598 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1600 /* Replace all destination labels. */
1601 for (i
= 0; i
< n
; ++i
)
1603 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1604 label
= CASE_LABEL (case_label
);
1605 new_label
= main_block_label (label
, label_for_bb
);
1606 if (new_label
!= label
)
1607 CASE_LABEL (case_label
) = new_label
;
1614 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1615 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1617 for (i
= 0; i
< n
; ++i
)
1619 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1620 tree label
= main_block_label (TREE_VALUE (cons
), label_for_bb
);
1621 TREE_VALUE (cons
) = label
;
1626 /* We have to handle gotos until they're removed, and we don't
1627 remove them until after we've created the CFG edges. */
1629 if (!computed_goto_p (stmt
))
1631 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1632 label
= gimple_goto_dest (goto_stmt
);
1633 new_label
= main_block_label (label
, label_for_bb
);
1634 if (new_label
!= label
)
1635 gimple_goto_set_dest (goto_stmt
, new_label
);
1639 case GIMPLE_TRANSACTION
:
1641 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
1643 label
= gimple_transaction_label_norm (txn
);
1646 new_label
= main_block_label (label
, label_for_bb
);
1647 if (new_label
!= label
)
1648 gimple_transaction_set_label_norm (txn
, new_label
);
1651 label
= gimple_transaction_label_uninst (txn
);
1654 new_label
= main_block_label (label
, label_for_bb
);
1655 if (new_label
!= label
)
1656 gimple_transaction_set_label_uninst (txn
, new_label
);
1659 label
= gimple_transaction_label_over (txn
);
1662 new_label
= main_block_label (label
, label_for_bb
);
1663 if (new_label
!= label
)
1664 gimple_transaction_set_label_over (txn
, new_label
);
1674 /* Do the same for the exception region tree labels. */
1675 cleanup_dead_labels_eh (label_for_bb
);
1677 /* Finally, purge dead labels. All user-defined labels and labels that
1678 can be the target of non-local gotos and labels which have their
1679 address taken are preserved. */
1680 FOR_EACH_BB_FN (bb
, cfun
)
1682 gimple_stmt_iterator i
;
1683 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1685 if (!label_for_this_bb
)
1688 /* If the main label of the block is unused, we may still remove it. */
1689 if (!label_for_bb
[bb
->index
].used
)
1690 label_for_this_bb
= NULL
;
1692 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1695 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1700 label
= gimple_label_label (label_stmt
);
1702 if (label
== label_for_this_bb
1703 || !DECL_ARTIFICIAL (label
)
1704 || DECL_NONLOCAL (label
)
1705 || FORCED_LABEL (label
))
1708 gsi_remove (&i
, true);
1712 free (label_for_bb
);
1715 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1716 the ones jumping to the same label.
1717 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1720 group_case_labels_stmt (gswitch
*stmt
)
1722 int old_size
= gimple_switch_num_labels (stmt
);
1723 int i
, next_index
, new_size
;
1724 basic_block default_bb
= NULL
;
1725 hash_set
<tree
> *removed_labels
= NULL
;
1727 default_bb
= gimple_switch_default_bb (cfun
, stmt
);
1729 /* Look for possible opportunities to merge cases. */
1731 while (i
< old_size
)
1733 tree base_case
, base_high
;
1734 basic_block base_bb
;
1736 base_case
= gimple_switch_label (stmt
, i
);
1738 gcc_assert (base_case
);
1739 base_bb
= label_to_block (cfun
, CASE_LABEL (base_case
));
1741 /* Discard cases that have the same destination as the default case or
1742 whose destination blocks have already been removed as unreachable. */
1744 || base_bb
== default_bb
1746 && removed_labels
->contains (CASE_LABEL (base_case
))))
1752 base_high
= CASE_HIGH (base_case
)
1753 ? CASE_HIGH (base_case
)
1754 : CASE_LOW (base_case
);
1757 /* Try to merge case labels. Break out when we reach the end
1758 of the label vector or when we cannot merge the next case
1759 label with the current one. */
1760 while (next_index
< old_size
)
1762 tree merge_case
= gimple_switch_label (stmt
, next_index
);
1763 basic_block merge_bb
= label_to_block (cfun
, CASE_LABEL (merge_case
));
1764 wide_int bhp1
= wi::to_wide (base_high
) + 1;
1766 /* Merge the cases if they jump to the same place,
1767 and their ranges are consecutive. */
1768 if (merge_bb
== base_bb
1769 && (removed_labels
== NULL
1770 || !removed_labels
->contains (CASE_LABEL (merge_case
)))
1771 && wi::to_wide (CASE_LOW (merge_case
)) == bhp1
)
1774 = (CASE_HIGH (merge_case
)
1775 ? CASE_HIGH (merge_case
) : CASE_LOW (merge_case
));
1776 CASE_HIGH (base_case
) = base_high
;
1783 /* Discard cases that have an unreachable destination block. */
1784 if (EDGE_COUNT (base_bb
->succs
) == 0
1785 && gimple_seq_unreachable_p (bb_seq (base_bb
))
1786 /* Don't optimize this if __builtin_unreachable () is the
1787 implicitly added one by the C++ FE too early, before
1788 -Wreturn-type can be diagnosed. We'll optimize it later
1789 during switchconv pass or any other cfg cleanup. */
1790 && (gimple_in_ssa_p (cfun
)
1791 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb
)))
1792 != BUILTINS_LOCATION
)))
1794 edge base_edge
= find_edge (gimple_bb (stmt
), base_bb
);
1795 if (base_edge
!= NULL
)
1797 for (gimple_stmt_iterator gsi
= gsi_start_bb (base_bb
);
1798 !gsi_end_p (gsi
); gsi_next (&gsi
))
1799 if (glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
)))
1801 if (FORCED_LABEL (gimple_label_label (stmt
))
1802 || DECL_NONLOCAL (gimple_label_label (stmt
)))
1804 /* Forced/non-local labels aren't going to be removed,
1805 but they will be moved to some neighbouring basic
1806 block. If some later case label refers to one of
1807 those labels, we should throw that case away rather
1808 than keeping it around and refering to some random
1809 other basic block without an edge to it. */
1810 if (removed_labels
== NULL
)
1811 removed_labels
= new hash_set
<tree
>;
1812 removed_labels
->add (gimple_label_label (stmt
));
1817 remove_edge_and_dominated_blocks (base_edge
);
1824 gimple_switch_set_label (stmt
, new_size
,
1825 gimple_switch_label (stmt
, i
));
1830 gcc_assert (new_size
<= old_size
);
1832 if (new_size
< old_size
)
1833 gimple_switch_set_num_labels (stmt
, new_size
);
1835 delete removed_labels
;
1836 return new_size
< old_size
;
1839 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1840 and scan the sorted vector of cases. Combine the ones jumping to the
1844 group_case_labels (void)
1847 bool changed
= false;
1849 FOR_EACH_BB_FN (bb
, cfun
)
1851 gimple
*stmt
= last_stmt (bb
);
1852 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1853 changed
|= group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1859 /* Checks whether we can merge block B into block A. */
1862 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1866 if (!single_succ_p (a
))
1869 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1872 if (single_succ (a
) != b
)
1875 if (!single_pred_p (b
))
1878 if (a
== ENTRY_BLOCK_PTR_FOR_FN (cfun
)
1879 || b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1882 /* If A ends by a statement causing exceptions or something similar, we
1883 cannot merge the blocks. */
1884 stmt
= last_stmt (a
);
1885 if (stmt
&& stmt_ends_bb_p (stmt
))
1888 /* Do not allow a block with only a non-local label to be merged. */
1890 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1891 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1894 /* Examine the labels at the beginning of B. */
1895 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1899 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1902 lab
= gimple_label_label (label_stmt
);
1904 /* Do not remove user forced labels or for -O0 any user labels. */
1905 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1909 /* Protect simple loop latches. We only want to avoid merging
1910 the latch with the loop header or with a block in another
1911 loop in this case. */
1913 && b
->loop_father
->latch
== b
1914 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1915 && (b
->loop_father
->header
== a
1916 || b
->loop_father
!= a
->loop_father
))
1919 /* It must be possible to eliminate all phi nodes in B. If ssa form
1920 is not up-to-date and a name-mapping is registered, we cannot eliminate
1921 any phis. Symbols marked for renaming are never a problem though. */
1922 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1925 gphi
*phi
= gsi
.phi ();
1926 /* Technically only new names matter. */
1927 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1931 /* When not optimizing, don't merge if we'd lose goto_locus. */
1933 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1935 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1936 gimple_stmt_iterator prev
, next
;
1937 prev
= gsi_last_nondebug_bb (a
);
1938 next
= gsi_after_labels (b
);
1939 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1940 gsi_next_nondebug (&next
);
1941 if ((gsi_end_p (prev
)
1942 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1943 && (gsi_end_p (next
)
1944 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1951 /* Replaces all uses of NAME by VAL. */
1954 replace_uses_by (tree name
, tree val
)
1956 imm_use_iterator imm_iter
;
1961 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1963 /* Mark the block if we change the last stmt in it. */
1964 if (cfgcleanup_altered_bbs
1965 && stmt_ends_bb_p (stmt
))
1966 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1968 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1970 replace_exp (use
, val
);
1972 if (gimple_code (stmt
) == GIMPLE_PHI
)
1974 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1975 PHI_ARG_INDEX_FROM_USE (use
));
1976 if (e
->flags
& EDGE_ABNORMAL
1977 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1979 /* This can only occur for virtual operands, since
1980 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1981 would prevent replacement. */
1982 gcc_checking_assert (virtual_operand_p (name
));
1983 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1988 if (gimple_code (stmt
) != GIMPLE_PHI
)
1990 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1991 gimple
*orig_stmt
= stmt
;
1994 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1995 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1996 only change sth from non-invariant to invariant, and only
1997 when propagating constants. */
1998 if (is_gimple_min_invariant (val
))
1999 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
2001 tree op
= gimple_op (stmt
, i
);
2002 /* Operands may be empty here. For example, the labels
2003 of a GIMPLE_COND are nulled out following the creation
2004 of the corresponding CFG edges. */
2005 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
2006 recompute_tree_invariant_for_addr_expr (op
);
2009 if (fold_stmt (&gsi
))
2010 stmt
= gsi_stmt (gsi
);
2012 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
2013 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
2019 gcc_checking_assert (has_zero_uses (name
));
2021 /* Also update the trees stored in loop structures. */
2026 FOR_EACH_LOOP (loop
, 0)
2028 substitute_in_loop_info (loop
, name
, val
);
2033 /* Merge block B into block A. */
2036 gimple_merge_blocks (basic_block a
, basic_block b
)
2038 gimple_stmt_iterator last
, gsi
;
2042 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
2044 /* Remove all single-valued PHI nodes from block B of the form
2045 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2046 gsi
= gsi_last_bb (a
);
2047 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
2049 gimple
*phi
= gsi_stmt (psi
);
2050 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
2052 bool may_replace_uses
= (virtual_operand_p (def
)
2053 || may_propagate_copy (def
, use
));
2055 /* In case we maintain loop closed ssa form, do not propagate arguments
2056 of loop exit phi nodes. */
2058 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
2059 && !virtual_operand_p (def
)
2060 && TREE_CODE (use
) == SSA_NAME
2061 && a
->loop_father
!= b
->loop_father
)
2062 may_replace_uses
= false;
2064 if (!may_replace_uses
)
2066 gcc_assert (!virtual_operand_p (def
));
2068 /* Note that just emitting the copies is fine -- there is no problem
2069 with ordering of phi nodes. This is because A is the single
2070 predecessor of B, therefore results of the phi nodes cannot
2071 appear as arguments of the phi nodes. */
2072 copy
= gimple_build_assign (def
, use
);
2073 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
2074 remove_phi_node (&psi
, false);
2078 /* If we deal with a PHI for virtual operands, we can simply
2079 propagate these without fussing with folding or updating
2081 if (virtual_operand_p (def
))
2083 imm_use_iterator iter
;
2084 use_operand_p use_p
;
2087 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
2088 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2089 SET_USE (use_p
, use
);
2091 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
2092 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
2095 replace_uses_by (def
, use
);
2097 remove_phi_node (&psi
, true);
2101 /* Ensure that B follows A. */
2102 move_block_after (b
, a
);
2104 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
2105 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
2107 /* Remove labels from B and set gimple_bb to A for other statements. */
2108 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
2110 gimple
*stmt
= gsi_stmt (gsi
);
2111 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2113 tree label
= gimple_label_label (label_stmt
);
2116 gsi_remove (&gsi
, false);
2118 /* Now that we can thread computed gotos, we might have
2119 a situation where we have a forced label in block B
2120 However, the label at the start of block B might still be
2121 used in other ways (think about the runtime checking for
2122 Fortran assigned gotos). So we cannot just delete the
2123 label. Instead we move the label to the start of block A. */
2124 if (FORCED_LABEL (label
))
2126 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
2127 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
2129 /* Other user labels keep around in a form of a debug stmt. */
2130 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_BIND_STMTS
)
2132 gimple
*dbg
= gimple_build_debug_bind (label
,
2135 gimple_debug_bind_reset_value (dbg
);
2136 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
2139 lp_nr
= EH_LANDING_PAD_NR (label
);
2142 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
2143 lp
->post_landing_pad
= NULL
;
2148 gimple_set_bb (stmt
, a
);
2153 /* When merging two BBs, if their counts are different, the larger count
2154 is selected as the new bb count. This is to handle inconsistent
2156 if (a
->loop_father
== b
->loop_father
)
2158 a
->count
= a
->count
.merge (b
->count
);
2161 /* Merge the sequences. */
2162 last
= gsi_last_bb (a
);
2163 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
2164 set_bb_seq (b
, NULL
);
2166 if (cfgcleanup_altered_bbs
)
2167 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
2171 /* Return the one of two successors of BB that is not reachable by a
2172 complex edge, if there is one. Else, return BB. We use
2173 this in optimizations that use post-dominators for their heuristics,
2174 to catch the cases in C++ where function calls are involved. */
2177 single_noncomplex_succ (basic_block bb
)
2180 if (EDGE_COUNT (bb
->succs
) != 2)
2183 e0
= EDGE_SUCC (bb
, 0);
2184 e1
= EDGE_SUCC (bb
, 1);
2185 if (e0
->flags
& EDGE_COMPLEX
)
2187 if (e1
->flags
& EDGE_COMPLEX
)
2193 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2196 notice_special_calls (gcall
*call
)
2198 int flags
= gimple_call_flags (call
);
2200 if (flags
& ECF_MAY_BE_ALLOCA
)
2201 cfun
->calls_alloca
= true;
2202 if (flags
& ECF_RETURNS_TWICE
)
2203 cfun
->calls_setjmp
= true;
2207 /* Clear flags set by notice_special_calls. Used by dead code removal
2208 to update the flags. */
2211 clear_special_calls (void)
2213 cfun
->calls_alloca
= false;
2214 cfun
->calls_setjmp
= false;
2217 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2220 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2222 /* Since this block is no longer reachable, we can just delete all
2223 of its PHI nodes. */
2224 remove_phi_nodes (bb
);
2226 /* Remove edges to BB's successors. */
2227 while (EDGE_COUNT (bb
->succs
) > 0)
2228 remove_edge (EDGE_SUCC (bb
, 0));
2232 /* Remove statements of basic block BB. */
2235 remove_bb (basic_block bb
)
2237 gimple_stmt_iterator i
;
2241 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2242 if (dump_flags
& TDF_DETAILS
)
2244 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2245 fprintf (dump_file
, "\n");
2251 class loop
*loop
= bb
->loop_father
;
2253 /* If a loop gets removed, clean up the information associated
2255 if (loop
->latch
== bb
2256 || loop
->header
== bb
)
2257 free_numbers_of_iterations_estimates (loop
);
2260 /* Remove all the instructions in the block. */
2261 if (bb_seq (bb
) != NULL
)
2263 /* Walk backwards so as to get a chance to substitute all
2264 released DEFs into debug stmts. See
2265 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2267 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2269 gimple
*stmt
= gsi_stmt (i
);
2270 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2272 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2273 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2276 gimple_stmt_iterator new_gsi
;
2278 /* A non-reachable non-local label may still be referenced.
2279 But it no longer needs to carry the extra semantics of
2281 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2283 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2284 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2287 new_bb
= bb
->prev_bb
;
2288 /* Don't move any labels into ENTRY block. */
2289 if (new_bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
2291 new_bb
= single_succ (new_bb
);
2292 gcc_assert (new_bb
!= bb
);
2294 new_gsi
= gsi_after_labels (new_bb
);
2295 gsi_remove (&i
, false);
2296 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2300 /* Release SSA definitions. */
2301 release_defs (stmt
);
2302 gsi_remove (&i
, true);
2306 i
= gsi_last_bb (bb
);
2312 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2313 bb
->il
.gimple
.seq
= NULL
;
2314 bb
->il
.gimple
.phi_nodes
= NULL
;
2318 /* Given a basic block BB and a value VAL for use in the final statement
2319 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2320 the edge that will be taken out of the block.
2321 If VAL is NULL_TREE, then the current value of the final statement's
2322 predicate or index is used.
2323 If the value does not match a unique edge, NULL is returned. */
2326 find_taken_edge (basic_block bb
, tree val
)
2330 stmt
= last_stmt (bb
);
2332 /* Handle ENTRY and EXIT. */
2336 if (gimple_code (stmt
) == GIMPLE_COND
)
2337 return find_taken_edge_cond_expr (as_a
<gcond
*> (stmt
), val
);
2339 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2340 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), val
);
2342 if (computed_goto_p (stmt
))
2344 /* Only optimize if the argument is a label, if the argument is
2345 not a label then we cannot construct a proper CFG.
2347 It may be the case that we only need to allow the LABEL_REF to
2348 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2349 appear inside a LABEL_EXPR just to be safe. */
2351 && (TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2352 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2353 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2356 /* Otherwise we only know the taken successor edge if it's unique. */
2357 return single_succ_p (bb
) ? single_succ_edge (bb
) : NULL
;
2360 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2361 statement, determine which of the outgoing edges will be taken out of the
2362 block. Return NULL if either edge may be taken. */
2365 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2370 dest
= label_to_block (cfun
, val
);
2372 e
= find_edge (bb
, dest
);
2374 /* It's possible for find_edge to return NULL here on invalid code
2375 that abuses the labels-as-values extension (e.g. code that attempts to
2376 jump *between* functions via stored labels-as-values; PR 84136).
2377 If so, then we simply return that NULL for the edge.
2378 We don't currently have a way of detecting such invalid code, so we
2379 can't assert that it was the case when a NULL edge occurs here. */
2384 /* Given COND_STMT and a constant value VAL for use as the predicate,
2385 determine which of the two edges will be taken out of
2386 the statement's block. Return NULL if either edge may be taken.
2387 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2391 find_taken_edge_cond_expr (const gcond
*cond_stmt
, tree val
)
2393 edge true_edge
, false_edge
;
2395 if (val
== NULL_TREE
)
2397 /* Use the current value of the predicate. */
2398 if (gimple_cond_true_p (cond_stmt
))
2399 val
= integer_one_node
;
2400 else if (gimple_cond_false_p (cond_stmt
))
2401 val
= integer_zero_node
;
2405 else if (TREE_CODE (val
) != INTEGER_CST
)
2408 extract_true_false_edges_from_block (gimple_bb (cond_stmt
),
2409 &true_edge
, &false_edge
);
2411 return (integer_zerop (val
) ? false_edge
: true_edge
);
2414 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2415 which edge will be taken out of the statement's block. Return NULL if any
2417 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2421 find_taken_edge_switch_expr (const gswitch
*switch_stmt
, tree val
)
2423 basic_block dest_bb
;
2427 if (gimple_switch_num_labels (switch_stmt
) == 1)
2428 taken_case
= gimple_switch_default_label (switch_stmt
);
2431 if (val
== NULL_TREE
)
2432 val
= gimple_switch_index (switch_stmt
);
2433 if (TREE_CODE (val
) != INTEGER_CST
)
2436 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2438 dest_bb
= label_to_block (cfun
, CASE_LABEL (taken_case
));
2440 e
= find_edge (gimple_bb (switch_stmt
), dest_bb
);
2446 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2447 We can make optimal use here of the fact that the case labels are
2448 sorted: We can do a binary search for a case matching VAL. */
2451 find_case_label_for_value (const gswitch
*switch_stmt
, tree val
)
2453 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2454 tree default_case
= gimple_switch_default_label (switch_stmt
);
2456 for (low
= 0, high
= n
; high
- low
> 1; )
2458 size_t i
= (high
+ low
) / 2;
2459 tree t
= gimple_switch_label (switch_stmt
, i
);
2462 /* Cache the result of comparing CASE_LOW and val. */
2463 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2470 if (CASE_HIGH (t
) == NULL
)
2472 /* A singe-valued case label. */
2478 /* A case range. We can only handle integer ranges. */
2479 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2484 return default_case
;
2488 /* Dump a basic block on stderr. */
2491 gimple_debug_bb (basic_block bb
)
2493 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2497 /* Dump basic block with index N on stderr. */
2500 gimple_debug_bb_n (int n
)
2502 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2503 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2507 /* Dump the CFG on stderr.
2509 FLAGS are the same used by the tree dumping functions
2510 (see TDF_* in dumpfile.h). */
2513 gimple_debug_cfg (dump_flags_t flags
)
2515 gimple_dump_cfg (stderr
, flags
);
2519 /* Dump the program showing basic block boundaries on the given FILE.
2521 FLAGS are the same used by the tree dumping functions (see TDF_* in
2525 gimple_dump_cfg (FILE *file
, dump_flags_t flags
)
2527 if (flags
& TDF_DETAILS
)
2529 dump_function_header (file
, current_function_decl
, flags
);
2530 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2531 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2532 last_basic_block_for_fn (cfun
));
2534 brief_dump_cfg (file
, flags
);
2535 fprintf (file
, "\n");
2538 if (flags
& TDF_STATS
)
2539 dump_cfg_stats (file
);
2541 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2545 /* Dump CFG statistics on FILE. */
2548 dump_cfg_stats (FILE *file
)
2550 static long max_num_merged_labels
= 0;
2551 unsigned long size
, total
= 0;
2554 const char * const fmt_str
= "%-30s%-13s%12s\n";
2555 const char * const fmt_str_1
= "%-30s%13d" PRsa (11) "\n";
2556 const char * const fmt_str_2
= "%-30s%13ld" PRsa (11) "\n";
2557 const char * const fmt_str_3
= "%-43s" PRsa (11) "\n";
2558 const char *funcname
= current_function_name ();
2560 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2562 fprintf (file
, "---------------------------------------------------------\n");
2563 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2564 fprintf (file
, fmt_str
, "", " instances ", "used ");
2565 fprintf (file
, "---------------------------------------------------------\n");
2567 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2569 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2570 SIZE_AMOUNT (size
));
2573 FOR_EACH_BB_FN (bb
, cfun
)
2574 num_edges
+= EDGE_COUNT (bb
->succs
);
2575 size
= num_edges
* sizeof (class edge_def
);
2577 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SIZE_AMOUNT (size
));
2579 fprintf (file
, "---------------------------------------------------------\n");
2580 fprintf (file
, fmt_str_3
, "Total memory used by CFG data",
2581 SIZE_AMOUNT (total
));
2582 fprintf (file
, "---------------------------------------------------------\n");
2583 fprintf (file
, "\n");
2585 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2586 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2588 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2589 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2591 fprintf (file
, "\n");
2595 /* Dump CFG statistics on stderr. Keep extern so that it's always
2596 linked in the final executable. */
2599 debug_cfg_stats (void)
2601 dump_cfg_stats (stderr
);
2604 /*---------------------------------------------------------------------------
2605 Miscellaneous helpers
2606 ---------------------------------------------------------------------------*/
2608 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2609 flow. Transfers of control flow associated with EH are excluded. */
2612 call_can_make_abnormal_goto (gimple
*t
)
2614 /* If the function has no non-local labels, then a call cannot make an
2615 abnormal transfer of control. */
2616 if (!cfun
->has_nonlocal_label
2617 && !cfun
->calls_setjmp
)
2620 /* Likewise if the call has no side effects. */
2621 if (!gimple_has_side_effects (t
))
2624 /* Likewise if the called function is leaf. */
2625 if (gimple_call_flags (t
) & ECF_LEAF
)
2632 /* Return true if T can make an abnormal transfer of control flow.
2633 Transfers of control flow associated with EH are excluded. */
2636 stmt_can_make_abnormal_goto (gimple
*t
)
2638 if (computed_goto_p (t
))
2640 if (is_gimple_call (t
))
2641 return call_can_make_abnormal_goto (t
);
2646 /* Return true if T represents a stmt that always transfers control. */
2649 is_ctrl_stmt (gimple
*t
)
2651 switch (gimple_code (t
))
2665 /* Return true if T is a statement that may alter the flow of control
2666 (e.g., a call to a non-returning function). */
2669 is_ctrl_altering_stmt (gimple
*t
)
2673 switch (gimple_code (t
))
2676 /* Per stmt call flag indicates whether the call could alter
2678 if (gimple_call_ctrl_altering_p (t
))
2682 case GIMPLE_EH_DISPATCH
:
2683 /* EH_DISPATCH branches to the individual catch handlers at
2684 this level of a try or allowed-exceptions region. It can
2685 fallthru to the next statement as well. */
2689 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2694 /* OpenMP directives alter control flow. */
2697 case GIMPLE_TRANSACTION
:
2698 /* A transaction start alters control flow. */
2705 /* If a statement can throw, it alters control flow. */
2706 return stmt_can_throw_internal (cfun
, t
);
2710 /* Return true if T is a simple local goto. */
2713 simple_goto_p (gimple
*t
)
2715 return (gimple_code (t
) == GIMPLE_GOTO
2716 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2720 /* Return true if STMT should start a new basic block. PREV_STMT is
2721 the statement preceding STMT. It is used when STMT is a label or a
2722 case label. Labels should only start a new basic block if their
2723 previous statement wasn't a label. Otherwise, sequence of labels
2724 would generate unnecessary basic blocks that only contain a single
2728 stmt_starts_bb_p (gimple
*stmt
, gimple
*prev_stmt
)
2733 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2734 any nondebug stmts in the block. We don't want to start another
2735 block in this case: the debug stmt will already have started the
2736 one STMT would start if we weren't outputting debug stmts. */
2737 if (prev_stmt
&& is_gimple_debug (prev_stmt
))
2740 /* Labels start a new basic block only if the preceding statement
2741 wasn't a label of the same type. This prevents the creation of
2742 consecutive blocks that have nothing but a single label. */
2743 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2745 /* Nonlocal and computed GOTO targets always start a new block. */
2746 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2747 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2750 if (glabel
*plabel
= safe_dyn_cast
<glabel
*> (prev_stmt
))
2752 if (DECL_NONLOCAL (gimple_label_label (plabel
))
2753 || !DECL_ARTIFICIAL (gimple_label_label (plabel
)))
2756 cfg_stats
.num_merged_labels
++;
2762 else if (gimple_code (stmt
) == GIMPLE_CALL
)
2764 if (gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2765 /* setjmp acts similar to a nonlocal GOTO target and thus should
2766 start a new block. */
2768 if (gimple_call_internal_p (stmt
, IFN_PHI
)
2770 && gimple_code (prev_stmt
) != GIMPLE_LABEL
2771 && (gimple_code (prev_stmt
) != GIMPLE_CALL
2772 || ! gimple_call_internal_p (prev_stmt
, IFN_PHI
)))
2773 /* PHI nodes start a new block unless preceeded by a label
2782 /* Return true if T should end a basic block. */
2785 stmt_ends_bb_p (gimple
*t
)
2787 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
/* Remove block annotations and other data structures.  Frees the
   label-to-block map of function FN.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
2798 /* Return the virtual phi in BB. */
2801 get_virtual_phi (basic_block bb
)
2803 for (gphi_iterator gsi
= gsi_start_phis (bb
);
2807 gphi
*phi
= gsi
.phi ();
2809 if (virtual_operand_p (PHI_RESULT (phi
)))
2816 /* Return the first statement in basic block BB. */
2819 first_stmt (basic_block bb
)
2821 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2822 gimple
*stmt
= NULL
;
2824 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2832 /* Return the first non-label statement in basic block BB. */
2835 first_non_label_stmt (basic_block bb
)
2837 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2838 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2840 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2843 /* Return the last statement in basic block BB. */
2846 last_stmt (basic_block bb
)
2848 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2849 gimple
*stmt
= NULL
;
2851 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2859 /* Return the last statement of an otherwise empty block. Return NULL
2860 if the block is totally empty, or if it contains more than one
2864 last_and_only_stmt (basic_block bb
)
2866 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2867 gimple
*last
, *prev
;
2872 last
= gsi_stmt (i
);
2873 gsi_prev_nondebug (&i
);
2877 /* Empty statements should no longer appear in the instruction stream.
2878 Everything that might have appeared before should be deleted by
2879 remove_useless_stmts, and the optimizers should just gsi_remove
2880 instead of smashing with build_empty_stmt.
2882 Thus the only thing that should appear here in a block containing
2883 one executable statement is a label. */
2884 prev
= gsi_stmt (i
);
2885 if (gimple_code (prev
) == GIMPLE_LABEL
)
2891 /* Returns the basic block after which the new basic block created
2892 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2893 near its "logical" location. This is of most help to humans looking
2894 at debugging dumps. */
2897 split_edge_bb_loc (edge edge_in
)
2899 basic_block dest
= edge_in
->dest
;
2900 basic_block dest_prev
= dest
->prev_bb
;
2904 edge e
= find_edge (dest_prev
, dest
);
2905 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2906 return edge_in
->src
;
2911 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2912 Abort on abnormal edges. */
2915 gimple_split_edge (edge edge_in
)
2917 basic_block new_bb
, after_bb
, dest
;
2920 /* Abnormal edges cannot be split. */
2921 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2923 dest
= edge_in
->dest
;
2925 after_bb
= split_edge_bb_loc (edge_in
);
2927 new_bb
= create_empty_bb (after_bb
);
2928 new_bb
->count
= edge_in
->count ();
2930 /* We want to avoid re-allocating PHIs when we first
2931 add the fallthru edge from new_bb to dest but we also
2932 want to avoid changing PHI argument order when
2933 first redirecting edge_in away from dest. The former
2934 avoids changing PHI argument order by adding them
2935 last and then the redirection swapping it back into
2936 place by means of unordered remove.
2937 So hack around things by temporarily removing all PHIs
2938 from the destination during the edge redirection and then
2939 making sure the edges stay in order. */
2940 gimple_seq saved_phis
= phi_nodes (dest
);
2941 unsigned old_dest_idx
= edge_in
->dest_idx
;
2942 set_phi_nodes (dest
, NULL
);
2943 new_edge
= make_single_succ_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2944 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2945 gcc_assert (e
== edge_in
&& new_edge
->dest_idx
== old_dest_idx
);
2946 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2947 dest
->il
.gimple
.phi_nodes
= saved_phis
;
2953 /* Verify properties of the address expression T whose base should be
2954 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2957 verify_address (tree t
, bool verify_addressable
)
2960 bool old_side_effects
;
2962 bool new_side_effects
;
2964 old_constant
= TREE_CONSTANT (t
);
2965 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2967 recompute_tree_invariant_for_addr_expr (t
);
2968 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2969 new_constant
= TREE_CONSTANT (t
);
2971 if (old_constant
!= new_constant
)
2973 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2976 if (old_side_effects
!= new_side_effects
)
2978 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2982 tree base
= TREE_OPERAND (t
, 0);
2983 while (handled_component_p (base
))
2984 base
= TREE_OPERAND (base
, 0);
2987 || TREE_CODE (base
) == PARM_DECL
2988 || TREE_CODE (base
) == RESULT_DECL
))
2991 if (verify_addressable
&& !TREE_ADDRESSABLE (base
))
2993 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3001 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3002 Returns true if there is an error, otherwise false. */
3005 verify_types_in_gimple_min_lval (tree expr
)
3009 if (is_gimple_id (expr
))
3012 if (TREE_CODE (expr
) != TARGET_MEM_REF
3013 && TREE_CODE (expr
) != MEM_REF
)
3015 error ("invalid expression for min lvalue");
3019 /* TARGET_MEM_REFs are strange beasts. */
3020 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3023 op
= TREE_OPERAND (expr
, 0);
3024 if (!is_gimple_val (op
))
3026 error ("invalid operand in indirect reference");
3027 debug_generic_stmt (op
);
3030 /* Memory references now generally can involve a value conversion. */
3035 /* Verify if EXPR is a valid GIMPLE reference expression. If
3036 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3037 if there is an error, otherwise false. */
3040 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3042 const char *code_name
= get_tree_code_name (TREE_CODE (expr
));
3044 if (TREE_CODE (expr
) == REALPART_EXPR
3045 || TREE_CODE (expr
) == IMAGPART_EXPR
3046 || TREE_CODE (expr
) == BIT_FIELD_REF
)
3048 tree op
= TREE_OPERAND (expr
, 0);
3049 if (!is_gimple_reg_type (TREE_TYPE (expr
)))
3051 error ("non-scalar %qs", code_name
);
3055 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
3057 tree t1
= TREE_OPERAND (expr
, 1);
3058 tree t2
= TREE_OPERAND (expr
, 2);
3059 poly_uint64 size
, bitpos
;
3060 if (!poly_int_tree_p (t1
, &size
)
3061 || !poly_int_tree_p (t2
, &bitpos
)
3062 || !types_compatible_p (bitsizetype
, TREE_TYPE (t1
))
3063 || !types_compatible_p (bitsizetype
, TREE_TYPE (t2
)))
3065 error ("invalid position or size operand to %qs", code_name
);
3068 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3069 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr
)), size
))
3071 error ("integral result type precision does not match "
3072 "field size of %qs", code_name
);
3075 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3076 && TYPE_MODE (TREE_TYPE (expr
)) != BLKmode
3077 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr
))),
3080 error ("mode size of non-integral result does not "
3081 "match field size of %qs",
3085 if (INTEGRAL_TYPE_P (TREE_TYPE (op
))
3086 && !type_has_mode_precision_p (TREE_TYPE (op
)))
3088 error ("%qs of non-mode-precision operand", code_name
);
3091 if (!AGGREGATE_TYPE_P (TREE_TYPE (op
))
3092 && maybe_gt (size
+ bitpos
,
3093 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op
)))))
3095 error ("position plus size exceeds size of referenced object in "
3101 if ((TREE_CODE (expr
) == REALPART_EXPR
3102 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3103 && !useless_type_conversion_p (TREE_TYPE (expr
),
3104 TREE_TYPE (TREE_TYPE (op
))))
3106 error ("type mismatch in %qs reference", code_name
);
3107 debug_generic_stmt (TREE_TYPE (expr
));
3108 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3114 while (handled_component_p (expr
))
3116 code_name
= get_tree_code_name (TREE_CODE (expr
));
3118 if (TREE_CODE (expr
) == REALPART_EXPR
3119 || TREE_CODE (expr
) == IMAGPART_EXPR
3120 || TREE_CODE (expr
) == BIT_FIELD_REF
)
3122 error ("non-top-level %qs", code_name
);
3126 tree op
= TREE_OPERAND (expr
, 0);
3128 if (TREE_CODE (expr
) == ARRAY_REF
3129 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3131 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3132 || (TREE_OPERAND (expr
, 2)
3133 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3134 || (TREE_OPERAND (expr
, 3)
3135 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3137 error ("invalid operands to %qs", code_name
);
3138 debug_generic_stmt (expr
);
3143 /* Verify if the reference array element types are compatible. */
3144 if (TREE_CODE (expr
) == ARRAY_REF
3145 && !useless_type_conversion_p (TREE_TYPE (expr
),
3146 TREE_TYPE (TREE_TYPE (op
))))
3148 error ("type mismatch in %qs", code_name
);
3149 debug_generic_stmt (TREE_TYPE (expr
));
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3153 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3154 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3155 TREE_TYPE (TREE_TYPE (op
))))
3157 error ("type mismatch in %qs", code_name
);
3158 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3159 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3163 if (TREE_CODE (expr
) == COMPONENT_REF
)
3165 if (TREE_OPERAND (expr
, 2)
3166 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3168 error ("invalid %qs offset operator", code_name
);
3171 if (!useless_type_conversion_p (TREE_TYPE (expr
),
3172 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3174 error ("type mismatch in %qs", code_name
);
3175 debug_generic_stmt (TREE_TYPE (expr
));
3176 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3181 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3183 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3184 that their operand is not an SSA name or an invariant when
3185 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3186 bug). Otherwise there is nothing to verify, gross mismatches at
3187 most invoke undefined behavior. */
3189 && (TREE_CODE (op
) == SSA_NAME
3190 || is_gimple_min_invariant (op
)))
3192 error ("conversion of %qs on the left hand side of %qs",
3193 get_tree_code_name (TREE_CODE (op
)), code_name
);
3194 debug_generic_stmt (expr
);
3197 else if (TREE_CODE (op
) == SSA_NAME
3198 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3200 error ("conversion of register to a different size in %qs",
3202 debug_generic_stmt (expr
);
3205 else if (!handled_component_p (op
))
3212 code_name
= get_tree_code_name (TREE_CODE (expr
));
3214 if (TREE_CODE (expr
) == MEM_REF
)
3216 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0))
3217 || (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
3218 && verify_address (TREE_OPERAND (expr
, 0), false)))
3220 error ("invalid address operand in %qs", code_name
);
3221 debug_generic_stmt (expr
);
3224 if (!poly_int_tree_p (TREE_OPERAND (expr
, 1))
3225 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3227 error ("invalid offset operand in %qs", code_name
);
3228 debug_generic_stmt (expr
);
3231 if (MR_DEPENDENCE_CLIQUE (expr
) != 0
3232 && MR_DEPENDENCE_CLIQUE (expr
) > cfun
->last_clique
)
3234 error ("invalid clique in %qs", code_name
);
3235 debug_generic_stmt (expr
);
3239 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3241 if (!TMR_BASE (expr
)
3242 || !is_gimple_mem_ref_addr (TMR_BASE (expr
))
3243 || (TREE_CODE (TMR_BASE (expr
)) == ADDR_EXPR
3244 && verify_address (TMR_BASE (expr
), false)))
3246 error ("invalid address operand in %qs", code_name
);
3249 if (!TMR_OFFSET (expr
)
3250 || !poly_int_tree_p (TMR_OFFSET (expr
))
3251 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3253 error ("invalid offset operand in %qs", code_name
);
3254 debug_generic_stmt (expr
);
3257 if (MR_DEPENDENCE_CLIQUE (expr
) != 0
3258 && MR_DEPENDENCE_CLIQUE (expr
) > cfun
->last_clique
)
3260 error ("invalid clique in %qs", code_name
);
3261 debug_generic_stmt (expr
);
3265 else if (TREE_CODE (expr
) == INDIRECT_REF
)
3267 error ("%qs in gimple IL", code_name
);
3268 debug_generic_stmt (expr
);
3272 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3273 && verify_types_in_gimple_min_lval (expr
));
3276 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3277 list of pointer-to types that is trivially convertible to DEST. */
3280 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3284 if (!TYPE_POINTER_TO (src_obj
))
3287 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3288 if (useless_type_conversion_p (dest
, src
))
3294 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3295 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3298 valid_fixed_convert_types_p (tree type1
, tree type2
)
3300 return (FIXED_POINT_TYPE_P (type1
)
3301 && (INTEGRAL_TYPE_P (type2
)
3302 || SCALAR_FLOAT_TYPE_P (type2
)
3303 || FIXED_POINT_TYPE_P (type2
)));
3306 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3307 is a problem, otherwise false. */
3310 verify_gimple_call (gcall
*stmt
)
3312 tree fn
= gimple_call_fn (stmt
);
3313 tree fntype
, fndecl
;
3316 if (gimple_call_internal_p (stmt
))
3320 error ("gimple call has two targets");
3321 debug_generic_stmt (fn
);
3329 error ("gimple call has no target");
3334 if (fn
&& !is_gimple_call_addr (fn
))
3336 error ("invalid function in gimple call");
3337 debug_generic_stmt (fn
);
3342 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3343 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3344 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3346 error ("non-function in gimple call");
3350 fndecl
= gimple_call_fndecl (stmt
);
3352 && TREE_CODE (fndecl
) == FUNCTION_DECL
3353 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3354 && !DECL_PURE_P (fndecl
)
3355 && !TREE_READONLY (fndecl
))
3357 error ("invalid pure const state for function");
3361 tree lhs
= gimple_call_lhs (stmt
);
3363 && (!is_gimple_lvalue (lhs
)
3364 || verify_types_in_gimple_reference (lhs
, true)))
3366 error ("invalid LHS in gimple call");
3370 if (gimple_call_ctrl_altering_p (stmt
)
3371 && gimple_call_noreturn_p (stmt
)
3372 && should_remove_lhs_p (lhs
))
3374 error ("LHS in %<noreturn%> call");
3378 fntype
= gimple_call_fntype (stmt
);
3381 && !useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (fntype
))
3382 /* ??? At least C++ misses conversions at assignments from
3383 void * call results.
3384 For now simply allow arbitrary pointer type conversions. */
3385 && !(POINTER_TYPE_P (TREE_TYPE (lhs
))
3386 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3388 error ("invalid conversion in gimple call");
3389 debug_generic_stmt (TREE_TYPE (lhs
));
3390 debug_generic_stmt (TREE_TYPE (fntype
));
3394 if (gimple_call_chain (stmt
)
3395 && !is_gimple_val (gimple_call_chain (stmt
)))
3397 error ("invalid static chain in gimple call");
3398 debug_generic_stmt (gimple_call_chain (stmt
));
3402 /* If there is a static chain argument, the call should either be
3403 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3404 if (gimple_call_chain (stmt
)
3406 && !DECL_STATIC_CHAIN (fndecl
))
3408 error ("static chain with function that doesn%'t use one");
3412 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3414 switch (DECL_FUNCTION_CODE (fndecl
))
3416 case BUILT_IN_UNREACHABLE
:
3418 if (gimple_call_num_args (stmt
) > 0)
3420 /* Built-in unreachable with parameters might not be caught by
3421 undefined behavior sanitizer. Front-ends do check users do not
3422 call them that way but we also produce calls to
3423 __builtin_unreachable internally, for example when IPA figures
3424 out a call cannot happen in a legal program. In such cases,
3425 we must make sure arguments are stripped off. */
3426 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3436 /* ??? The C frontend passes unpromoted arguments in case it
3437 didn't see a function declaration before the call. So for now
3438 leave the call arguments mostly unverified. Once we gimplify
3439 unit-at-a-time we have a chance to fix this. */
3441 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3443 tree arg
= gimple_call_arg (stmt
, i
);
3444 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3445 && !is_gimple_val (arg
))
3446 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3447 && !is_gimple_lvalue (arg
)))
3449 error ("invalid argument to gimple call");
3450 debug_generic_expr (arg
);
3458 /* Verifies the gimple comparison with the result type TYPE and
3459 the operands OP0 and OP1, comparison code is CODE. */
3462 verify_gimple_comparison (tree type
, tree op0
, tree op1
, enum tree_code code
)
3464 tree op0_type
= TREE_TYPE (op0
);
3465 tree op1_type
= TREE_TYPE (op1
);
3467 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3469 error ("invalid operands in gimple comparison");
3473 /* For comparisons we do not have the operations type as the
3474 effective type the comparison is carried out in. Instead
3475 we require that either the first operand is trivially
3476 convertible into the second, or the other way around. */
3477 if (!useless_type_conversion_p (op0_type
, op1_type
)
3478 && !useless_type_conversion_p (op1_type
, op0_type
))
3480 error ("mismatching comparison operand types");
3481 debug_generic_expr (op0_type
);
3482 debug_generic_expr (op1_type
);
3486 /* The resulting type of a comparison may be an effective boolean type. */
3487 if (INTEGRAL_TYPE_P (type
)
3488 && (TREE_CODE (type
) == BOOLEAN_TYPE
3489 || TYPE_PRECISION (type
) == 1))
3491 if ((TREE_CODE (op0_type
) == VECTOR_TYPE
3492 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3493 && code
!= EQ_EXPR
&& code
!= NE_EXPR
3494 && !VECTOR_BOOLEAN_TYPE_P (op0_type
)
3495 && !VECTOR_INTEGER_TYPE_P (op0_type
))
3497 error ("unsupported operation or type for vector comparison"
3498 " returning a boolean");
3499 debug_generic_expr (op0_type
);
3500 debug_generic_expr (op1_type
);
3504 /* Or a boolean vector type with the same element count
3505 as the comparison operand types. */
3506 else if (TREE_CODE (type
) == VECTOR_TYPE
3507 && TREE_CODE (TREE_TYPE (type
)) == BOOLEAN_TYPE
)
3509 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3510 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3512 error ("non-vector operands in vector comparison");
3513 debug_generic_expr (op0_type
);
3514 debug_generic_expr (op1_type
);
3518 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type
),
3519 TYPE_VECTOR_SUBPARTS (op0_type
)))
3521 error ("invalid vector comparison resulting type");
3522 debug_generic_expr (type
);
3528 error ("bogus comparison result type");
3529 debug_generic_expr (type
);
3536 /* Verify a gimple assignment statement STMT with an unary rhs.
3537 Returns true if anything is wrong. */
3540 verify_gimple_assign_unary (gassign
*stmt
)
3542 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3543 tree lhs
= gimple_assign_lhs (stmt
);
3544 tree lhs_type
= TREE_TYPE (lhs
);
3545 tree rhs1
= gimple_assign_rhs1 (stmt
);
3546 tree rhs1_type
= TREE_TYPE (rhs1
);
3548 if (!is_gimple_reg (lhs
))
3550 error ("non-register as LHS of unary operation");
3554 if (!is_gimple_val (rhs1
))
3556 error ("invalid operand in unary operation");
3560 const char* const code_name
= get_tree_code_name (rhs_code
);
3562 /* First handle conversions. */
3567 /* Allow conversions between vectors with the same number of elements,
3568 provided that the conversion is OK for the element types too. */
3569 if (VECTOR_TYPE_P (lhs_type
)
3570 && VECTOR_TYPE_P (rhs1_type
)
3571 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type
),
3572 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
3574 lhs_type
= TREE_TYPE (lhs_type
);
3575 rhs1_type
= TREE_TYPE (rhs1_type
);
3577 else if (VECTOR_TYPE_P (lhs_type
) || VECTOR_TYPE_P (rhs1_type
))
3579 error ("invalid vector types in nop conversion");
3580 debug_generic_expr (lhs_type
);
3581 debug_generic_expr (rhs1_type
);
3585 /* Allow conversions from pointer type to integral type only if
3586 there is no sign or zero extension involved.
3587 For targets were the precision of ptrofftype doesn't match that
3588 of pointers we allow conversions to types where
3589 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3590 if ((POINTER_TYPE_P (lhs_type
)
3591 && INTEGRAL_TYPE_P (rhs1_type
))
3592 || (POINTER_TYPE_P (rhs1_type
)
3593 && INTEGRAL_TYPE_P (lhs_type
)
3594 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3595 #if defined(POINTERS_EXTEND_UNSIGNED)
3596 || (TYPE_MODE (rhs1_type
) == ptr_mode
3597 && (TYPE_PRECISION (lhs_type
)
3598 == BITS_PER_WORD
/* word_mode */
3599 || (TYPE_PRECISION (lhs_type
)
3600 == GET_MODE_PRECISION (Pmode
))))
3605 /* Allow conversion from integral to offset type and vice versa. */
3606 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3607 && INTEGRAL_TYPE_P (rhs1_type
))
3608 || (INTEGRAL_TYPE_P (lhs_type
)
3609 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3612 /* Otherwise assert we are converting between types of the
3614 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3616 error ("invalid types in nop conversion");
3617 debug_generic_expr (lhs_type
);
3618 debug_generic_expr (rhs1_type
);
3625 case ADDR_SPACE_CONVERT_EXPR
:
3627 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3628 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3629 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3631 error ("invalid types in address space conversion");
3632 debug_generic_expr (lhs_type
);
3633 debug_generic_expr (rhs1_type
);
3640 case FIXED_CONVERT_EXPR
:
3642 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3643 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3645 error ("invalid types in fixed-point conversion");
3646 debug_generic_expr (lhs_type
);
3647 debug_generic_expr (rhs1_type
);
3656 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3657 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3658 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3660 error ("invalid types in conversion to floating-point");
3661 debug_generic_expr (lhs_type
);
3662 debug_generic_expr (rhs1_type
);
3669 case FIX_TRUNC_EXPR
:
3671 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3672 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3673 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3675 error ("invalid types in conversion to integer");
3676 debug_generic_expr (lhs_type
);
3677 debug_generic_expr (rhs1_type
);
3684 case VEC_UNPACK_HI_EXPR
:
3685 case VEC_UNPACK_LO_EXPR
:
3686 case VEC_UNPACK_FLOAT_HI_EXPR
:
3687 case VEC_UNPACK_FLOAT_LO_EXPR
:
3688 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3689 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3690 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3691 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
)))
3694 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3695 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3696 || ((rhs_code
== VEC_UNPACK_HI_EXPR
3697 || rhs_code
== VEC_UNPACK_LO_EXPR
)
3698 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3699 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3700 || ((rhs_code
== VEC_UNPACK_FLOAT_HI_EXPR
3701 || rhs_code
== VEC_UNPACK_FLOAT_LO_EXPR
)
3702 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3703 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))))
3704 || ((rhs_code
== VEC_UNPACK_FIX_TRUNC_HI_EXPR
3705 || rhs_code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
)
3706 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3707 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))))
3708 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
3709 2 * GET_MODE_SIZE (element_mode (rhs1_type
)))
3710 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type
)
3711 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type
)))
3712 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type
),
3713 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
3715 error ("type mismatch in %qs expression", code_name
);
3716 debug_generic_expr (lhs_type
);
3717 debug_generic_expr (rhs1_type
);
3731 if (!ANY_INTEGRAL_TYPE_P (lhs_type
)
3732 || !TYPE_UNSIGNED (lhs_type
)
3733 || !ANY_INTEGRAL_TYPE_P (rhs1_type
)
3734 || TYPE_UNSIGNED (rhs1_type
)
3735 || element_precision (lhs_type
) != element_precision (rhs1_type
))
3737 error ("invalid types for %qs", code_name
);
3738 debug_generic_expr (lhs_type
);
3739 debug_generic_expr (rhs1_type
);
3744 case VEC_DUPLICATE_EXPR
:
3745 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
3746 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
3748 error ("%qs should be from a scalar to a like vector", code_name
);
3749 debug_generic_expr (lhs_type
);
3750 debug_generic_expr (rhs1_type
);
3759 /* For the remaining codes assert there is no conversion involved. */
3760 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3762 error ("non-trivial conversion in unary operation");
3763 debug_generic_expr (lhs_type
);
3764 debug_generic_expr (rhs1_type
);
3771 /* Verify a gimple assignment statement STMT with a binary rhs.
3772 Returns true if anything is wrong. */
3775 verify_gimple_assign_binary (gassign
*stmt
)
3777 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3778 tree lhs
= gimple_assign_lhs (stmt
);
3779 tree lhs_type
= TREE_TYPE (lhs
);
3780 tree rhs1
= gimple_assign_rhs1 (stmt
);
3781 tree rhs1_type
= TREE_TYPE (rhs1
);
3782 tree rhs2
= gimple_assign_rhs2 (stmt
);
3783 tree rhs2_type
= TREE_TYPE (rhs2
);
3785 if (!is_gimple_reg (lhs
))
3787 error ("non-register as LHS of binary operation");
3791 if (!is_gimple_val (rhs1
)
3792 || !is_gimple_val (rhs2
))
3794 error ("invalid operands in binary operation");
3798 const char* const code_name
= get_tree_code_name (rhs_code
);
3800 /* First handle operations that involve different types. */
3805 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3806 || !(INTEGRAL_TYPE_P (rhs1_type
)
3807 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3808 || !(INTEGRAL_TYPE_P (rhs2_type
)
3809 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3811 error ("type mismatch in %qs", code_name
);
3812 debug_generic_expr (lhs_type
);
3813 debug_generic_expr (rhs1_type
);
3814 debug_generic_expr (rhs2_type
);
3826 /* Shifts and rotates are ok on integral types, fixed point
3827 types and integer vector types. */
3828 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3829 && !FIXED_POINT_TYPE_P (rhs1_type
)
3830 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3831 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3832 || (!INTEGRAL_TYPE_P (rhs2_type
)
3833 /* Vector shifts of vectors are also ok. */
3834 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3835 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3836 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3837 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3838 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3840 error ("type mismatch in %qs", code_name
);
3841 debug_generic_expr (lhs_type
);
3842 debug_generic_expr (rhs1_type
);
3843 debug_generic_expr (rhs2_type
);
3850 case WIDEN_LSHIFT_EXPR
:
3852 if (!INTEGRAL_TYPE_P (lhs_type
)
3853 || !INTEGRAL_TYPE_P (rhs1_type
)
3854 || TREE_CODE (rhs2
) != INTEGER_CST
3855 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3857 error ("type mismatch in %qs", code_name
);
3858 debug_generic_expr (lhs_type
);
3859 debug_generic_expr (rhs1_type
);
3860 debug_generic_expr (rhs2_type
);
3867 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3868 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3870 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3871 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3872 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3873 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3874 || TREE_CODE (rhs2
) != INTEGER_CST
3875 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3876 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3878 error ("type mismatch in %qs", code_name
);
3879 debug_generic_expr (lhs_type
);
3880 debug_generic_expr (rhs1_type
);
3881 debug_generic_expr (rhs2_type
);
3888 case WIDEN_PLUS_EXPR
:
3889 case WIDEN_MINUS_EXPR
:
3893 tree lhs_etype
= lhs_type
;
3894 tree rhs1_etype
= rhs1_type
;
3895 tree rhs2_etype
= rhs2_type
;
3896 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3898 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3899 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3901 error ("invalid non-vector operands to %qs", code_name
);
3904 lhs_etype
= TREE_TYPE (lhs_type
);
3905 rhs1_etype
= TREE_TYPE (rhs1_type
);
3906 rhs2_etype
= TREE_TYPE (rhs2_type
);
3908 if (POINTER_TYPE_P (lhs_etype
)
3909 || POINTER_TYPE_P (rhs1_etype
)
3910 || POINTER_TYPE_P (rhs2_etype
))
3912 error ("invalid (pointer) operands %qs", code_name
);
3916 /* Continue with generic binary expression handling. */
3920 case POINTER_PLUS_EXPR
:
3922 if (!POINTER_TYPE_P (rhs1_type
)
3923 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3924 || !ptrofftype_p (rhs2_type
))
3926 error ("type mismatch in %qs", code_name
);
3927 debug_generic_stmt (lhs_type
);
3928 debug_generic_stmt (rhs1_type
);
3929 debug_generic_stmt (rhs2_type
);
3936 case POINTER_DIFF_EXPR
:
3938 if (!POINTER_TYPE_P (rhs1_type
)
3939 || !POINTER_TYPE_P (rhs2_type
)
3940 /* Because we special-case pointers to void we allow difference
3941 of arbitrary pointers with the same mode. */
3942 || TYPE_MODE (rhs1_type
) != TYPE_MODE (rhs2_type
)
3943 || TREE_CODE (lhs_type
) != INTEGER_TYPE
3944 || TYPE_UNSIGNED (lhs_type
)
3945 || TYPE_PRECISION (lhs_type
) != TYPE_PRECISION (rhs1_type
))
3947 error ("type mismatch in %qs", code_name
);
3948 debug_generic_stmt (lhs_type
);
3949 debug_generic_stmt (rhs1_type
);
3950 debug_generic_stmt (rhs2_type
);
3957 case TRUTH_ANDIF_EXPR
:
3958 case TRUTH_ORIF_EXPR
:
3959 case TRUTH_AND_EXPR
:
3961 case TRUTH_XOR_EXPR
:
3971 case UNORDERED_EXPR
:
3979 /* Comparisons are also binary, but the result type is not
3980 connected to the operand types. */
3981 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
, rhs_code
);
3983 case WIDEN_MULT_EXPR
:
3984 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3986 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3987 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3989 case WIDEN_SUM_EXPR
:
3991 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
3992 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
3993 && ((!INTEGRAL_TYPE_P (rhs1_type
)
3994 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3995 || (!INTEGRAL_TYPE_P (lhs_type
)
3996 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
3997 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3998 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type
)),
3999 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4001 error ("type mismatch in %qs", code_name
);
4002 debug_generic_expr (lhs_type
);
4003 debug_generic_expr (rhs1_type
);
4004 debug_generic_expr (rhs2_type
);
4010 case VEC_WIDEN_MINUS_HI_EXPR
:
4011 case VEC_WIDEN_MINUS_LO_EXPR
:
4012 case VEC_WIDEN_PLUS_HI_EXPR
:
4013 case VEC_WIDEN_PLUS_LO_EXPR
:
4014 case VEC_WIDEN_MULT_HI_EXPR
:
4015 case VEC_WIDEN_MULT_LO_EXPR
:
4016 case VEC_WIDEN_MULT_EVEN_EXPR
:
4017 case VEC_WIDEN_MULT_ODD_EXPR
:
4019 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4020 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4021 || !types_compatible_p (rhs1_type
, rhs2_type
)
4022 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type
)),
4023 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4025 error ("type mismatch in %qs", code_name
);
4026 debug_generic_expr (lhs_type
);
4027 debug_generic_expr (rhs1_type
);
4028 debug_generic_expr (rhs2_type
);
4034 case VEC_PACK_TRUNC_EXPR
:
4035 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4036 vector boolean types. */
4037 if (VECTOR_BOOLEAN_TYPE_P (lhs_type
)
4038 && VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4039 && types_compatible_p (rhs1_type
, rhs2_type
)
4040 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type
),
4041 2 * TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4045 case VEC_PACK_SAT_EXPR
:
4046 case VEC_PACK_FIX_TRUNC_EXPR
:
4048 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4049 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4050 || !((rhs_code
== VEC_PACK_FIX_TRUNC_EXPR
4051 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
))
4052 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
)))
4053 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4054 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))))
4055 || !types_compatible_p (rhs1_type
, rhs2_type
)
4056 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4057 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4058 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4059 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4061 error ("type mismatch in %qs", code_name
);
4062 debug_generic_expr (lhs_type
);
4063 debug_generic_expr (rhs1_type
);
4064 debug_generic_expr (rhs2_type
);
4071 case VEC_PACK_FLOAT_EXPR
:
4072 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4073 || TREE_CODE (lhs_type
) != VECTOR_TYPE
4074 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
4075 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type
))
4076 || !types_compatible_p (rhs1_type
, rhs2_type
)
4077 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type
)),
4078 2 * GET_MODE_SIZE (element_mode (lhs_type
)))
4079 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type
),
4080 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4082 error ("type mismatch in %qs", code_name
);
4083 debug_generic_expr (lhs_type
);
4084 debug_generic_expr (rhs1_type
);
4085 debug_generic_expr (rhs2_type
);
4092 case MULT_HIGHPART_EXPR
:
4093 case TRUNC_DIV_EXPR
:
4095 case FLOOR_DIV_EXPR
:
4096 case ROUND_DIV_EXPR
:
4097 case TRUNC_MOD_EXPR
:
4099 case FLOOR_MOD_EXPR
:
4100 case ROUND_MOD_EXPR
:
4102 case EXACT_DIV_EXPR
:
4108 /* Continue with generic binary expression handling. */
4111 case VEC_SERIES_EXPR
:
4112 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
))
4114 error ("type mismatch in %qs", code_name
);
4115 debug_generic_expr (rhs1_type
);
4116 debug_generic_expr (rhs2_type
);
4119 if (TREE_CODE (lhs_type
) != VECTOR_TYPE
4120 || !useless_type_conversion_p (TREE_TYPE (lhs_type
), rhs1_type
))
4122 error ("vector type expected in %qs", code_name
);
4123 debug_generic_expr (lhs_type
);
4132 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4133 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4135 error ("type mismatch in binary expression");
4136 debug_generic_stmt (lhs_type
);
4137 debug_generic_stmt (rhs1_type
);
4138 debug_generic_stmt (rhs2_type
);
4145 /* Verify a gimple assignment statement STMT with a ternary rhs.
4146 Returns true if anything is wrong. */
4149 verify_gimple_assign_ternary (gassign
*stmt
)
4151 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4152 tree lhs
= gimple_assign_lhs (stmt
);
4153 tree lhs_type
= TREE_TYPE (lhs
);
4154 tree rhs1
= gimple_assign_rhs1 (stmt
);
4155 tree rhs1_type
= TREE_TYPE (rhs1
);
4156 tree rhs2
= gimple_assign_rhs2 (stmt
);
4157 tree rhs2_type
= TREE_TYPE (rhs2
);
4158 tree rhs3
= gimple_assign_rhs3 (stmt
);
4159 tree rhs3_type
= TREE_TYPE (rhs3
);
4161 if (!is_gimple_reg (lhs
))
4163 error ("non-register as LHS of ternary operation");
4167 if ((rhs_code
== COND_EXPR
4168 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
4169 || !is_gimple_val (rhs2
)
4170 || !is_gimple_val (rhs3
))
4172 error ("invalid operands in ternary operation");
4176 const char* const code_name
= get_tree_code_name (rhs_code
);
4178 /* First handle operations that involve different types. */
4181 case WIDEN_MULT_PLUS_EXPR
:
4182 case WIDEN_MULT_MINUS_EXPR
:
4183 if ((!INTEGRAL_TYPE_P (rhs1_type
)
4184 && !FIXED_POINT_TYPE_P (rhs1_type
))
4185 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
4186 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4187 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
4188 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
4190 error ("type mismatch in %qs", code_name
);
4191 debug_generic_expr (lhs_type
);
4192 debug_generic_expr (rhs1_type
);
4193 debug_generic_expr (rhs2_type
);
4194 debug_generic_expr (rhs3_type
);
4200 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type
)
4201 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4202 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4204 error ("the first argument of a %qs must be of a "
4205 "boolean vector type of the same number of elements "
4206 "as the result", code_name
);
4207 debug_generic_expr (lhs_type
);
4208 debug_generic_expr (rhs1_type
);
4213 if (!is_gimple_val (rhs1
)
4214 && verify_gimple_comparison (TREE_TYPE (rhs1
),
4215 TREE_OPERAND (rhs1
, 0),
4216 TREE_OPERAND (rhs1
, 1),
4219 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
4220 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
4222 error ("type mismatch in %qs", code_name
);
4223 debug_generic_expr (lhs_type
);
4224 debug_generic_expr (rhs2_type
);
4225 debug_generic_expr (rhs3_type
);
4231 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
4232 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
4234 error ("type mismatch in %qs", code_name
);
4235 debug_generic_expr (lhs_type
);
4236 debug_generic_expr (rhs1_type
);
4237 debug_generic_expr (rhs2_type
);
4238 debug_generic_expr (rhs3_type
);
4242 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4243 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4244 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4246 error ("vector types expected in %qs", code_name
);
4247 debug_generic_expr (lhs_type
);
4248 debug_generic_expr (rhs1_type
);
4249 debug_generic_expr (rhs2_type
);
4250 debug_generic_expr (rhs3_type
);
4254 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4255 TYPE_VECTOR_SUBPARTS (rhs2_type
))
4256 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type
),
4257 TYPE_VECTOR_SUBPARTS (rhs3_type
))
4258 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type
),
4259 TYPE_VECTOR_SUBPARTS (lhs_type
)))
4261 error ("vectors with different element number found in %qs",
4263 debug_generic_expr (lhs_type
);
4264 debug_generic_expr (rhs1_type
);
4265 debug_generic_expr (rhs2_type
);
4266 debug_generic_expr (rhs3_type
);
4270 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
4271 || (TREE_CODE (rhs3
) != VECTOR_CST
4272 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4273 (TREE_TYPE (rhs3_type
)))
4274 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4275 (TREE_TYPE (rhs1_type
))))))
4277 error ("invalid mask type in %qs", code_name
);
4278 debug_generic_expr (lhs_type
);
4279 debug_generic_expr (rhs1_type
);
4280 debug_generic_expr (rhs2_type
);
4281 debug_generic_expr (rhs3_type
);
4288 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4289 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4290 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
)))
4291 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type
))))
4293 error ("type mismatch in %qs", code_name
);
4294 debug_generic_expr (lhs_type
);
4295 debug_generic_expr (rhs1_type
);
4296 debug_generic_expr (rhs2_type
);
4297 debug_generic_expr (rhs3_type
);
4301 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4302 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4303 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4305 error ("vector types expected in %qs", code_name
);
4306 debug_generic_expr (lhs_type
);
4307 debug_generic_expr (rhs1_type
);
4308 debug_generic_expr (rhs2_type
);
4309 debug_generic_expr (rhs3_type
);
4315 case BIT_INSERT_EXPR
:
4316 if (! useless_type_conversion_p (lhs_type
, rhs1_type
))
4318 error ("type mismatch in %qs", code_name
);
4319 debug_generic_expr (lhs_type
);
4320 debug_generic_expr (rhs1_type
);
4323 if (! ((INTEGRAL_TYPE_P (rhs1_type
)
4324 && INTEGRAL_TYPE_P (rhs2_type
))
4325 /* Vector element insert. */
4326 || (VECTOR_TYPE_P (rhs1_type
)
4327 && types_compatible_p (TREE_TYPE (rhs1_type
), rhs2_type
))
4328 /* Aligned sub-vector insert. */
4329 || (VECTOR_TYPE_P (rhs1_type
)
4330 && VECTOR_TYPE_P (rhs2_type
)
4331 && types_compatible_p (TREE_TYPE (rhs1_type
),
4332 TREE_TYPE (rhs2_type
))
4333 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type
),
4334 TYPE_VECTOR_SUBPARTS (rhs2_type
))
4335 && multiple_of_p (bitsizetype
, rhs3
, TYPE_SIZE (rhs2_type
)))))
4337 error ("not allowed type combination in %qs", code_name
);
4338 debug_generic_expr (rhs1_type
);
4339 debug_generic_expr (rhs2_type
);
4342 if (! tree_fits_uhwi_p (rhs3
)
4343 || ! types_compatible_p (bitsizetype
, TREE_TYPE (rhs3
))
4344 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type
)))
4346 error ("invalid position or size in %qs", code_name
);
4349 if (INTEGRAL_TYPE_P (rhs1_type
)
4350 && !type_has_mode_precision_p (rhs1_type
))
4352 error ("%qs into non-mode-precision operand", code_name
);
4355 if (INTEGRAL_TYPE_P (rhs1_type
))
4357 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4358 if (bitpos
>= TYPE_PRECISION (rhs1_type
)
4359 || (bitpos
+ TYPE_PRECISION (rhs2_type
)
4360 > TYPE_PRECISION (rhs1_type
)))
4362 error ("insertion out of range in %qs", code_name
);
4366 else if (VECTOR_TYPE_P (rhs1_type
))
4368 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (rhs3
);
4369 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (TYPE_SIZE (rhs2_type
));
4370 if (bitpos
% bitsize
!= 0)
4372 error ("%qs not at element boundary", code_name
);
4380 if (((TREE_CODE (rhs1_type
) != VECTOR_TYPE
4381 || TREE_CODE (lhs_type
) != VECTOR_TYPE
)
4382 && ((!INTEGRAL_TYPE_P (rhs1_type
)
4383 && !SCALAR_FLOAT_TYPE_P (rhs1_type
))
4384 || (!INTEGRAL_TYPE_P (lhs_type
)
4385 && !SCALAR_FLOAT_TYPE_P (lhs_type
))))
4386 || !types_compatible_p (rhs1_type
, rhs2_type
)
4387 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4388 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type
)),
4389 2 * GET_MODE_SIZE (element_mode (rhs1_type
))))
4391 error ("type mismatch in %qs", code_name
);
4392 debug_generic_expr (lhs_type
);
4393 debug_generic_expr (rhs1_type
);
4394 debug_generic_expr (rhs2_type
);
4400 case REALIGN_LOAD_EXPR
:
4410 /* Verify a gimple assignment statement STMT with a single rhs.
4411 Returns true if anything is wrong. */
4414 verify_gimple_assign_single (gassign
*stmt
)
4416 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4417 tree lhs
= gimple_assign_lhs (stmt
);
4418 tree lhs_type
= TREE_TYPE (lhs
);
4419 tree rhs1
= gimple_assign_rhs1 (stmt
);
4420 tree rhs1_type
= TREE_TYPE (rhs1
);
4423 const char* const code_name
= get_tree_code_name (rhs_code
);
4425 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4427 error ("non-trivial conversion in %qs", code_name
);
4428 debug_generic_expr (lhs_type
);
4429 debug_generic_expr (rhs1_type
);
4433 if (gimple_clobber_p (stmt
)
4434 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4436 error ("%qs LHS in clobber statement",
4437 get_tree_code_name (TREE_CODE (lhs
)));
4438 debug_generic_expr (lhs
);
4442 if (handled_component_p (lhs
)
4443 || TREE_CODE (lhs
) == MEM_REF
4444 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4445 res
|= verify_types_in_gimple_reference (lhs
, true);
4447 /* Special codes we cannot handle via their class. */
4452 tree op
= TREE_OPERAND (rhs1
, 0);
4453 if (!is_gimple_addressable (op
))
4455 error ("invalid operand in %qs", code_name
);
4459 /* Technically there is no longer a need for matching types, but
4460 gimple hygiene asks for this check. In LTO we can end up
4461 combining incompatible units and thus end up with addresses
4462 of globals that change their type to a common one. */
4464 && !types_compatible_p (TREE_TYPE (op
),
4465 TREE_TYPE (TREE_TYPE (rhs1
)))
4466 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4469 error ("type mismatch in %qs", code_name
);
4470 debug_generic_stmt (TREE_TYPE (rhs1
));
4471 debug_generic_stmt (TREE_TYPE (op
));
4475 return (verify_address (rhs1
, true)
4476 || verify_types_in_gimple_reference (op
, true));
4481 error ("%qs in gimple IL", code_name
);
4487 case ARRAY_RANGE_REF
:
4488 case VIEW_CONVERT_EXPR
:
4491 case TARGET_MEM_REF
:
4493 if (!is_gimple_reg (lhs
)
4494 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4496 error ("invalid RHS for gimple memory store: %qs", code_name
);
4497 debug_generic_stmt (lhs
);
4498 debug_generic_stmt (rhs1
);
4501 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4513 /* tcc_declaration */
4518 if (!is_gimple_reg (lhs
)
4519 && !is_gimple_reg (rhs1
)
4520 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4522 error ("invalid RHS for gimple memory store: %qs", code_name
);
4523 debug_generic_stmt (lhs
);
4524 debug_generic_stmt (rhs1
);
4530 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4533 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4535 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4537 /* For vector CONSTRUCTORs we require that either it is empty
4538 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4539 (then the element count must be correct to cover the whole
4540 outer vector and index must be NULL on all elements, or it is
4541 a CONSTRUCTOR of scalar elements, where we as an exception allow
4542 smaller number of elements (assuming zero filling) and
4543 consecutive indexes as compared to NULL indexes (such
4544 CONSTRUCTORs can appear in the IL from FEs). */
4545 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4547 if (elt_t
== NULL_TREE
)
4549 elt_t
= TREE_TYPE (elt_v
);
4550 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4552 tree elt_t
= TREE_TYPE (elt_v
);
4553 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4556 error ("incorrect type of vector %qs elements",
4558 debug_generic_stmt (rhs1
);
4561 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1
)
4562 * TYPE_VECTOR_SUBPARTS (elt_t
),
4563 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4565 error ("incorrect number of vector %qs elements",
4567 debug_generic_stmt (rhs1
);
4571 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4574 error ("incorrect type of vector %qs elements",
4576 debug_generic_stmt (rhs1
);
4579 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1
),
4580 TYPE_VECTOR_SUBPARTS (rhs1_type
)))
4582 error ("incorrect number of vector %qs elements",
4584 debug_generic_stmt (rhs1
);
4588 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4590 error ("incorrect type of vector CONSTRUCTOR elements");
4591 debug_generic_stmt (rhs1
);
4594 if (elt_i
!= NULL_TREE
4595 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4596 || TREE_CODE (elt_i
) != INTEGER_CST
4597 || compare_tree_int (elt_i
, i
) != 0))
4599 error ("vector %qs with non-NULL element index",
4601 debug_generic_stmt (rhs1
);
4604 if (!is_gimple_val (elt_v
))
4606 error ("vector %qs element is not a GIMPLE value",
4608 debug_generic_stmt (rhs1
);
4613 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4615 error ("non-vector %qs with elements", code_name
);
4616 debug_generic_stmt (rhs1
);
4623 rhs1
= fold (ASSERT_EXPR_COND (rhs1
));
4624 if (rhs1
== boolean_false_node
)
4626 error ("%qs with an always-false condition", code_name
);
4627 debug_generic_stmt (rhs1
);
4633 case WITH_SIZE_EXPR
:
4643 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4644 is a problem, otherwise false. */
4647 verify_gimple_assign (gassign
*stmt
)
4649 switch (gimple_assign_rhs_class (stmt
))
4651 case GIMPLE_SINGLE_RHS
:
4652 return verify_gimple_assign_single (stmt
);
4654 case GIMPLE_UNARY_RHS
:
4655 return verify_gimple_assign_unary (stmt
);
4657 case GIMPLE_BINARY_RHS
:
4658 return verify_gimple_assign_binary (stmt
);
4660 case GIMPLE_TERNARY_RHS
:
4661 return verify_gimple_assign_ternary (stmt
);
4668 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4669 is a problem, otherwise false. */
4672 verify_gimple_return (greturn
*stmt
)
4674 tree op
= gimple_return_retval (stmt
);
4675 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4677 /* We cannot test for present return values as we do not fix up missing
4678 return values from the original source. */
4682 if (!is_gimple_val (op
)
4683 && TREE_CODE (op
) != RESULT_DECL
)
4685 error ("invalid operand in return statement");
4686 debug_generic_stmt (op
);
4690 if ((TREE_CODE (op
) == RESULT_DECL
4691 && DECL_BY_REFERENCE (op
))
4692 || (TREE_CODE (op
) == SSA_NAME
4693 && SSA_NAME_VAR (op
)
4694 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4695 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4696 op
= TREE_TYPE (op
);
4698 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4700 error ("invalid conversion in return statement");
4701 debug_generic_stmt (restype
);
4702 debug_generic_stmt (TREE_TYPE (op
));
4710 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4711 is a problem, otherwise false. */
4714 verify_gimple_goto (ggoto
*stmt
)
4716 tree dest
= gimple_goto_dest (stmt
);
4718 /* ??? We have two canonical forms of direct goto destinations, a
4719 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4720 if (TREE_CODE (dest
) != LABEL_DECL
4721 && (!is_gimple_val (dest
)
4722 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4724 error ("goto destination is neither a label nor a pointer");
4731 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4732 is a problem, otherwise false. */
4735 verify_gimple_switch (gswitch
*stmt
)
4738 tree elt
, prev_upper_bound
= NULL_TREE
;
4739 tree index_type
, elt_type
= NULL_TREE
;
4741 if (!is_gimple_val (gimple_switch_index (stmt
)))
4743 error ("invalid operand to switch statement");
4744 debug_generic_stmt (gimple_switch_index (stmt
));
4748 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4749 if (! INTEGRAL_TYPE_P (index_type
))
4751 error ("non-integral type switch statement");
4752 debug_generic_expr (index_type
);
4756 elt
= gimple_switch_label (stmt
, 0);
4757 if (CASE_LOW (elt
) != NULL_TREE
4758 || CASE_HIGH (elt
) != NULL_TREE
4759 || CASE_CHAIN (elt
) != NULL_TREE
)
4761 error ("invalid default case label in switch statement");
4762 debug_generic_expr (elt
);
4766 n
= gimple_switch_num_labels (stmt
);
4767 for (i
= 1; i
< n
; i
++)
4769 elt
= gimple_switch_label (stmt
, i
);
4771 if (CASE_CHAIN (elt
))
4773 error ("invalid %<CASE_CHAIN%>");
4774 debug_generic_expr (elt
);
4777 if (! CASE_LOW (elt
))
4779 error ("invalid case label in switch statement");
4780 debug_generic_expr (elt
);
4784 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4786 error ("invalid case range in switch statement");
4787 debug_generic_expr (elt
);
4793 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4794 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4796 error ("type precision mismatch in switch statement");
4800 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4801 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4803 error ("type mismatch for case label in switch statement");
4804 debug_generic_expr (elt
);
4808 if (prev_upper_bound
)
4810 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4812 error ("case labels not sorted in switch statement");
4817 prev_upper_bound
= CASE_HIGH (elt
);
4818 if (! prev_upper_bound
)
4819 prev_upper_bound
= CASE_LOW (elt
);
4825 /* Verify a gimple debug statement STMT.
4826 Returns true if anything is wrong. */
4829 verify_gimple_debug (gimple
*stmt ATTRIBUTE_UNUSED
)
4831 /* There isn't much that could be wrong in a gimple debug stmt. A
4832 gimple debug bind stmt, for example, maps a tree, that's usually
4833 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4834 component or member of an aggregate type, to another tree, that
4835 can be an arbitrary expression. These stmts expand into debug
4836 insns, and are converted to debug notes by var-tracking.c. */
4840 /* Verify a gimple label statement STMT.
4841 Returns true if anything is wrong. */
4844 verify_gimple_label (glabel
*stmt
)
4846 tree decl
= gimple_label_label (stmt
);
4850 if (TREE_CODE (decl
) != LABEL_DECL
)
4852 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4853 && DECL_CONTEXT (decl
) != current_function_decl
)
4855 error ("label context is not the current function declaration");
4859 uid
= LABEL_DECL_UID (decl
);
4862 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4864 error ("incorrect entry in %<label_to_block_map%>");
4868 uid
= EH_LANDING_PAD_NR (decl
);
4871 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4872 if (decl
!= lp
->post_landing_pad
)
4874 error ("incorrect setting of landing pad number");
4882 /* Verify a gimple cond statement STMT.
4883 Returns true if anything is wrong. */
4886 verify_gimple_cond (gcond
*stmt
)
4888 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4890 error ("invalid comparison code in gimple cond");
4893 if (!(!gimple_cond_true_label (stmt
)
4894 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4895 || !(!gimple_cond_false_label (stmt
)
4896 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4898 error ("invalid labels in gimple cond");
4902 return verify_gimple_comparison (boolean_type_node
,
4903 gimple_cond_lhs (stmt
),
4904 gimple_cond_rhs (stmt
),
4905 gimple_cond_code (stmt
));
4908 /* Verify the GIMPLE statement STMT. Returns true if there is an
4909 error, otherwise false. */
4912 verify_gimple_stmt (gimple
*stmt
)
4914 switch (gimple_code (stmt
))
4917 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4920 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4923 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4926 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4929 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4932 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4935 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4940 case GIMPLE_TRANSACTION
:
4941 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4943 /* Tuples that do not have tree operands. */
4945 case GIMPLE_PREDICT
:
4947 case GIMPLE_EH_DISPATCH
:
4948 case GIMPLE_EH_MUST_NOT_THROW
:
4952 /* OpenMP directives are validated by the FE and never operated
4953 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4954 non-gimple expressions when the main index variable has had
4955 its address taken. This does not affect the loop itself
4956 because the header of an GIMPLE_OMP_FOR is merely used to determine
4957 how to setup the parallel iteration. */
4961 return verify_gimple_debug (stmt
);
4968 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4969 and false otherwise. */
4972 verify_gimple_phi (gphi
*phi
)
4976 tree phi_result
= gimple_phi_result (phi
);
4981 error ("invalid %<PHI%> result");
4985 virtual_p
= virtual_operand_p (phi_result
);
4986 if (TREE_CODE (phi_result
) != SSA_NAME
4988 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4990 error ("invalid %<PHI%> result");
4994 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4996 tree t
= gimple_phi_arg_def (phi
, i
);
5000 error ("missing %<PHI%> def");
5004 /* Addressable variables do have SSA_NAMEs but they
5005 are not considered gimple values. */
5006 else if ((TREE_CODE (t
) == SSA_NAME
5007 && virtual_p
!= virtual_operand_p (t
))
5009 && (TREE_CODE (t
) != SSA_NAME
5010 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
5012 && !is_gimple_val (t
)))
5014 error ("invalid %<PHI%> argument");
5015 debug_generic_expr (t
);
5018 #ifdef ENABLE_TYPES_CHECKING
5019 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
5021 error ("incompatible types in %<PHI%> argument %u", i
);
5022 debug_generic_stmt (TREE_TYPE (phi_result
));
5023 debug_generic_stmt (TREE_TYPE (t
));
5032 /* Verify the GIMPLE statements inside the sequence STMTS. */
5035 verify_gimple_in_seq_2 (gimple_seq stmts
)
5037 gimple_stmt_iterator ittr
;
5040 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
5042 gimple
*stmt
= gsi_stmt (ittr
);
5044 switch (gimple_code (stmt
))
5047 err
|= verify_gimple_in_seq_2 (
5048 gimple_bind_body (as_a
<gbind
*> (stmt
)));
5052 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
5053 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
5056 case GIMPLE_EH_FILTER
:
5057 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
5060 case GIMPLE_EH_ELSE
:
5062 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
5063 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
5064 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
5069 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
5070 as_a
<gcatch
*> (stmt
)));
5073 case GIMPLE_TRANSACTION
:
5074 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
5079 bool err2
= verify_gimple_stmt (stmt
);
5081 debug_gimple_stmt (stmt
);
5090 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5091 is a problem, otherwise false. */
5094 verify_gimple_transaction (gtransaction
*stmt
)
5098 lab
= gimple_transaction_label_norm (stmt
);
5099 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5101 lab
= gimple_transaction_label_uninst (stmt
);
5102 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5104 lab
= gimple_transaction_label_over (stmt
);
5105 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
5108 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
5112 /* Verify the GIMPLE statements inside the statement list STMTS. */
5115 verify_gimple_in_seq (gimple_seq stmts
)
5117 timevar_push (TV_TREE_STMT_VERIFY
);
5118 if (verify_gimple_in_seq_2 (stmts
))
5119 internal_error ("%<verify_gimple%> failed");
5120 timevar_pop (TV_TREE_STMT_VERIFY
);
5123 /* Return true when the T can be shared. */
5126 tree_node_can_be_shared (tree t
)
5128 if (IS_TYPE_OR_DECL_P (t
)
5129 || TREE_CODE (t
) == SSA_NAME
5130 || TREE_CODE (t
) == IDENTIFIER_NODE
5131 || TREE_CODE (t
) == CASE_LABEL_EXPR
5132 || is_gimple_min_invariant (t
))
5135 if (t
== error_mark_node
)
5141 /* Called via walk_tree. Verify tree sharing. */
5144 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5146 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
5148 if (tree_node_can_be_shared (*tp
))
5150 *walk_subtrees
= false;
5154 if (visited
->add (*tp
))
5160 /* Called via walk_gimple_stmt. Verify tree sharing. */
5163 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
5165 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5166 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
5169 static bool eh_error_found
;
5171 verify_eh_throw_stmt_node (gimple
*const &stmt
, const int &,
5172 hash_set
<gimple
*> *visited
)
5174 if (!visited
->contains (stmt
))
5176 error ("dead statement in EH table");
5177 debug_gimple_stmt (stmt
);
5178 eh_error_found
= true;
5183 /* Verify if the location LOCs block is in BLOCKS. */
5186 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
5188 tree block
= LOCATION_BLOCK (loc
);
5189 if (block
!= NULL_TREE
5190 && !blocks
->contains (block
))
5192 error ("location references block not in block tree");
5195 if (block
!= NULL_TREE
)
5196 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
5200 /* Called via walk_tree. Verify that expressions have no blocks. */
5203 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
5207 *walk_subtrees
= false;
5211 location_t loc
= EXPR_LOCATION (*tp
);
5212 if (LOCATION_BLOCK (loc
) != NULL
)
5218 /* Called via walk_tree. Verify locations of expressions. */
5221 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
5223 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
5226 /* ??? This doesn't really belong here but there's no good place to
5227 stick this remainder of old verify_expr. */
5228 /* ??? This barfs on debug stmts which contain binds to vars with
5229 different function context. */
5232 || TREE_CODE (t
) == PARM_DECL
5233 || TREE_CODE (t
) == RESULT_DECL
)
5235 tree context
= decl_function_context (t
);
5236 if (context
!= cfun
->decl
5237 && !SCOPE_FILE_SCOPE_P (context
)
5239 && !DECL_EXTERNAL (t
))
5241 error ("local declaration from a different function");
5247 if (VAR_P (t
) && DECL_HAS_DEBUG_EXPR_P (t
))
5249 tree x
= DECL_DEBUG_EXPR (t
);
5250 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5255 || TREE_CODE (t
) == PARM_DECL
5256 || TREE_CODE (t
) == RESULT_DECL
)
5257 && DECL_HAS_VALUE_EXPR_P (t
))
5259 tree x
= DECL_VALUE_EXPR (t
);
5260 tree addr
= walk_tree (&x
, verify_expr_no_block
, NULL
, NULL
);
5267 *walk_subtrees
= false;
5271 location_t loc
= EXPR_LOCATION (t
);
5272 if (verify_location (blocks
, loc
))
5278 /* Called via walk_gimple_op. Verify locations of expressions. */
5281 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
5283 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5284 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
5287 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5290 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
5293 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
5296 collect_subblocks (blocks
, t
);
5300 /* Disable warnings about missing quoting in GCC diagnostics for
5301 the verification errors. Their format strings don't follow
5302 GCC diagnostic conventions and trigger an ICE in the end. */
5304 # pragma GCC diagnostic push
5305 # pragma GCC diagnostic ignored "-Wformat-diag"
5308 /* Verify the GIMPLE statements in the CFG of FN. */
5311 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
5316 timevar_push (TV_TREE_STMT_VERIFY
);
5317 hash_set
<void *> visited
;
5318 hash_set
<gimple
*> visited_throwing_stmts
;
5320 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5321 hash_set
<tree
> blocks
;
5322 if (DECL_INITIAL (fn
->decl
))
5324 blocks
.add (DECL_INITIAL (fn
->decl
));
5325 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
5328 FOR_EACH_BB_FN (bb
, fn
)
5330 gimple_stmt_iterator gsi
;
5334 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5338 gphi
*phi
= gpi
.phi ();
5342 if (gimple_bb (phi
) != bb
)
5344 error ("gimple_bb (phi) is set to a wrong basic block");
5348 err2
|= verify_gimple_phi (phi
);
5350 /* Only PHI arguments have locations. */
5351 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
5353 error ("PHI node with location");
5357 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5359 tree arg
= gimple_phi_arg_def (phi
, i
);
5360 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
5364 error ("incorrect sharing of tree nodes");
5365 debug_generic_expr (addr
);
5368 location_t loc
= gimple_phi_arg_location (phi
, i
);
5369 if (virtual_operand_p (gimple_phi_result (phi
))
5370 && loc
!= UNKNOWN_LOCATION
)
5372 error ("virtual PHI with argument locations");
5375 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
5378 debug_generic_expr (addr
);
5381 err2
|= verify_location (&blocks
, loc
);
5385 debug_gimple_stmt (phi
);
5389 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5391 gimple
*stmt
= gsi_stmt (gsi
);
5393 struct walk_stmt_info wi
;
5397 if (gimple_bb (stmt
) != bb
)
5399 error ("gimple_bb (stmt) is set to a wrong basic block");
5403 err2
|= verify_gimple_stmt (stmt
);
5404 err2
|= verify_location (&blocks
, gimple_location (stmt
));
5406 memset (&wi
, 0, sizeof (wi
));
5407 wi
.info
= (void *) &visited
;
5408 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
5411 error ("incorrect sharing of tree nodes");
5412 debug_generic_expr (addr
);
5416 memset (&wi
, 0, sizeof (wi
));
5417 wi
.info
= (void *) &blocks
;
5418 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5421 debug_generic_expr (addr
);
5425 /* If the statement is marked as part of an EH region, then it is
5426 expected that the statement could throw. Verify that when we
5427 have optimizations that simplify statements such that we prove
5428 that they cannot throw, that we update other data structures
5430 lp_nr
= lookup_stmt_eh_lp (stmt
);
5432 visited_throwing_stmts
.add (stmt
);
5435 if (!stmt_could_throw_p (cfun
, stmt
))
5439 error ("statement marked for throw, but doesn%'t");
5443 else if (!gsi_one_before_end_p (gsi
))
5445 error ("statement marked for throw in middle of block");
5451 debug_gimple_stmt (stmt
);
5455 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5456 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5457 err
|= verify_location (&blocks
, e
->goto_locus
);
5460 hash_map
<gimple
*, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5461 eh_error_found
= false;
5463 eh_table
->traverse
<hash_set
<gimple
*> *, verify_eh_throw_stmt_node
>
5464 (&visited_throwing_stmts
);
5466 if (err
|| eh_error_found
)
5467 internal_error ("verify_gimple failed");
5469 verify_histograms ();
5470 timevar_pop (TV_TREE_STMT_VERIFY
);
5474 /* Verifies that the flow information is OK. */
5477 gimple_verify_flow_info (void)
5481 gimple_stmt_iterator gsi
;
5486 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5487 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5489 error ("ENTRY_BLOCK has IL associated with it");
5493 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5494 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5496 error ("EXIT_BLOCK has IL associated with it");
5500 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5501 if (e
->flags
& EDGE_FALLTHRU
)
5503 error ("fallthru to exit from bb %d", e
->src
->index
);
5507 FOR_EACH_BB_FN (bb
, cfun
)
5509 bool found_ctrl_stmt
= false;
5513 /* Skip labels on the start of basic block. */
5514 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5517 gimple
*prev_stmt
= stmt
;
5519 stmt
= gsi_stmt (gsi
);
5521 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5524 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5525 if (prev_stmt
&& DECL_NONLOCAL (label
))
5527 error ("nonlocal label ");
5528 print_generic_expr (stderr
, label
);
5529 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5534 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5536 error ("EH landing pad label ");
5537 print_generic_expr (stderr
, label
);
5538 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5543 if (label_to_block (cfun
, label
) != bb
)
5546 print_generic_expr (stderr
, label
);
5547 fprintf (stderr
, " to block does not match in bb %d",
5552 if (decl_function_context (label
) != current_function_decl
)
5555 print_generic_expr (stderr
, label
);
5556 fprintf (stderr
, " has incorrect context in bb %d",
5562 /* Verify that body of basic block BB is free of control flow. */
5563 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5565 gimple
*stmt
= gsi_stmt (gsi
);
5567 if (found_ctrl_stmt
)
5569 error ("control flow in the middle of basic block %d",
5574 if (stmt_ends_bb_p (stmt
))
5575 found_ctrl_stmt
= true;
5577 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5580 print_generic_expr (stderr
, gimple_label_label (label_stmt
));
5581 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5586 gsi
= gsi_last_nondebug_bb (bb
);
5587 if (gsi_end_p (gsi
))
5590 stmt
= gsi_stmt (gsi
);
5592 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5595 err
|= verify_eh_edges (stmt
);
5597 if (is_ctrl_stmt (stmt
))
5599 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5600 if (e
->flags
& EDGE_FALLTHRU
)
5602 error ("fallthru edge after a control statement in bb %d",
5608 if (gimple_code (stmt
) != GIMPLE_COND
)
5610 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5611 after anything else but if statement. */
5612 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5613 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5615 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5621 switch (gimple_code (stmt
))
5628 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5632 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5633 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5634 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5635 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5636 || EDGE_COUNT (bb
->succs
) >= 3)
5638 error ("wrong outgoing edge flags at end of bb %d",
5646 if (simple_goto_p (stmt
))
5648 error ("explicit goto at end of bb %d", bb
->index
);
5653 /* FIXME. We should double check that the labels in the
5654 destination blocks have their address taken. */
5655 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5656 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5657 | EDGE_FALSE_VALUE
))
5658 || !(e
->flags
& EDGE_ABNORMAL
))
5660 error ("wrong outgoing edge flags at end of bb %d",
5668 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5672 if (!single_succ_p (bb
)
5673 || (single_succ_edge (bb
)->flags
5674 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5675 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5677 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5680 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5682 error ("return edge does not point to exit in bb %d",
5690 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5695 n
= gimple_switch_num_labels (switch_stmt
);
5697 /* Mark all the destination basic blocks. */
5698 for (i
= 0; i
< n
; ++i
)
5700 basic_block label_bb
= gimple_switch_label_bb (cfun
, switch_stmt
, i
);
5701 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5702 label_bb
->aux
= (void *)1;
5705 /* Verify that the case labels are sorted. */
5706 prev
= gimple_switch_label (switch_stmt
, 0);
5707 for (i
= 1; i
< n
; ++i
)
5709 tree c
= gimple_switch_label (switch_stmt
, i
);
5712 error ("found default case not at the start of "
5718 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5720 error ("case labels not sorted: ");
5721 print_generic_expr (stderr
, prev
);
5722 fprintf (stderr
," is greater than ");
5723 print_generic_expr (stderr
, c
);
5724 fprintf (stderr
," but comes before it.\n");
5729 /* VRP will remove the default case if it can prove it will
5730 never be executed. So do not verify there always exists
5731 a default case here. */
5733 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5737 error ("extra outgoing edge %d->%d",
5738 bb
->index
, e
->dest
->index
);
5742 e
->dest
->aux
= (void *)2;
5743 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5744 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5746 error ("wrong outgoing edge flags at end of bb %d",
5752 /* Check that we have all of them. */
5753 for (i
= 0; i
< n
; ++i
)
5755 basic_block label_bb
= gimple_switch_label_bb (cfun
,
5758 if (label_bb
->aux
!= (void *)2)
5760 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5765 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5766 e
->dest
->aux
= (void *)0;
5770 case GIMPLE_EH_DISPATCH
:
5771 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5779 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5780 verify_dominators (CDI_DOMINATORS
);
5786 # pragma GCC diagnostic pop
5789 /* Updates phi nodes after creating a forwarder block joined
5790 by edge FALLTHRU. */
5793 gimple_make_forwarder_block (edge fallthru
)
5797 basic_block dummy
, bb
;
5800 bool forward_location_p
;
5802 dummy
= fallthru
->src
;
5803 bb
= fallthru
->dest
;
5805 if (single_pred_p (bb
))
5808 /* We can forward location info if we have only one predecessor. */
5809 forward_location_p
= single_pred_p (dummy
);
5811 /* If we redirected a branch we must create new PHI nodes at the
5813 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5815 gphi
*phi
, *new_phi
;
5818 var
= gimple_phi_result (phi
);
5819 new_phi
= create_phi_node (var
, bb
);
5820 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5821 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5823 ? gimple_phi_arg_location (phi
, 0) : UNKNOWN_LOCATION
);
5826 /* Add the arguments we have stored on edges. */
5827 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5832 flush_pending_stmts (e
);
5837 /* Return a non-special label in the head of basic block BLOCK.
5838 Create one if it doesn't exist. */
5841 gimple_block_label (basic_block bb
)
5843 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5848 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5850 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5853 label
= gimple_label_label (stmt
);
5854 if (!DECL_NONLOCAL (label
))
5857 gsi_move_before (&i
, &s
);
5862 label
= create_artificial_label (UNKNOWN_LOCATION
);
5863 stmt
= gimple_build_label (label
);
5864 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5869 /* Attempt to perform edge redirection by replacing a possibly complex
5870 jump instruction by a goto or by removing the jump completely.
5871 This can apply only if all edges now point to the same block. The
5872 parameters and return values are equivalent to
5873 redirect_edge_and_branch. */
5876 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5878 basic_block src
= e
->src
;
5879 gimple_stmt_iterator i
;
5882 /* We can replace or remove a complex jump only when we have exactly
5884 if (EDGE_COUNT (src
->succs
) != 2
5885 /* Verify that all targets will be TARGET. Specifically, the
5886 edge that is not E must also go to TARGET. */
5887 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5890 i
= gsi_last_bb (src
);
5894 stmt
= gsi_stmt (i
);
5896 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5898 gsi_remove (&i
, true);
5899 e
= ssa_redirect_edge (e
, target
);
5900 e
->flags
= EDGE_FALLTHRU
;
5908 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5909 edge representing the redirected branch. */
5912 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5914 basic_block bb
= e
->src
;
5915 gimple_stmt_iterator gsi
;
5919 if (e
->flags
& EDGE_ABNORMAL
)
5922 if (e
->dest
== dest
)
5925 if (e
->flags
& EDGE_EH
)
5926 return redirect_eh_edge (e
, dest
);
5928 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5930 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5935 gsi
= gsi_last_nondebug_bb (bb
);
5936 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5938 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5941 /* For COND_EXPR, we only need to redirect the edge. */
5945 /* No non-abnormal edges should lead from a non-simple goto, and
5946 simple ones should be represented implicitly. */
5951 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5952 tree label
= gimple_block_label (dest
);
5953 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5955 /* If we have a list of cases associated with E, then use it
5956 as it's a lot faster than walking the entire case vector. */
5959 edge e2
= find_edge (e
->src
, dest
);
5966 CASE_LABEL (cases
) = label
;
5967 cases
= CASE_CHAIN (cases
);
5970 /* If there was already an edge in the CFG, then we need
5971 to move all the cases associated with E to E2. */
5974 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5976 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5977 CASE_CHAIN (cases2
) = first
;
5979 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5983 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5985 for (i
= 0; i
< n
; i
++)
5987 tree elt
= gimple_switch_label (switch_stmt
, i
);
5988 if (label_to_block (cfun
, CASE_LABEL (elt
)) == e
->dest
)
5989 CASE_LABEL (elt
) = label
;
5997 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5998 int i
, n
= gimple_asm_nlabels (asm_stmt
);
6001 for (i
= 0; i
< n
; ++i
)
6003 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
6004 if (label_to_block (cfun
, TREE_VALUE (cons
)) == e
->dest
)
6007 label
= gimple_block_label (dest
);
6008 TREE_VALUE (cons
) = label
;
6012 /* If we didn't find any label matching the former edge in the
6013 asm labels, we must be redirecting the fallthrough
6015 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
6020 gsi_remove (&gsi
, true);
6021 e
->flags
|= EDGE_FALLTHRU
;
6024 case GIMPLE_OMP_RETURN
:
6025 case GIMPLE_OMP_CONTINUE
:
6026 case GIMPLE_OMP_SECTIONS_SWITCH
:
6027 case GIMPLE_OMP_FOR
:
6028 /* The edges from OMP constructs can be simply redirected. */
6031 case GIMPLE_EH_DISPATCH
:
6032 if (!(e
->flags
& EDGE_FALLTHRU
))
6033 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
6036 case GIMPLE_TRANSACTION
:
6037 if (e
->flags
& EDGE_TM_ABORT
)
6038 gimple_transaction_set_label_over (as_a
<gtransaction
*> (stmt
),
6039 gimple_block_label (dest
));
6040 else if (e
->flags
& EDGE_TM_UNINSTRUMENTED
)
6041 gimple_transaction_set_label_uninst (as_a
<gtransaction
*> (stmt
),
6042 gimple_block_label (dest
));
6044 gimple_transaction_set_label_norm (as_a
<gtransaction
*> (stmt
),
6045 gimple_block_label (dest
));
6049 /* Otherwise it must be a fallthru edge, and we don't need to
6050 do anything besides redirecting it. */
6051 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
6055 /* Update/insert PHI nodes as necessary. */
6057 /* Now update the edges in the CFG. */
6058 e
= ssa_redirect_edge (e
, dest
);
6063 /* Returns true if it is possible to remove edge E by redirecting
6064 it to the destination of the other edge from E->src. */
6067 gimple_can_remove_branch_p (const_edge e
)
6069 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
6075 /* Simple wrapper, as we can always redirect fallthru edges. */
6078 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
6080 e
= gimple_redirect_edge_and_branch (e
, dest
);
6087 /* Splits basic block BB after statement STMT (but at least after the
6088 labels). If STMT is NULL, BB is split just after the labels. */
6091 gimple_split_block (basic_block bb
, void *stmt
)
6093 gimple_stmt_iterator gsi
;
6094 gimple_stmt_iterator gsi_tgt
;
6100 new_bb
= create_empty_bb (bb
);
6102 /* Redirect the outgoing edges. */
6103 new_bb
->succs
= bb
->succs
;
6105 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
6108 /* Get a stmt iterator pointing to the first stmt to move. */
6109 if (!stmt
|| gimple_code ((gimple
*) stmt
) == GIMPLE_LABEL
)
6110 gsi
= gsi_after_labels (bb
);
6113 gsi
= gsi_for_stmt ((gimple
*) stmt
);
6117 /* Move everything from GSI to the new basic block. */
6118 if (gsi_end_p (gsi
))
6121 /* Split the statement list - avoid re-creating new containers as this
6122 brings ugly quadratic memory consumption in the inliner.
6123 (We are still quadratic since we need to update stmt BB pointers,
6125 gsi_split_seq_before (&gsi
, &list
);
6126 set_bb_seq (new_bb
, list
);
6127 for (gsi_tgt
= gsi_start (list
);
6128 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
6129 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
6135 /* Moves basic block BB after block AFTER. */
6138 gimple_move_block_after (basic_block bb
, basic_block after
)
6140 if (bb
->prev_bb
== after
)
6144 link_block (bb
, after
);
6150 /* Return TRUE if block BB has no executable statements, otherwise return
6154 gimple_empty_block_p (basic_block bb
)
6156 /* BB must have no executable statements. */
6157 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
6160 while (!gsi_end_p (gsi
))
6162 gimple
*stmt
= gsi_stmt (gsi
);
6163 if (is_gimple_debug (stmt
))
6165 else if (gimple_code (stmt
) == GIMPLE_NOP
6166 || gimple_code (stmt
) == GIMPLE_PREDICT
)
6176 /* Split a basic block if it ends with a conditional branch and if the
6177 other part of the block is not empty. */
6180 gimple_split_block_before_cond_jump (basic_block bb
)
6182 gimple
*last
, *split_point
;
6183 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
6184 if (gsi_end_p (gsi
))
6186 last
= gsi_stmt (gsi
);
6187 if (gimple_code (last
) != GIMPLE_COND
6188 && gimple_code (last
) != GIMPLE_SWITCH
)
6191 split_point
= gsi_stmt (gsi
);
6192 return split_block (bb
, split_point
)->dest
;
6196 /* Return true if basic_block can be duplicated. */
6199 gimple_can_duplicate_bb_p (const_basic_block bb
)
6201 gimple
*last
= last_stmt (CONST_CAST_BB (bb
));
6203 /* Do checks that can only fail for the last stmt, to minimize the work in the
6206 /* A transaction is a single entry multiple exit region. It
6207 must be duplicated in its entirety or not at all. */
6208 if (gimple_code (last
) == GIMPLE_TRANSACTION
)
6211 /* An IFN_UNIQUE call must be duplicated as part of its group,
6213 if (is_gimple_call (last
)
6214 && gimple_call_internal_p (last
)
6215 && gimple_call_internal_unique_p (last
))
6219 for (gimple_stmt_iterator gsi
= gsi_start_bb (CONST_CAST_BB (bb
));
6220 !gsi_end_p (gsi
); gsi_next (&gsi
))
6222 gimple
*g
= gsi_stmt (gsi
);
6224 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6225 duplicated as part of its group, or not at all.
6226 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6227 group, so the same holds there. */
6228 if (is_gimple_call (g
)
6229 && (gimple_call_internal_p (g
, IFN_GOMP_SIMT_ENTER_ALLOC
)
6230 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_EXIT
)
6231 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_VOTE_ANY
)
6232 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_XCHG_BFLY
)
6233 || gimple_call_internal_p (g
, IFN_GOMP_SIMT_XCHG_IDX
)))
6240 /* Create a duplicate of the basic block BB. NOTE: This does not
6241 preserve SSA form. */
6244 gimple_duplicate_bb (basic_block bb
, copy_bb_data
*id
)
6247 gimple_stmt_iterator gsi_tgt
;
6249 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
6251 /* Copy the PHI nodes. We ignore PHI node arguments here because
6252 the incoming edges have not been setup yet. */
6253 for (gphi_iterator gpi
= gsi_start_phis (bb
);
6259 copy
= create_phi_node (NULL_TREE
, new_bb
);
6260 create_new_def_for (gimple_phi_result (phi
), copy
,
6261 gimple_phi_result_ptr (copy
));
6262 gimple_set_uid (copy
, gimple_uid (phi
));
6265 gsi_tgt
= gsi_start_bb (new_bb
);
6266 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
6270 def_operand_p def_p
;
6271 ssa_op_iter op_iter
;
6273 gimple
*stmt
, *copy
;
6275 stmt
= gsi_stmt (gsi
);
6276 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6279 /* Don't duplicate label debug stmts. */
6280 if (gimple_debug_bind_p (stmt
)
6281 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
6285 /* Create a new copy of STMT and duplicate STMT's virtual
6287 copy
= gimple_copy (stmt
);
6288 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
6290 maybe_duplicate_eh_stmt (copy
, stmt
);
6291 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
6293 /* When copying around a stmt writing into a local non-user
6294 aggregate, make sure it won't share stack slot with other
6296 lhs
= gimple_get_lhs (stmt
);
6297 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
6299 tree base
= get_base_address (lhs
);
6301 && (VAR_P (base
) || TREE_CODE (base
) == RESULT_DECL
)
6302 && DECL_IGNORED_P (base
)
6303 && !TREE_STATIC (base
)
6304 && !DECL_EXTERNAL (base
)
6305 && (!VAR_P (base
) || !DECL_HAS_VALUE_EXPR_P (base
)))
6306 DECL_NONSHAREABLE (base
) = 1;
6309 /* If requested remap dependence info of cliques brought in
6312 for (unsigned i
= 0; i
< gimple_num_ops (copy
); ++i
)
6314 tree op
= gimple_op (copy
, i
);
6317 if (TREE_CODE (op
) == ADDR_EXPR
6318 || TREE_CODE (op
) == WITH_SIZE_EXPR
)
6319 op
= TREE_OPERAND (op
, 0);
6320 while (handled_component_p (op
))
6321 op
= TREE_OPERAND (op
, 0);
6322 if ((TREE_CODE (op
) == MEM_REF
6323 || TREE_CODE (op
) == TARGET_MEM_REF
)
6324 && MR_DEPENDENCE_CLIQUE (op
) > 1
6325 && MR_DEPENDENCE_CLIQUE (op
) != bb
->loop_father
->owned_clique
)
6327 if (!id
->dependence_map
)
6328 id
->dependence_map
= new hash_map
<dependence_hash
,
6331 unsigned short &newc
= id
->dependence_map
->get_or_insert
6332 (MR_DEPENDENCE_CLIQUE (op
), &existed
);
6335 gcc_assert (MR_DEPENDENCE_CLIQUE (op
) <= cfun
->last_clique
);
6336 newc
= ++cfun
->last_clique
;
6338 MR_DEPENDENCE_CLIQUE (op
) = newc
;
6342 /* Create new names for all the definitions created by COPY and
6343 add replacement mappings for each new name. */
6344 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
6345 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
6351 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6354 add_phi_args_after_copy_edge (edge e_copy
)
6356 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
6359 gphi
*phi
, *phi_copy
;
6361 gphi_iterator psi
, psi_copy
;
6363 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
6366 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
6368 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
6369 dest
= get_bb_original (e_copy
->dest
);
6371 dest
= e_copy
->dest
;
6373 e
= find_edge (bb
, dest
);
6376 /* During loop unrolling the target of the latch edge is copied.
6377 In this case we are not looking for edge to dest, but to
6378 duplicated block whose original was dest. */
6379 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6381 if ((e
->dest
->flags
& BB_DUPLICATED
)
6382 && get_bb_original (e
->dest
) == dest
)
6386 gcc_assert (e
!= NULL
);
6389 for (psi
= gsi_start_phis (e
->dest
),
6390 psi_copy
= gsi_start_phis (e_copy
->dest
);
6392 gsi_next (&psi
), gsi_next (&psi_copy
))
6395 phi_copy
= psi_copy
.phi ();
6396 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
6397 add_phi_arg (phi_copy
, def
, e_copy
,
6398 gimple_phi_arg_location_from_edge (phi
, e
));
6403 /* Basic block BB_COPY was created by code duplication. Add phi node
6404 arguments for edges going out of BB_COPY. The blocks that were
6405 duplicated have BB_DUPLICATED set. */
6408 add_phi_args_after_copy_bb (basic_block bb_copy
)
6413 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
6415 add_phi_args_after_copy_edge (e_copy
);
6419 /* Blocks in REGION_COPY array of length N_REGION were created by
6420 duplication of basic blocks. Add phi node arguments for edges
6421 going from these blocks. If E_COPY is not NULL, also add
6422 phi node arguments for its destination.*/
6425 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
6430 for (i
= 0; i
< n_region
; i
++)
6431 region_copy
[i
]->flags
|= BB_DUPLICATED
;
6433 for (i
= 0; i
< n_region
; i
++)
6434 add_phi_args_after_copy_bb (region_copy
[i
]);
6436 add_phi_args_after_copy_edge (e_copy
);
6438 for (i
= 0; i
< n_region
; i
++)
6439 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
6442 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6443 important exit edge EXIT. By important we mean that no SSA name defined
6444 inside region is live over the other exit edges of the region. All entry
6445 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6446 to the duplicate of the region. Dominance and loop information is
6447 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6448 UPDATE_DOMINANCE is false then we assume that the caller will update the
6449 dominance information after calling this function. The new basic
6450 blocks are stored to REGION_COPY in the same order as they had in REGION,
6451 provided that REGION_COPY is not NULL.
6452 The function returns false if it is unable to copy the region,
6456 gimple_duplicate_sese_region (edge entry
, edge exit
,
6457 basic_block
*region
, unsigned n_region
,
6458 basic_block
*region_copy
,
6459 bool update_dominance
)
6462 bool free_region_copy
= false, copying_header
= false;
6463 class loop
*loop
= entry
->dest
->loop_father
;
6465 vec
<basic_block
> doms
= vNULL
;
6467 profile_count total_count
= profile_count::uninitialized ();
6468 profile_count entry_count
= profile_count::uninitialized ();
6470 if (!can_copy_bbs_p (region
, n_region
))
6473 /* Some sanity checking. Note that we do not check for all possible
6474 missuses of the functions. I.e. if you ask to copy something weird,
6475 it will work, but the state of structures probably will not be
6477 for (i
= 0; i
< n_region
; i
++)
6479 /* We do not handle subloops, i.e. all the blocks must belong to the
6481 if (region
[i
]->loop_father
!= loop
)
6484 if (region
[i
] != entry
->dest
6485 && region
[i
] == loop
->header
)
6489 /* In case the function is used for loop header copying (which is the primary
6490 use), ensure that EXIT and its copy will be new latch and entry edges. */
6491 if (loop
->header
== entry
->dest
)
6493 copying_header
= true;
6495 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6498 for (i
= 0; i
< n_region
; i
++)
6499 if (region
[i
] != exit
->src
6500 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6504 initialize_original_copy_tables ();
6507 set_loop_copy (loop
, loop_outer (loop
));
6509 set_loop_copy (loop
, loop
);
6513 region_copy
= XNEWVEC (basic_block
, n_region
);
6514 free_region_copy
= true;
6517 /* Record blocks outside the region that are dominated by something
6519 if (update_dominance
)
6522 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6525 if (entry
->dest
->count
.initialized_p ())
6527 total_count
= entry
->dest
->count
;
6528 entry_count
= entry
->count ();
6529 /* Fix up corner cases, to avoid division by zero or creation of negative
6531 if (entry_count
> total_count
)
6532 entry_count
= total_count
;
6535 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6536 split_edge_bb_loc (entry
), update_dominance
);
6537 if (total_count
.initialized_p () && entry_count
.initialized_p ())
6539 scale_bbs_frequencies_profile_count (region
, n_region
,
6540 total_count
- entry_count
,
6542 scale_bbs_frequencies_profile_count (region_copy
, n_region
, entry_count
,
6548 loop
->header
= exit
->dest
;
6549 loop
->latch
= exit
->src
;
6552 /* Redirect the entry and add the phi node arguments. */
6553 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6554 gcc_assert (redirected
!= NULL
);
6555 flush_pending_stmts (entry
);
6557 /* Concerning updating of dominators: We must recount dominators
6558 for entry block and its copy. Anything that is outside of the
6559 region, but was dominated by something inside needs recounting as
6561 if (update_dominance
)
6563 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6564 doms
.safe_push (get_bb_original (entry
->dest
));
6565 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6569 /* Add the other PHI node arguments. */
6570 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6572 if (free_region_copy
)
6575 free_original_copy_tables ();
6579 /* Checks if BB is part of the region defined by N_REGION BBS. */
6581 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6585 for (n
= 0; n
< n_region
; n
++)
6593 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6594 are stored to REGION_COPY in the same order in that they appear
6595 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6596 the region, EXIT an exit from it. The condition guarding EXIT
6597 is moved to ENTRY. Returns true if duplication succeeds, false
6623 gimple_duplicate_sese_tail (edge entry
, edge exit
,
6624 basic_block
*region
, unsigned n_region
,
6625 basic_block
*region_copy
)
6628 bool free_region_copy
= false;
6629 class loop
*loop
= exit
->dest
->loop_father
;
6630 class loop
*orig_loop
= entry
->dest
->loop_father
;
6631 basic_block switch_bb
, entry_bb
, nentry_bb
;
6632 vec
<basic_block
> doms
;
6633 profile_count total_count
= profile_count::uninitialized (),
6634 exit_count
= profile_count::uninitialized ();
6635 edge exits
[2], nexits
[2], e
;
6636 gimple_stmt_iterator gsi
;
6639 basic_block exit_bb
;
6643 class loop
*target
, *aloop
, *cloop
;
6645 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6647 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6649 if (!can_copy_bbs_p (region
, n_region
))
6652 initialize_original_copy_tables ();
6653 set_loop_copy (orig_loop
, loop
);
6656 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6658 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6660 cloop
= duplicate_loop (aloop
, target
);
6661 duplicate_subloops (aloop
, cloop
);
6667 region_copy
= XNEWVEC (basic_block
, n_region
);
6668 free_region_copy
= true;
6671 gcc_assert (!need_ssa_update_p (cfun
));
6673 /* Record blocks outside the region that are dominated by something
6675 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6677 total_count
= exit
->src
->count
;
6678 exit_count
= exit
->count ();
6679 /* Fix up corner cases, to avoid division by zero or creation of negative
6681 if (exit_count
> total_count
)
6682 exit_count
= total_count
;
6684 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6685 split_edge_bb_loc (exit
), true);
6686 if (total_count
.initialized_p () && exit_count
.initialized_p ())
6688 scale_bbs_frequencies_profile_count (region
, n_region
,
6689 total_count
- exit_count
,
6691 scale_bbs_frequencies_profile_count (region_copy
, n_region
, exit_count
,
6695 /* Create the switch block, and put the exit condition to it. */
6696 entry_bb
= entry
->dest
;
6697 nentry_bb
= get_bb_copy (entry_bb
);
6698 if (!last_stmt (entry
->src
)
6699 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6700 switch_bb
= entry
->src
;
6702 switch_bb
= split_edge (entry
);
6703 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6705 gsi
= gsi_last_bb (switch_bb
);
6706 cond_stmt
= last_stmt (exit
->src
);
6707 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6708 cond_stmt
= gimple_copy (cond_stmt
);
6710 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6712 sorig
= single_succ_edge (switch_bb
);
6713 sorig
->flags
= exits
[1]->flags
;
6714 sorig
->probability
= exits
[1]->probability
;
6715 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6716 snew
->probability
= exits
[0]->probability
;
6719 /* Register the new edge from SWITCH_BB in loop exit lists. */
6720 rescan_loop_exit (snew
, true, false);
6722 /* Add the PHI node arguments. */
6723 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6725 /* Get rid of now superfluous conditions and associated edges (and phi node
6727 exit_bb
= exit
->dest
;
6729 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6730 PENDING_STMT (e
) = NULL
;
6732 /* The latch of ORIG_LOOP was copied, and so was the backedge
6733 to the original header. We redirect this backedge to EXIT_BB. */
6734 for (i
= 0; i
< n_region
; i
++)
6735 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6737 gcc_assert (single_succ_edge (region_copy
[i
]));
6738 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6739 PENDING_STMT (e
) = NULL
;
6740 for (psi
= gsi_start_phis (exit_bb
);
6745 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6746 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6749 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6750 PENDING_STMT (e
) = NULL
;
6752 /* Anything that is outside of the region, but was dominated by something
6753 inside needs to update dominance info. */
6754 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6756 /* Update the SSA web. */
6757 update_ssa (TODO_update_ssa
);
6759 if (free_region_copy
)
6762 free_original_copy_tables ();
6766 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6767 adding blocks when the dominator traversal reaches EXIT. This
6768 function silently assumes that ENTRY strictly dominates EXIT. */
6771 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6772 vec
<basic_block
> *bbs_p
)
6776 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6778 son
= next_dom_son (CDI_DOMINATORS
, son
))
6780 bbs_p
->safe_push (son
);
6782 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6786 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6787 The duplicates are recorded in VARS_MAP. */
6790 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6793 tree t
= *tp
, new_t
;
6794 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6796 if (DECL_CONTEXT (t
) == to_context
)
6800 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6806 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6807 add_local_decl (f
, new_t
);
6811 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6812 new_t
= copy_node (t
);
6814 DECL_CONTEXT (new_t
) = to_context
;
6825 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6826 VARS_MAP maps old ssa names and var_decls to the new ones. */
6829 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6834 gcc_assert (!virtual_operand_p (name
));
6836 tree
*loc
= vars_map
->get (name
);
6840 tree decl
= SSA_NAME_VAR (name
);
6843 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name
));
6844 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6845 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6846 decl
, SSA_NAME_DEF_STMT (name
));
6849 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6850 name
, SSA_NAME_DEF_STMT (name
));
6852 /* Now that we've used the def stmt to define new_name, make sure it
6853 doesn't define name anymore. */
6854 SSA_NAME_DEF_STMT (name
) = NULL
;
6856 vars_map
->put (name
, new_name
);
6870 hash_map
<tree
, tree
> *vars_map
;
6871 htab_t new_label_map
;
6872 hash_map
<void *, void *> *eh_map
;
6876 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6877 contained in *TP if it has been ORIG_BLOCK previously and change the
6878 DECL_CONTEXT of every local variable referenced in *TP. */
6881 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6883 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6884 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6889 tree block
= TREE_BLOCK (t
);
6890 if (block
== NULL_TREE
)
6892 else if (block
== p
->orig_block
6893 || p
->orig_block
== NULL_TREE
)
6895 /* tree_node_can_be_shared says we can share invariant
6896 addresses but unshare_expr copies them anyways. Make sure
6897 to unshare before adjusting the block in place - we do not
6898 always see a copy here. */
6899 if (TREE_CODE (t
) == ADDR_EXPR
6900 && is_gimple_min_invariant (t
))
6901 *tp
= t
= unshare_expr (t
);
6902 TREE_SET_BLOCK (t
, p
->new_block
);
6904 else if (flag_checking
)
6906 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6907 block
= BLOCK_SUPERCONTEXT (block
);
6908 gcc_assert (block
== p
->orig_block
);
6911 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6913 if (TREE_CODE (t
) == SSA_NAME
)
6914 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6915 else if (TREE_CODE (t
) == PARM_DECL
6916 && gimple_in_ssa_p (cfun
))
6917 *tp
= *(p
->vars_map
->get (t
));
6918 else if (TREE_CODE (t
) == LABEL_DECL
)
6920 if (p
->new_label_map
)
6922 struct tree_map in
, *out
;
6924 out
= (struct tree_map
*)
6925 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6930 /* For FORCED_LABELs we can end up with references from other
6931 functions if some SESE regions are outlined. It is UB to
6932 jump in between them, but they could be used just for printing
6933 addresses etc. In that case, DECL_CONTEXT on the label should
6934 be the function containing the glabel stmt with that LABEL_DECL,
6935 rather than whatever function a reference to the label was seen
6937 if (!FORCED_LABEL (t
) && !DECL_NONLOCAL (t
))
6938 DECL_CONTEXT (t
) = p
->to_context
;
6940 else if (p
->remap_decls_p
)
6942 /* Replace T with its duplicate. T should no longer appear in the
6943 parent function, so this looks wasteful; however, it may appear
6944 in referenced_vars, and more importantly, as virtual operands of
6945 statements, and in alias lists of other variables. It would be
6946 quite difficult to expunge it from all those places. ??? It might
6947 suffice to do this for addressable variables. */
6948 if ((VAR_P (t
) && !is_global_var (t
))
6949 || TREE_CODE (t
) == CONST_DECL
)
6950 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6954 else if (TYPE_P (t
))
6960 /* Helper for move_stmt_r. Given an EH region number for the source
6961 function, map that to the duplicate EH regio number in the dest. */
6964 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6966 eh_region old_r
, new_r
;
6968 old_r
= get_eh_region_from_number (old_nr
);
6969 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6971 return new_r
->index
;
6974 /* Similar, but operate on INTEGER_CSTs. */
6977 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6981 old_nr
= tree_to_shwi (old_t_nr
);
6982 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6984 return build_int_cst (integer_type_node
, new_nr
);
6987 /* Like move_stmt_op, but for gimple statements.
6989 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6990 contained in the current statement in *GSI_P and change the
6991 DECL_CONTEXT of every local variable referenced in the current
6995 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6996 struct walk_stmt_info
*wi
)
6998 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6999 gimple
*stmt
= gsi_stmt (*gsi_p
);
7000 tree block
= gimple_block (stmt
);
7002 if (block
== p
->orig_block
7003 || (p
->orig_block
== NULL_TREE
7004 && block
!= NULL_TREE
))
7005 gimple_set_block (stmt
, p
->new_block
);
7007 switch (gimple_code (stmt
))
7010 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7012 tree r
, fndecl
= gimple_call_fndecl (stmt
);
7013 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
7014 switch (DECL_FUNCTION_CODE (fndecl
))
7016 case BUILT_IN_EH_COPY_VALUES
:
7017 r
= gimple_call_arg (stmt
, 1);
7018 r
= move_stmt_eh_region_tree_nr (r
, p
);
7019 gimple_call_set_arg (stmt
, 1, r
);
7022 case BUILT_IN_EH_POINTER
:
7023 case BUILT_IN_EH_FILTER
:
7024 r
= gimple_call_arg (stmt
, 0);
7025 r
= move_stmt_eh_region_tree_nr (r
, p
);
7026 gimple_call_set_arg (stmt
, 0, r
);
7037 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
7038 int r
= gimple_resx_region (resx_stmt
);
7039 r
= move_stmt_eh_region_nr (r
, p
);
7040 gimple_resx_set_region (resx_stmt
, r
);
7044 case GIMPLE_EH_DISPATCH
:
7046 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
7047 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
7048 r
= move_stmt_eh_region_nr (r
, p
);
7049 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
7053 case GIMPLE_OMP_RETURN
:
7054 case GIMPLE_OMP_CONTINUE
:
7059 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7060 so that such labels can be referenced from other regions.
7061 Make sure to update it when seeing a GIMPLE_LABEL though,
7062 that is the owner of the label. */
7063 walk_gimple_op (stmt
, move_stmt_op
, wi
);
7064 *handled_ops_p
= true;
7065 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
7066 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
7067 DECL_CONTEXT (label
) = p
->to_context
;
7072 if (is_gimple_omp (stmt
))
7074 /* Do not remap variables inside OMP directives. Variables
7075 referenced in clauses and directive header belong to the
7076 parent function and should not be moved into the child
7078 bool save_remap_decls_p
= p
->remap_decls_p
;
7079 p
->remap_decls_p
= false;
7080 *handled_ops_p
= true;
7082 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
7085 p
->remap_decls_p
= save_remap_decls_p
;
7093 /* Move basic block BB from function CFUN to function DEST_FN. The
7094 block is moved out of the original linked list and placed after
7095 block AFTER in the new list. Also, the block is removed from the
7096 original array of blocks and placed in DEST_FN's array of blocks.
7097 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7098 updated to reflect the moved edges.
7100 The local variables are remapped to new instances, VARS_MAP is used
7101 to record the mapping. */
7104 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
7105 basic_block after
, bool update_edge_count_p
,
7106 struct move_stmt_d
*d
)
7108 struct control_flow_graph
*cfg
;
7111 gimple_stmt_iterator si
;
7114 /* Remove BB from dominance structures. */
7115 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
7117 /* Move BB from its current loop to the copy in the new function. */
7120 class loop
*new_loop
= (class loop
*)bb
->loop_father
->aux
;
7122 bb
->loop_father
= new_loop
;
7125 /* Link BB to the new linked list. */
7126 move_block_after (bb
, after
);
7128 /* Update the edge count in the corresponding flowgraphs. */
7129 if (update_edge_count_p
)
7130 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7132 cfun
->cfg
->x_n_edges
--;
7133 dest_cfun
->cfg
->x_n_edges
++;
7136 /* Remove BB from the original basic block array. */
7137 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
7138 cfun
->cfg
->x_n_basic_blocks
--;
7140 /* Grow DEST_CFUN's basic block array if needed. */
7141 cfg
= dest_cfun
->cfg
;
7142 cfg
->x_n_basic_blocks
++;
7143 if (bb
->index
>= cfg
->x_last_basic_block
)
7144 cfg
->x_last_basic_block
= bb
->index
+ 1;
7146 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
7147 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
7148 vec_safe_grow_cleared (cfg
->x_basic_block_info
,
7149 cfg
->x_last_basic_block
+ 1);
7151 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
7153 /* Remap the variables in phi nodes. */
7154 for (gphi_iterator psi
= gsi_start_phis (bb
);
7157 gphi
*phi
= psi
.phi ();
7159 tree op
= PHI_RESULT (phi
);
7163 if (virtual_operand_p (op
))
7165 /* Remove the phi nodes for virtual operands (alias analysis will be
7166 run for the new function, anyway). But replace all uses that
7167 might be outside of the region we move. */
7168 use_operand_p use_p
;
7169 imm_use_iterator iter
;
7171 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, op
)
7172 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7173 SET_USE (use_p
, SSA_NAME_VAR (op
));
7174 remove_phi_node (&psi
, true);
7178 SET_PHI_RESULT (phi
,
7179 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7180 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
7182 op
= USE_FROM_PTR (use
);
7183 if (TREE_CODE (op
) == SSA_NAME
)
7184 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
7187 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
7189 location_t locus
= gimple_phi_arg_location (phi
, i
);
7190 tree block
= LOCATION_BLOCK (locus
);
7192 if (locus
== UNKNOWN_LOCATION
)
7194 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
7196 locus
= set_block (locus
, d
->new_block
);
7197 gimple_phi_arg_set_location (phi
, i
, locus
);
7204 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7206 gimple
*stmt
= gsi_stmt (si
);
7207 struct walk_stmt_info wi
;
7209 memset (&wi
, 0, sizeof (wi
));
7211 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
7213 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
7215 tree label
= gimple_label_label (label_stmt
);
7216 int uid
= LABEL_DECL_UID (label
);
7218 gcc_assert (uid
> -1);
7220 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
7221 if (old_len
<= (unsigned) uid
)
7222 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, uid
+ 1);
7224 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
7225 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
7227 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
7229 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
7230 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
7233 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
7234 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
7236 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
7237 gimple_remove_stmt_histograms (cfun
, stmt
);
7239 /* We cannot leave any operands allocated from the operand caches of
7240 the current function. */
7241 free_stmt_operands (cfun
, stmt
);
7242 push_cfun (dest_cfun
);
7244 if (is_gimple_call (stmt
))
7245 notice_special_calls (as_a
<gcall
*> (stmt
));
7249 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7250 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
7252 tree block
= LOCATION_BLOCK (e
->goto_locus
);
7253 if (d
->orig_block
== NULL_TREE
7254 || block
== d
->orig_block
)
7255 e
->goto_locus
= set_block (e
->goto_locus
, d
->new_block
);
7259 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7260 the outermost EH region. Use REGION as the incoming base EH region.
7261 If there is no single outermost region, return NULL and set *ALL to
7265 find_outermost_region_in_block (struct function
*src_cfun
,
7266 basic_block bb
, eh_region region
,
7269 gimple_stmt_iterator si
;
7271 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
7273 gimple
*stmt
= gsi_stmt (si
);
7274 eh_region stmt_region
;
7277 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
7278 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
7282 region
= stmt_region
;
7283 else if (stmt_region
!= region
)
7285 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
7299 new_label_mapper (tree decl
, void *data
)
7301 htab_t hash
= (htab_t
) data
;
7305 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
7307 m
= XNEW (struct tree_map
);
7308 m
->hash
= DECL_UID (decl
);
7309 m
->base
.from
= decl
;
7310 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
7311 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
7312 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
7313 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
7315 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
7316 gcc_assert (*slot
== NULL
);
7323 /* Tree walker to replace the decls used inside value expressions by
7327 replace_block_vars_by_duplicates_1 (tree
*tp
, int *walk_subtrees
, void *data
)
7329 struct replace_decls_d
*rd
= (struct replace_decls_d
*)data
;
7331 switch (TREE_CODE (*tp
))
7336 replace_by_duplicate_decl (tp
, rd
->vars_map
, rd
->to_context
);
7342 if (IS_TYPE_OR_DECL_P (*tp
))
7343 *walk_subtrees
= false;
7348 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7352 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
7357 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
7360 if (!VAR_P (t
) && TREE_CODE (t
) != CONST_DECL
)
7362 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
7365 if (VAR_P (*tp
) && DECL_HAS_VALUE_EXPR_P (*tp
))
7367 tree x
= DECL_VALUE_EXPR (*tp
);
7368 struct replace_decls_d rd
= { vars_map
, to_context
};
7370 walk_tree (&x
, replace_block_vars_by_duplicates_1
, &rd
, NULL
);
7371 SET_DECL_VALUE_EXPR (t
, x
);
7372 DECL_HAS_VALUE_EXPR_P (t
) = 1;
7374 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
7379 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
7380 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
7383 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7387 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
7390 /* Discard it from the old loop array. */
7391 (*get_loops (fn1
))[loop
->num
] = NULL
;
7393 /* Place it in the new loop array, assigning it a new number. */
7394 loop
->num
= number_of_loops (fn2
);
7395 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
7397 /* Recurse to children. */
7398 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
7399 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
7402 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7403 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7406 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
7411 bitmap bbs
= BITMAP_ALLOC (NULL
);
7414 gcc_assert (entry
!= NULL
);
7415 gcc_assert (entry
!= exit
);
7416 gcc_assert (bbs_p
!= NULL
);
7418 gcc_assert (bbs_p
->length () > 0);
7420 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7421 bitmap_set_bit (bbs
, bb
->index
);
7423 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
7424 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
7426 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
7430 gcc_assert (single_pred_p (entry
));
7431 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
7434 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
7437 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
7442 gcc_assert (single_succ_p (exit
));
7443 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
7446 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
7449 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
7456 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7459 gather_ssa_name_hash_map_from (tree
const &from
, tree
const &, void *data
)
7461 bitmap release_names
= (bitmap
)data
;
7463 if (TREE_CODE (from
) != SSA_NAME
)
7466 bitmap_set_bit (release_names
, SSA_NAME_VERSION (from
));
7470 /* Return LOOP_DIST_ALIAS call if present in BB. */
7473 find_loop_dist_alias (basic_block bb
)
7475 gimple
*g
= last_stmt (bb
);
7476 if (g
== NULL
|| gimple_code (g
) != GIMPLE_COND
)
7479 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7481 if (gsi_end_p (gsi
))
7485 if (gimple_call_internal_p (g
, IFN_LOOP_DIST_ALIAS
))
7490 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7491 to VALUE and update any immediate uses of it's LHS. */
7494 fold_loop_internal_call (gimple
*g
, tree value
)
7496 tree lhs
= gimple_call_lhs (g
);
7497 use_operand_p use_p
;
7498 imm_use_iterator iter
;
7500 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
7502 update_call_from_tree (&gsi
, value
);
7503 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
7505 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
7506 SET_USE (use_p
, value
);
7507 update_stmt (use_stmt
);
7511 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7512 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7513 single basic block in the original CFG and the new basic block is
7514 returned. DEST_CFUN must not have a CFG yet.
7516 Note that the region need not be a pure SESE region. Blocks inside
7517 the region may contain calls to abort/exit. The only restriction
7518 is that ENTRY_BB should be the only entry point and it must
7521 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7522 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7523 to the new function.
7525 All local variables referenced in the region are assumed to be in
7526 the corresponding BLOCK_VARS and unexpanded variable lists
7527 associated with DEST_CFUN.
7529 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7530 reimplement move_sese_region_to_fn by duplicating the region rather than
7534 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
7535 basic_block exit_bb
, tree orig_block
)
7537 vec
<basic_block
> bbs
, dom_bbs
;
7538 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
7539 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
7540 struct function
*saved_cfun
= cfun
;
7541 int *entry_flag
, *exit_flag
;
7542 profile_probability
*entry_prob
, *exit_prob
;
7543 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
7546 htab_t new_label_map
;
7547 hash_map
<void *, void *> *eh_map
;
7548 class loop
*loop
= entry_bb
->loop_father
;
7549 class loop
*loop0
= get_loop (saved_cfun
, 0);
7550 struct move_stmt_d d
;
7552 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7554 gcc_assert (entry_bb
!= exit_bb
7556 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
7558 /* Collect all the blocks in the region. Manually add ENTRY_BB
7559 because it won't be added by dfs_enumerate_from. */
7561 bbs
.safe_push (entry_bb
);
7562 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
7565 verify_sese (entry_bb
, exit_bb
, &bbs
);
7567 /* The blocks that used to be dominated by something in BBS will now be
7568 dominated by the new block. */
7569 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
7573 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7574 the predecessor edges to ENTRY_BB and the successor edges to
7575 EXIT_BB so that we can re-attach them to the new basic block that
7576 will replace the region. */
7577 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
7578 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
7579 entry_flag
= XNEWVEC (int, num_entry_edges
);
7580 entry_prob
= XNEWVEC (profile_probability
, num_entry_edges
);
7582 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7584 entry_prob
[i
] = e
->probability
;
7585 entry_flag
[i
] = e
->flags
;
7586 entry_pred
[i
++] = e
->src
;
7592 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7593 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7594 exit_flag
= XNEWVEC (int, num_exit_edges
);
7595 exit_prob
= XNEWVEC (profile_probability
, num_exit_edges
);
7597 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7599 exit_prob
[i
] = e
->probability
;
7600 exit_flag
[i
] = e
->flags
;
7601 exit_succ
[i
++] = e
->dest
;
7613 /* Switch context to the child function to initialize DEST_FN's CFG. */
7614 gcc_assert (dest_cfun
->cfg
== NULL
);
7615 push_cfun (dest_cfun
);
7617 init_empty_tree_cfg ();
7619 /* Initialize EH information for the new function. */
7621 new_label_map
= NULL
;
7624 eh_region region
= NULL
;
7627 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7629 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
, &all
);
7634 init_eh_for_function ();
7635 if (region
!= NULL
|| all
)
7637 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7638 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7639 new_label_mapper
, new_label_map
);
7643 /* Initialize an empty loop tree. */
7644 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7645 init_loops_structure (dest_cfun
, loops
, 1);
7646 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7647 set_loops_for_fn (dest_cfun
, loops
);
7649 vec
<loop_p
, va_gc
> *larray
= get_loops (saved_cfun
)->copy ();
7651 /* Move the outlined loop tree part. */
7652 num_nodes
= bbs
.length ();
7653 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7655 if (bb
->loop_father
->header
== bb
)
7657 class loop
*this_loop
= bb
->loop_father
;
7658 class loop
*outer
= loop_outer (this_loop
);
7660 /* If the SESE region contains some bbs ending with
7661 a noreturn call, those are considered to belong
7662 to the outermost loop in saved_cfun, rather than
7663 the entry_bb's loop_father. */
7667 num_nodes
-= this_loop
->num_nodes
;
7668 flow_loop_tree_node_remove (bb
->loop_father
);
7669 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7670 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7673 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7676 /* Remove loop exits from the outlined region. */
7677 if (loops_for_fn (saved_cfun
)->exits
)
7678 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7680 struct loops
*l
= loops_for_fn (saved_cfun
);
7682 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7685 l
->exits
->clear_slot (slot
);
7689 /* Adjust the number of blocks in the tree root of the outlined part. */
7690 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7692 /* Setup a mapping to be used by move_block_to_fn. */
7693 loop
->aux
= current_loops
->tree_root
;
7694 loop0
->aux
= current_loops
->tree_root
;
7696 /* Fix up orig_loop_num. If the block referenced in it has been moved
7697 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7699 signed char *moved_orig_loop_num
= NULL
;
7700 FOR_EACH_LOOP_FN (dest_cfun
, dloop
, 0)
7701 if (dloop
->orig_loop_num
)
7703 if (moved_orig_loop_num
== NULL
)
7705 = XCNEWVEC (signed char, vec_safe_length (larray
));
7706 if ((*larray
)[dloop
->orig_loop_num
] != NULL
7707 && get_loop (saved_cfun
, dloop
->orig_loop_num
) == NULL
)
7709 if (moved_orig_loop_num
[dloop
->orig_loop_num
] >= 0
7710 && moved_orig_loop_num
[dloop
->orig_loop_num
] < 2)
7711 moved_orig_loop_num
[dloop
->orig_loop_num
]++;
7712 dloop
->orig_loop_num
= (*larray
)[dloop
->orig_loop_num
]->num
;
7716 moved_orig_loop_num
[dloop
->orig_loop_num
] = -1;
7717 dloop
->orig_loop_num
= 0;
7722 if (moved_orig_loop_num
)
7724 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7726 gimple
*g
= find_loop_dist_alias (bb
);
7730 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
7731 gcc_assert (orig_loop_num
7732 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
7733 if (moved_orig_loop_num
[orig_loop_num
] == 2)
7735 /* If we have moved both loops with this orig_loop_num into
7736 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7737 too, update the first argument. */
7738 gcc_assert ((*larray
)[dloop
->orig_loop_num
] != NULL
7739 && (get_loop (saved_cfun
, dloop
->orig_loop_num
)
7741 tree t
= build_int_cst (integer_type_node
,
7742 (*larray
)[dloop
->orig_loop_num
]->num
);
7743 gimple_call_set_arg (g
, 0, t
);
7745 /* Make sure the following loop will not update it. */
7746 moved_orig_loop_num
[orig_loop_num
] = 0;
7749 /* Otherwise at least one of the loops stayed in saved_cfun.
7750 Remove the LOOP_DIST_ALIAS call. */
7751 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
7753 FOR_EACH_BB_FN (bb
, saved_cfun
)
7755 gimple
*g
= find_loop_dist_alias (bb
);
7758 int orig_loop_num
= tree_to_shwi (gimple_call_arg (g
, 0));
7759 gcc_assert (orig_loop_num
7760 && (unsigned) orig_loop_num
< vec_safe_length (larray
));
7761 if (moved_orig_loop_num
[orig_loop_num
])
7762 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7763 of the corresponding loops was moved, remove it. */
7764 fold_loop_internal_call (g
, gimple_call_arg (g
, 1));
7766 XDELETEVEC (moved_orig_loop_num
);
7770 /* Move blocks from BBS into DEST_CFUN. */
7771 gcc_assert (bbs
.length () >= 2);
7772 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7773 hash_map
<tree
, tree
> vars_map
;
7775 memset (&d
, 0, sizeof (d
));
7776 d
.orig_block
= orig_block
;
7777 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7778 d
.from_context
= cfun
->decl
;
7779 d
.to_context
= dest_cfun
->decl
;
7780 d
.vars_map
= &vars_map
;
7781 d
.new_label_map
= new_label_map
;
7783 d
.remap_decls_p
= true;
7785 if (gimple_in_ssa_p (cfun
))
7786 for (tree arg
= DECL_ARGUMENTS (d
.to_context
); arg
; arg
= DECL_CHAIN (arg
))
7788 tree narg
= make_ssa_name_fn (dest_cfun
, arg
, gimple_build_nop ());
7789 set_ssa_default_def (dest_cfun
, arg
, narg
);
7790 vars_map
.put (arg
, narg
);
7793 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7795 /* No need to update edge counts on the last block. It has
7796 already been updated earlier when we detached the region from
7797 the original CFG. */
7798 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7802 /* Adjust the maximum clique used. */
7803 dest_cfun
->last_clique
= saved_cfun
->last_clique
;
7807 /* Loop sizes are no longer correct, fix them up. */
7808 loop
->num_nodes
-= num_nodes
;
7809 for (class loop
*outer
= loop_outer (loop
);
7810 outer
; outer
= loop_outer (outer
))
7811 outer
->num_nodes
-= num_nodes
;
7812 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7814 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7817 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7822 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7824 dest_cfun
->has_simduid_loops
= true;
7826 if (aloop
->force_vectorize
)
7827 dest_cfun
->has_force_vectorize_loops
= true;
7831 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7835 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7837 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7838 = BLOCK_SUBBLOCKS (orig_block
);
7839 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7840 block
; block
= BLOCK_CHAIN (block
))
7841 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7842 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7845 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7846 &vars_map
, dest_cfun
->decl
);
7849 htab_delete (new_label_map
);
7853 if (gimple_in_ssa_p (cfun
))
7855 /* We need to release ssa-names in a defined order, so first find them,
7856 and then iterate in ascending version order. */
7857 bitmap release_names
= BITMAP_ALLOC (NULL
);
7858 vars_map
.traverse
<void *, gather_ssa_name_hash_map_from
> (release_names
);
7861 EXECUTE_IF_SET_IN_BITMAP (release_names
, 0, i
, bi
)
7862 release_ssa_name (ssa_name (i
));
7863 BITMAP_FREE (release_names
);
7866 /* Rewire the entry and exit blocks. The successor to the entry
7867 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7868 the child function. Similarly, the predecessor of DEST_FN's
7869 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7870 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7871 various CFG manipulation function get to the right CFG.
7873 FIXME, this is silly. The CFG ought to become a parameter to
7875 push_cfun (dest_cfun
);
7876 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= entry_bb
->count
;
7877 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7880 make_single_succ_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7881 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= exit_bb
->count
;
7884 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
= profile_count::zero ();
7887 /* Back in the original function, the SESE region has disappeared,
7888 create a new basic block in its place. */
7889 bb
= create_empty_bb (entry_pred
[0]);
7891 add_bb_to_loop (bb
, loop
);
7892 for (i
= 0; i
< num_entry_edges
; i
++)
7894 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7895 e
->probability
= entry_prob
[i
];
7898 for (i
= 0; i
< num_exit_edges
; i
++)
7900 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7901 e
->probability
= exit_prob
[i
];
7904 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7905 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7906 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7923 /* Dump default def DEF to file FILE using FLAGS and indentation
7927 dump_default_def (FILE *file
, tree def
, int spc
, dump_flags_t flags
)
7929 for (int i
= 0; i
< spc
; ++i
)
7930 fprintf (file
, " ");
7931 dump_ssaname_info_to_file (file
, def
, spc
);
7933 print_generic_expr (file
, TREE_TYPE (def
), flags
);
7934 fprintf (file
, " ");
7935 print_generic_expr (file
, def
, flags
);
7936 fprintf (file
, " = ");
7937 print_generic_expr (file
, SSA_NAME_VAR (def
), flags
);
7938 fprintf (file
, ";\n");
7941 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7944 print_no_sanitize_attr_value (FILE *file
, tree value
)
7946 unsigned int flags
= tree_to_uhwi (value
);
7948 for (int i
= 0; sanitizer_opts
[i
].name
!= NULL
; ++i
)
7950 if ((sanitizer_opts
[i
].flag
& flags
) == sanitizer_opts
[i
].flag
)
7953 fprintf (file
, " | ");
7954 fprintf (file
, "%s", sanitizer_opts
[i
].name
);
7960 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7964 dump_function_to_file (tree fndecl
, FILE *file
, dump_flags_t flags
)
7966 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7967 struct function
*dsf
;
7968 bool ignore_topmost_bind
= false, any_var
= false;
7971 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7972 && decl_is_tm_clone (fndecl
));
7973 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7975 tree fntype
= TREE_TYPE (fndecl
);
7976 tree attrs
[] = { DECL_ATTRIBUTES (fndecl
), TYPE_ATTRIBUTES (fntype
) };
7978 for (int i
= 0; i
!= 2; ++i
)
7983 fprintf (file
, "__attribute__((");
7987 for (chain
= attrs
[i
]; chain
; first
= false, chain
= TREE_CHAIN (chain
))
7990 fprintf (file
, ", ");
7992 tree name
= get_attribute_name (chain
);
7993 print_generic_expr (file
, name
, dump_flags
);
7994 if (TREE_VALUE (chain
) != NULL_TREE
)
7996 fprintf (file
, " (");
7998 if (strstr (IDENTIFIER_POINTER (name
), "no_sanitize"))
7999 print_no_sanitize_attr_value (file
, TREE_VALUE (chain
));
8001 print_generic_expr (file
, TREE_VALUE (chain
), dump_flags
);
8002 fprintf (file
, ")");
8006 fprintf (file
, "))\n");
8009 current_function_decl
= fndecl
;
8010 if (flags
& TDF_GIMPLE
)
8012 static bool hotness_bb_param_printed
= false;
8013 if (profile_info
!= NULL
8014 && !hotness_bb_param_printed
)
8016 hotness_bb_param_printed
= true;
8018 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8019 " */\n", get_hot_bb_threshold ());
8022 print_generic_expr (file
, TREE_TYPE (TREE_TYPE (fndecl
)),
8023 dump_flags
| TDF_SLIM
);
8024 fprintf (file
, " __GIMPLE (%s",
8025 (fun
->curr_properties
& PROP_ssa
) ? "ssa"
8026 : (fun
->curr_properties
& PROP_cfg
) ? "cfg"
8031 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8032 if (bb
->count
.initialized_p ())
8033 fprintf (file
, ",%s(%d)",
8034 profile_quality_as_string (bb
->count
.quality ()),
8035 bb
->count
.value ());
8036 fprintf (file
, ")\n%s (", function_name (fun
));
8041 print_generic_expr (file
, TREE_TYPE (fntype
), dump_flags
);
8042 fprintf (file
, " %s %s(", function_name (fun
),
8043 tmclone
? "[tm-clone] " : "");
8046 arg
= DECL_ARGUMENTS (fndecl
);
8049 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
8050 fprintf (file
, " ");
8051 print_generic_expr (file
, arg
, dump_flags
);
8052 if (DECL_CHAIN (arg
))
8053 fprintf (file
, ", ");
8054 arg
= DECL_CHAIN (arg
);
8056 fprintf (file
, ")\n");
8058 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
8059 if (dsf
&& (flags
& TDF_EH
))
8060 dump_eh_tree (file
, dsf
);
8062 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
8064 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
8065 current_function_decl
= old_current_fndecl
;
8069 /* When GIMPLE is lowered, the variables are no longer available in
8070 BIND_EXPRs, so display them separately. */
8071 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
8074 ignore_topmost_bind
= true;
8076 fprintf (file
, "{\n");
8077 if (gimple_in_ssa_p (fun
)
8078 && (flags
& TDF_ALIAS
))
8080 for (arg
= DECL_ARGUMENTS (fndecl
); arg
!= NULL
;
8081 arg
= DECL_CHAIN (arg
))
8083 tree def
= ssa_default_def (fun
, arg
);
8085 dump_default_def (file
, def
, 2, flags
);
8088 tree res
= DECL_RESULT (fun
->decl
);
8089 if (res
!= NULL_TREE
8090 && DECL_BY_REFERENCE (res
))
8092 tree def
= ssa_default_def (fun
, res
);
8094 dump_default_def (file
, def
, 2, flags
);
8097 tree static_chain
= fun
->static_chain_decl
;
8098 if (static_chain
!= NULL_TREE
)
8100 tree def
= ssa_default_def (fun
, static_chain
);
8102 dump_default_def (file
, def
, 2, flags
);
8106 if (!vec_safe_is_empty (fun
->local_decls
))
8107 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
8109 print_generic_decl (file
, var
, flags
);
8110 fprintf (file
, "\n");
8117 if (gimple_in_ssa_p (cfun
))
8118 FOR_EACH_SSA_NAME (ix
, name
, cfun
)
8120 if (!SSA_NAME_VAR (name
))
8122 fprintf (file
, " ");
8123 print_generic_expr (file
, TREE_TYPE (name
), flags
);
8124 fprintf (file
, " ");
8125 print_generic_expr (file
, name
, flags
);
8126 fprintf (file
, ";\n");
8133 if (fun
&& fun
->decl
== fndecl
8135 && basic_block_info_for_fn (fun
))
8137 /* If the CFG has been built, emit a CFG-based dump. */
8138 if (!ignore_topmost_bind
)
8139 fprintf (file
, "{\n");
8141 if (any_var
&& n_basic_blocks_for_fn (fun
))
8142 fprintf (file
, "\n");
8144 FOR_EACH_BB_FN (bb
, fun
)
8145 dump_bb (file
, bb
, 2, flags
);
8147 fprintf (file
, "}\n");
8149 else if (fun
->curr_properties
& PROP_gimple_any
)
8151 /* The function is now in GIMPLE form but the CFG has not been
8152 built yet. Emit the single sequence of GIMPLE statements
8153 that make up its body. */
8154 gimple_seq body
= gimple_body (fndecl
);
8156 if (gimple_seq_first_stmt (body
)
8157 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
8158 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
8159 print_gimple_seq (file
, body
, 0, flags
);
8162 if (!ignore_topmost_bind
)
8163 fprintf (file
, "{\n");
8166 fprintf (file
, "\n");
8168 print_gimple_seq (file
, body
, 2, flags
);
8169 fprintf (file
, "}\n");
8176 /* Make a tree based dump. */
8177 chain
= DECL_SAVED_TREE (fndecl
);
8178 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
8180 if (ignore_topmost_bind
)
8182 chain
= BIND_EXPR_BODY (chain
);
8190 if (!ignore_topmost_bind
)
8192 fprintf (file
, "{\n");
8193 /* No topmost bind, pretend it's ignored for later. */
8194 ignore_topmost_bind
= true;
8200 fprintf (file
, "\n");
8202 print_generic_stmt_indented (file
, chain
, flags
, indent
);
8203 if (ignore_topmost_bind
)
8204 fprintf (file
, "}\n");
8207 if (flags
& TDF_ENUMERATE_LOCALS
)
8208 dump_enumerated_decls (file
, flags
);
8209 fprintf (file
, "\n\n");
8211 current_function_decl
= old_current_fndecl
;
8214 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8217 debug_function (tree fn
, dump_flags_t flags
)
8219 dump_function_to_file (fn
, stderr
, flags
);
8223 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8226 print_pred_bbs (FILE *file
, basic_block bb
)
8231 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
8232 fprintf (file
, "bb_%d ", e
->src
->index
);
8236 /* Print on FILE the indexes for the successors of basic_block BB. */
8239 print_succ_bbs (FILE *file
, basic_block bb
)
8244 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8245 fprintf (file
, "bb_%d ", e
->dest
->index
);
8248 /* Print to FILE the basic block BB following the VERBOSITY level. */
8251 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
8253 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
8254 memset ((void *) s_indent
, ' ', (size_t) indent
);
8255 s_indent
[indent
] = '\0';
8257 /* Print basic_block's header. */
8260 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
8261 print_pred_bbs (file
, bb
);
8262 fprintf (file
, "}, succs = {");
8263 print_succ_bbs (file
, bb
);
8264 fprintf (file
, "})\n");
8267 /* Print basic_block's body. */
8270 fprintf (file
, "%s {\n", s_indent
);
8271 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
8272 fprintf (file
, "%s }\n", s_indent
);
8276 static void print_loop_and_siblings (FILE *, class loop
*, int, int);
8278 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8279 VERBOSITY level this outputs the contents of the loop, or just its
8283 print_loop (FILE *file
, class loop
*loop
, int indent
, int verbosity
)
8291 s_indent
= (char *) alloca ((size_t) indent
+ 1);
8292 memset ((void *) s_indent
, ' ', (size_t) indent
);
8293 s_indent
[indent
] = '\0';
8295 /* Print loop's header. */
8296 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
8298 fprintf (file
, "header = %d", loop
->header
->index
);
8301 fprintf (file
, "deleted)\n");
8305 fprintf (file
, ", latch = %d", loop
->latch
->index
);
8307 fprintf (file
, ", multiple latches");
8308 fprintf (file
, ", niter = ");
8309 print_generic_expr (file
, loop
->nb_iterations
);
8311 if (loop
->any_upper_bound
)
8313 fprintf (file
, ", upper_bound = ");
8314 print_decu (loop
->nb_iterations_upper_bound
, file
);
8316 if (loop
->any_likely_upper_bound
)
8318 fprintf (file
, ", likely_upper_bound = ");
8319 print_decu (loop
->nb_iterations_likely_upper_bound
, file
);
8322 if (loop
->any_estimate
)
8324 fprintf (file
, ", estimate = ");
8325 print_decu (loop
->nb_iterations_estimate
, file
);
8328 fprintf (file
, ", unroll = %d", loop
->unroll
);
8329 fprintf (file
, ")\n");
8331 /* Print loop's body. */
8334 fprintf (file
, "%s{\n", s_indent
);
8335 FOR_EACH_BB_FN (bb
, cfun
)
8336 if (bb
->loop_father
== loop
)
8337 print_loops_bb (file
, bb
, indent
, verbosity
);
8339 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
8340 fprintf (file
, "%s}\n", s_indent
);
8344 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8345 spaces. Following VERBOSITY level this outputs the contents of the
8346 loop, or just its structure. */
8349 print_loop_and_siblings (FILE *file
, class loop
*loop
, int indent
,
8355 print_loop (file
, loop
, indent
, verbosity
);
8356 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
8359 /* Follow a CFG edge from the entry point of the program, and on entry
8360 of a loop, pretty print the loop structure on FILE. */
8363 print_loops (FILE *file
, int verbosity
)
8367 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
8368 fprintf (file
, "\nLoops in function: %s\n", current_function_name ());
8369 if (bb
&& bb
->loop_father
)
8370 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
8376 debug (class loop
&ref
)
8378 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
8382 debug (class loop
*ptr
)
8387 fprintf (stderr
, "<nil>\n");
8390 /* Dump a loop verbosely. */
8393 debug_verbose (class loop
&ref
)
8395 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
8399 debug_verbose (class loop
*ptr
)
8404 fprintf (stderr
, "<nil>\n");
8408 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8411 debug_loops (int verbosity
)
8413 print_loops (stderr
, verbosity
);
8416 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8419 debug_loop (class loop
*loop
, int verbosity
)
8421 print_loop (stderr
, loop
, 0, verbosity
);
8424 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8428 debug_loop_num (unsigned num
, int verbosity
)
8430 debug_loop (get_loop (cfun
, num
), verbosity
);
8433 /* Return true if BB ends with a call, possibly followed by some
8434 instructions that must stay with the call. Return false,
8438 gimple_block_ends_with_call_p (basic_block bb
)
8440 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8441 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
8445 /* Return true if BB ends with a conditional branch. Return false,
8449 gimple_block_ends_with_condjump_p (const_basic_block bb
)
8451 gimple
*stmt
= last_stmt (CONST_CAST_BB (bb
));
8452 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
8456 /* Return true if statement T may terminate execution of BB in ways not
8457 explicitly represtented in the CFG. */
8460 stmt_can_terminate_bb_p (gimple
*t
)
8462 tree fndecl
= NULL_TREE
;
8465 /* Eh exception not handled internally terminates execution of the whole
8467 if (stmt_can_throw_external (cfun
, t
))
8470 /* NORETURN and LONGJMP calls already have an edge to exit.
8471 CONST and PURE calls do not need one.
8472 We don't currently check for CONST and PURE here, although
8473 it would be a good idea, because those attributes are
8474 figured out from the RTL in mark_constant_function, and
8475 the counter incrementation code from -fprofile-arcs
8476 leads to different results from -fbranch-probabilities. */
8477 if (is_gimple_call (t
))
8479 fndecl
= gimple_call_fndecl (t
);
8480 call_flags
= gimple_call_flags (t
);
8483 if (is_gimple_call (t
)
8485 && fndecl_built_in_p (fndecl
)
8486 && (call_flags
& ECF_NOTHROW
)
8487 && !(call_flags
& ECF_RETURNS_TWICE
)
8488 /* fork() doesn't really return twice, but the effect of
8489 wrapping it in __gcov_fork() which calls __gcov_dump() and
8490 __gcov_reset() and clears the counters before forking has the same
8491 effect as returning twice. Force a fake edge. */
8492 && !fndecl_built_in_p (fndecl
, BUILT_IN_FORK
))
8495 if (is_gimple_call (t
))
8501 if (call_flags
& (ECF_PURE
| ECF_CONST
)
8502 && !(call_flags
& ECF_LOOPING_CONST_OR_PURE
))
8505 /* Function call may do longjmp, terminate program or do other things.
8506 Special case noreturn that have non-abnormal edges out as in this case
8507 the fact is sufficiently represented by lack of edges out of T. */
8508 if (!(call_flags
& ECF_NORETURN
))
8512 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8513 if ((e
->flags
& EDGE_FAKE
) == 0)
8517 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
8518 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
8525 /* Add fake edges to the function exit for any non constant and non
8526 noreturn calls (or noreturn calls with EH/abnormal edges),
8527 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8528 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8531 The goal is to expose cases in which entering a basic block does
8532 not imply that all subsequent instructions must be executed. */
8535 gimple_flow_call_edges_add (sbitmap blocks
)
8538 int blocks_split
= 0;
8539 int last_bb
= last_basic_block_for_fn (cfun
);
8540 bool check_last_block
= false;
8542 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
8546 check_last_block
= true;
8548 check_last_block
= bitmap_bit_p (blocks
,
8549 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
8551 /* In the last basic block, before epilogue generation, there will be
8552 a fallthru edge to EXIT. Special care is required if the last insn
8553 of the last basic block is a call because make_edge folds duplicate
8554 edges, which would result in the fallthru edge also being marked
8555 fake, which would result in the fallthru edge being removed by
8556 remove_fake_edges, which would result in an invalid CFG.
8558 Moreover, we can't elide the outgoing fake edge, since the block
8559 profiler needs to take this into account in order to solve the minimal
8560 spanning tree in the case that the call doesn't return.
8562 Handle this by adding a dummy instruction in a new last basic block. */
8563 if (check_last_block
)
8565 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
8566 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
8569 if (!gsi_end_p (gsi
))
8572 if (t
&& stmt_can_terminate_bb_p (t
))
8576 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8579 gsi_insert_on_edge (e
, gimple_build_nop ());
8580 gsi_commit_edge_inserts ();
8585 /* Now add fake edges to the function exit for any non constant
8586 calls since there is no way that we can determine if they will
8588 for (i
= 0; i
< last_bb
; i
++)
8590 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8591 gimple_stmt_iterator gsi
;
8592 gimple
*stmt
, *last_stmt
;
8597 if (blocks
&& !bitmap_bit_p (blocks
, i
))
8600 gsi
= gsi_last_nondebug_bb (bb
);
8601 if (!gsi_end_p (gsi
))
8603 last_stmt
= gsi_stmt (gsi
);
8606 stmt
= gsi_stmt (gsi
);
8607 if (stmt_can_terminate_bb_p (stmt
))
8611 /* The handling above of the final block before the
8612 epilogue should be enough to verify that there is
8613 no edge to the exit block in CFG already.
8614 Calling make_edge in such case would cause us to
8615 mark that edge as fake and remove it later. */
8616 if (flag_checking
&& stmt
== last_stmt
)
8618 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
8619 gcc_assert (e
== NULL
);
8622 /* Note that the following may create a new basic block
8623 and renumber the existing basic blocks. */
8624 if (stmt
!= last_stmt
)
8626 e
= split_block (bb
, stmt
);
8630 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
8631 e
->probability
= profile_probability::guessed_never ();
8635 while (!gsi_end_p (gsi
));
8640 checking_verify_flow_info ();
8642 return blocks_split
;
8645 /* Removes edge E and all the blocks dominated by it, and updates dominance
8646 information. The IL in E->src needs to be updated separately.
8647 If dominance info is not available, only the edge E is removed.*/
8650 remove_edge_and_dominated_blocks (edge e
)
8652 vec
<basic_block
> bbs_to_remove
= vNULL
;
8653 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
8656 bool none_removed
= false;
8658 basic_block bb
, dbb
;
8661 /* If we are removing a path inside a non-root loop that may change
8662 loop ownership of blocks or remove loops. Mark loops for fixup. */
8664 && loop_outer (e
->src
->loop_father
) != NULL
8665 && e
->src
->loop_father
== e
->dest
->loop_father
)
8666 loops_state_set (LOOPS_NEED_FIXUP
);
8668 if (!dom_info_available_p (CDI_DOMINATORS
))
8674 /* No updating is needed for edges to exit. */
8675 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8677 if (cfgcleanup_altered_bbs
)
8678 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8683 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8684 that is not dominated by E->dest, then this set is empty. Otherwise,
8685 all the basic blocks dominated by E->dest are removed.
8687 Also, to DF_IDOM we store the immediate dominators of the blocks in
8688 the dominance frontier of E (i.e., of the successors of the
8689 removed blocks, if there are any, and of E->dest otherwise). */
8690 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
8695 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
8697 none_removed
= true;
8702 auto_bitmap df
, df_idom
;
8704 bitmap_set_bit (df_idom
,
8705 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
8708 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
8709 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8711 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
8713 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
8714 bitmap_set_bit (df
, f
->dest
->index
);
8717 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
8718 bitmap_clear_bit (df
, bb
->index
);
8720 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
8722 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8723 bitmap_set_bit (df_idom
,
8724 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
8728 if (cfgcleanup_altered_bbs
)
8730 /* Record the set of the altered basic blocks. */
8731 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
8732 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
8735 /* Remove E and the cancelled blocks. */
8740 /* Walk backwards so as to get a chance to substitute all
8741 released DEFs into debug stmts. See
8742 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8744 for (i
= bbs_to_remove
.length (); i
-- > 0; )
8745 delete_basic_block (bbs_to_remove
[i
]);
8748 /* Update the dominance information. The immediate dominator may change only
8749 for blocks whose immediate dominator belongs to DF_IDOM:
8751 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8752 removal. Let Z the arbitrary block such that idom(Z) = Y and
8753 Z dominates X after the removal. Before removal, there exists a path P
8754 from Y to X that avoids Z. Let F be the last edge on P that is
8755 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8756 dominates W, and because of P, Z does not dominate W), and W belongs to
8757 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8758 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
8760 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8761 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
8763 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
8764 bbs_to_fix_dom
.safe_push (dbb
);
8767 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
8769 bbs_to_remove
.release ();
8770 bbs_to_fix_dom
.release ();
8773 /* Purge dead EH edges from basic block BB. */
8776 gimple_purge_dead_eh_edges (basic_block bb
)
8778 bool changed
= false;
8781 gimple
*stmt
= last_stmt (bb
);
8783 if (stmt
&& stmt_can_throw_internal (cfun
, stmt
))
8786 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8788 if (e
->flags
& EDGE_EH
)
8790 remove_edge_and_dominated_blocks (e
);
8800 /* Purge dead EH edges from basic block listed in BLOCKS. */
8803 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
8805 bool changed
= false;
8809 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8811 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8813 /* Earlier gimple_purge_dead_eh_edges could have removed
8814 this basic block already. */
8815 gcc_assert (bb
|| changed
);
8817 changed
|= gimple_purge_dead_eh_edges (bb
);
8823 /* Purge dead abnormal call edges from basic block BB. */
8826 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
8828 bool changed
= false;
8831 gimple
*stmt
= last_stmt (bb
);
8833 if (!cfun
->has_nonlocal_label
8834 && !cfun
->calls_setjmp
)
8837 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8840 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8842 if (e
->flags
& EDGE_ABNORMAL
)
8844 if (e
->flags
& EDGE_FALLTHRU
)
8845 e
->flags
&= ~EDGE_ABNORMAL
;
8847 remove_edge_and_dominated_blocks (e
);
8857 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8860 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8862 bool changed
= false;
8866 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8868 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8870 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8871 this basic block already. */
8872 gcc_assert (bb
|| changed
);
8874 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8880 /* This function is called whenever a new edge is created or
8884 gimple_execute_on_growing_pred (edge e
)
8886 basic_block bb
= e
->dest
;
8888 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8889 reserve_phi_args_for_new_edge (bb
);
8892 /* This function is called immediately before edge E is removed from
8893 the edge vector E->dest->preds. */
8896 gimple_execute_on_shrinking_pred (edge e
)
8898 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8899 remove_phi_args (e
);
8902 /*---------------------------------------------------------------------------
8903 Helper functions for Loop versioning
8904 ---------------------------------------------------------------------------*/
8906 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8907 of 'first'. Both of them are dominated by 'new_head' basic block. When
8908 'new_head' was created by 'second's incoming edge it received phi arguments
8909 on the edge by split_edge(). Later, additional edge 'e' was created to
8910 connect 'new_head' and 'first'. Now this routine adds phi args on this
8911 additional edge 'e' that new_head to second edge received as part of edge
8915 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8916 basic_block new_head
, edge e
)
8919 gphi_iterator psi1
, psi2
;
8921 edge e2
= find_edge (new_head
, second
);
8923 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8924 edge, we should always have an edge from NEW_HEAD to SECOND. */
8925 gcc_assert (e2
!= NULL
);
8927 /* Browse all 'second' basic block phi nodes and add phi args to
8928 edge 'e' for 'first' head. PHI args are always in correct order. */
8930 for (psi2
= gsi_start_phis (second
),
8931 psi1
= gsi_start_phis (first
);
8932 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8933 gsi_next (&psi2
), gsi_next (&psi1
))
8937 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8938 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8943 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8944 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8945 the destination of the ELSE part. */
8948 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8949 basic_block second_head ATTRIBUTE_UNUSED
,
8950 basic_block cond_bb
, void *cond_e
)
8952 gimple_stmt_iterator gsi
;
8953 gimple
*new_cond_expr
;
8954 tree cond_expr
= (tree
) cond_e
;
8957 /* Build new conditional expr */
8958 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8959 NULL_TREE
, NULL_TREE
);
8961 /* Add new cond in cond_bb. */
8962 gsi
= gsi_last_bb (cond_bb
);
8963 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8965 /* Adjust edges appropriately to connect new head with first head
8966 as well as second head. */
8967 e0
= single_succ_edge (cond_bb
);
8968 e0
->flags
&= ~EDGE_FALLTHRU
;
8969 e0
->flags
|= EDGE_FALSE_VALUE
;
8973 /* Do book-keeping of basic block BB for the profile consistency checker.
8974 Store the counting in RECORD. */
8976 gimple_account_profile_record (basic_block bb
,
8977 struct profile_record
*record
)
8979 gimple_stmt_iterator i
;
8980 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8983 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8984 if (bb
->count
.initialized_p ())
8986 += estimate_num_insns (gsi_stmt (i
),
8987 &eni_time_weights
) * bb
->count
.to_gcov_type ();
8988 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8990 += estimate_num_insns (gsi_stmt (i
),
8991 &eni_time_weights
) * bb
->count
.to_frequency (cfun
);
8995 struct cfg_hooks gimple_cfg_hooks
= {
8997 gimple_verify_flow_info
,
8998 gimple_dump_bb
, /* dump_bb */
8999 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
9000 create_bb
, /* create_basic_block */
9001 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
9002 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
9003 gimple_can_remove_branch_p
, /* can_remove_branch_p */
9004 remove_bb
, /* delete_basic_block */
9005 gimple_split_block
, /* split_block */
9006 gimple_move_block_after
, /* move_block_after */
9007 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
9008 gimple_merge_blocks
, /* merge_blocks */
9009 gimple_predict_edge
, /* predict_edge */
9010 gimple_predicted_by_p
, /* predicted_by_p */
9011 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
9012 gimple_duplicate_bb
, /* duplicate_block */
9013 gimple_split_edge
, /* split_edge */
9014 gimple_make_forwarder_block
, /* make_forward_block */
9015 NULL
, /* tidy_fallthru_edge */
9016 NULL
, /* force_nonfallthru */
9017 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
9018 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
9019 gimple_flow_call_edges_add
, /* flow_call_edges_add */
9020 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
9021 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
9022 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
9023 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
9024 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
9025 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
9026 flush_pending_stmts
, /* flush_pending_stmts */
9027 gimple_empty_block_p
, /* block_empty_p */
9028 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
9029 gimple_account_profile_record
,
9033 /* Split all critical edges. Split some extra (not necessarily critical) edges
9034 if FOR_EDGE_INSERTION_P is true. */
9037 split_critical_edges (bool for_edge_insertion_p
/* = false */)
9043 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9044 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9045 mappings around the calls to split_edge. */
9046 start_recording_case_labels ();
9047 FOR_ALL_BB_FN (bb
, cfun
)
9049 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9051 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
9053 /* PRE inserts statements to edges and expects that
9054 since split_critical_edges was done beforehand, committing edge
9055 insertions will not split more edges. In addition to critical
9056 edges we must split edges that have multiple successors and
9057 end by control flow statements, such as RESX.
9058 Go ahead and split them too. This matches the logic in
9059 gimple_find_edge_insert_loc. */
9060 else if (for_edge_insertion_p
9061 && (!single_pred_p (e
->dest
)
9062 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
9063 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
9064 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
9065 && !(e
->flags
& EDGE_ABNORMAL
))
9067 gimple_stmt_iterator gsi
;
9069 gsi
= gsi_last_bb (e
->src
);
9070 if (!gsi_end_p (gsi
)
9071 && stmt_ends_bb_p (gsi_stmt (gsi
))
9072 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
9073 && !gimple_call_builtin_p (gsi_stmt (gsi
),
9079 end_recording_case_labels ();
9085 const pass_data pass_data_split_crit_edges
=
9087 GIMPLE_PASS
, /* type */
9088 "crited", /* name */
9089 OPTGROUP_NONE
, /* optinfo_flags */
9090 TV_TREE_SPLIT_EDGES
, /* tv_id */
9091 PROP_cfg
, /* properties_required */
9092 PROP_no_crit_edges
, /* properties_provided */
9093 0, /* properties_destroyed */
9094 0, /* todo_flags_start */
9095 0, /* todo_flags_finish */
9098 class pass_split_crit_edges
: public gimple_opt_pass
9101 pass_split_crit_edges (gcc::context
*ctxt
)
9102 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
9105 /* opt_pass methods: */
9106 virtual unsigned int execute (function
*) { return split_critical_edges (); }
9108 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
9109 }; // class pass_split_crit_edges
9114 make_pass_split_crit_edges (gcc::context
*ctxt
)
9116 return new pass_split_crit_edges (ctxt
);
9120 /* Insert COND expression which is GIMPLE_COND after STMT
9121 in basic block BB with appropriate basic block split
9122 and creation of a new conditionally executed basic block.
9123 Update profile so the new bb is visited with probability PROB.
9124 Return created basic block. */
9126 insert_cond_bb (basic_block bb
, gimple
*stmt
, gimple
*cond
,
9127 profile_probability prob
)
9129 edge fall
= split_block (bb
, stmt
);
9130 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
9133 /* Insert cond statement. */
9134 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
9135 if (gsi_end_p (iter
))
9136 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
9138 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
9140 /* Create conditionally executed block. */
9141 new_bb
= create_empty_bb (bb
);
9142 edge e
= make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
9143 e
->probability
= prob
;
9144 new_bb
->count
= e
->count ();
9145 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
9147 /* Fix edge for split bb. */
9148 fall
->flags
= EDGE_FALSE_VALUE
;
9149 fall
->probability
-= e
->probability
;
9151 /* Update dominance info. */
9152 if (dom_info_available_p (CDI_DOMINATORS
))
9154 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
9155 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
9158 /* Update loop info. */
9160 add_bb_to_loop (new_bb
, bb
->loop_father
);
9165 /* Build a ternary operation and gimplify it. Emit code before GSI.
9166 Return the gimple_val holding the result. */
9169 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
9170 tree type
, tree a
, tree b
, tree c
)
9173 location_t loc
= gimple_location (gsi_stmt (*gsi
));
9175 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
9176 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9180 /* Build a binary operation and gimplify it. Emit code before GSI.
9181 Return the gimple_val holding the result. */
9184 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
9185 tree type
, tree a
, tree b
)
9189 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
9190 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9194 /* Build a unary operation and gimplify it. Emit code before GSI.
9195 Return the gimple_val holding the result. */
9198 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
9203 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
9204 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
9210 /* Given a basic block B which ends with a conditional and has
9211 precisely two successors, determine which of the edges is taken if
9212 the conditional is true and which is taken if the conditional is
9213 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9216 extract_true_false_edges_from_block (basic_block b
,
9220 edge e
= EDGE_SUCC (b
, 0);
9222 if (e
->flags
& EDGE_TRUE_VALUE
)
9225 *false_edge
= EDGE_SUCC (b
, 1);
9230 *true_edge
= EDGE_SUCC (b
, 1);
9235 /* From a controlling predicate in the immediate dominator DOM of
9236 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9237 predicate evaluates to true and false and store them to
9238 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9239 they are non-NULL. Returns true if the edges can be determined,
9240 else return false. */
9243 extract_true_false_controlled_edges (basic_block dom
, basic_block phiblock
,
9244 edge
*true_controlled_edge
,
9245 edge
*false_controlled_edge
)
9247 basic_block bb
= phiblock
;
9248 edge true_edge
, false_edge
, tem
;
9249 edge e0
= NULL
, e1
= NULL
;
9251 /* We have to verify that one edge into the PHI node is dominated
9252 by the true edge of the predicate block and the other edge
9253 dominated by the false edge. This ensures that the PHI argument
9254 we are going to take is completely determined by the path we
9255 take from the predicate block.
9256 We can only use BB dominance checks below if the destination of
9257 the true/false edges are dominated by their edge, thus only
9258 have a single predecessor. */
9259 extract_true_false_edges_from_block (dom
, &true_edge
, &false_edge
);
9260 tem
= EDGE_PRED (bb
, 0);
9261 if (tem
== true_edge
9262 || (single_pred_p (true_edge
->dest
)
9263 && (tem
->src
== true_edge
->dest
9264 || dominated_by_p (CDI_DOMINATORS
,
9265 tem
->src
, true_edge
->dest
))))
9267 else if (tem
== false_edge
9268 || (single_pred_p (false_edge
->dest
)
9269 && (tem
->src
== false_edge
->dest
9270 || dominated_by_p (CDI_DOMINATORS
,
9271 tem
->src
, false_edge
->dest
))))
9275 tem
= EDGE_PRED (bb
, 1);
9276 if (tem
== true_edge
9277 || (single_pred_p (true_edge
->dest
)
9278 && (tem
->src
== true_edge
->dest
9279 || dominated_by_p (CDI_DOMINATORS
,
9280 tem
->src
, true_edge
->dest
))))
9282 else if (tem
== false_edge
9283 || (single_pred_p (false_edge
->dest
)
9284 && (tem
->src
== false_edge
->dest
9285 || dominated_by_p (CDI_DOMINATORS
,
9286 tem
->src
, false_edge
->dest
))))
9293 if (true_controlled_edge
)
9294 *true_controlled_edge
= e0
;
9295 if (false_controlled_edge
)
9296 *false_controlled_edge
= e1
;
9301 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9302 range [low, high]. Place associated stmts before *GSI. */
9305 generate_range_test (basic_block bb
, tree index
, tree low
, tree high
,
9306 tree
*lhs
, tree
*rhs
)
9308 tree type
= TREE_TYPE (index
);
9309 tree utype
= range_check_type (type
);
9311 low
= fold_convert (utype
, low
);
9312 high
= fold_convert (utype
, high
);
9314 gimple_seq seq
= NULL
;
9315 index
= gimple_convert (&seq
, utype
, index
);
9316 *lhs
= gimple_build (&seq
, MINUS_EXPR
, utype
, index
, low
);
9317 *rhs
= const_binop (MINUS_EXPR
, utype
, high
, low
);
9319 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9320 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
9323 /* Return the basic block that belongs to label numbered INDEX
9324 of a switch statement. */
9327 gimple_switch_label_bb (function
*ifun
, gswitch
*gs
, unsigned index
)
9329 return label_to_block (ifun
, CASE_LABEL (gimple_switch_label (gs
, index
)));
9332 /* Return the default basic block of a switch statement. */
9335 gimple_switch_default_bb (function
*ifun
, gswitch
*gs
)
9337 return gimple_switch_label_bb (ifun
, gs
, 0);
9340 /* Return the edge that belongs to label numbered INDEX
9341 of a switch statement. */
9344 gimple_switch_edge (function
*ifun
, gswitch
*gs
, unsigned index
)
9346 return find_edge (gimple_bb (gs
), gimple_switch_label_bb (ifun
, gs
, index
));
9349 /* Return the default edge of a switch statement. */
9352 gimple_switch_default_edge (function
*ifun
, gswitch
*gs
)
9354 return gimple_switch_edge (ifun
, gs
, 0);
9358 /* Emit return warnings. */
9362 const pass_data pass_data_warn_function_return
=
9364 GIMPLE_PASS
, /* type */
9365 "*warn_function_return", /* name */
9366 OPTGROUP_NONE
, /* optinfo_flags */
9367 TV_NONE
, /* tv_id */
9368 PROP_cfg
, /* properties_required */
9369 0, /* properties_provided */
9370 0, /* properties_destroyed */
9371 0, /* todo_flags_start */
9372 0, /* todo_flags_finish */
9375 class pass_warn_function_return
: public gimple_opt_pass
9378 pass_warn_function_return (gcc::context
*ctxt
)
9379 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
9382 /* opt_pass methods: */
9383 virtual unsigned int execute (function
*);
9385 }; // class pass_warn_function_return
9388 pass_warn_function_return::execute (function
*fun
)
9390 location_t location
;
9395 if (!targetm
.warn_func_return (fun
->decl
))
9398 /* If we have a path to EXIT, then we do return. */
9399 if (TREE_THIS_VOLATILE (fun
->decl
)
9400 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
9402 location
= UNKNOWN_LOCATION
;
9403 for (ei
= ei_start (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
);
9404 (e
= ei_safe_edge (ei
)); )
9406 last
= last_stmt (e
->src
);
9407 if ((gimple_code (last
) == GIMPLE_RETURN
9408 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
9409 && location
== UNKNOWN_LOCATION
9410 && ((location
= LOCATION_LOCUS (gimple_location (last
)))
9411 != UNKNOWN_LOCATION
)
9414 /* When optimizing, replace return stmts in noreturn functions
9415 with __builtin_unreachable () call. */
9416 if (optimize
&& gimple_code (last
) == GIMPLE_RETURN
)
9418 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9419 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
9420 gimple_set_location (new_stmt
, gimple_location (last
));
9421 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9422 gsi_replace (&gsi
, new_stmt
, true);
9428 if (location
== UNKNOWN_LOCATION
)
9429 location
= cfun
->function_end_locus
;
9430 warning_at (location
, 0, "%<noreturn%> function does return");
9433 /* If we see "return;" in some basic block, then we do reach the end
9434 without returning a value. */
9435 else if (warn_return_type
> 0
9436 && !TREE_NO_WARNING (fun
->decl
)
9437 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
9439 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
9441 gimple
*last
= last_stmt (e
->src
);
9442 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
9444 && gimple_return_retval (return_stmt
) == NULL
9445 && !gimple_no_warning_p (last
))
9447 location
= gimple_location (last
);
9448 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9449 location
= fun
->function_end_locus
;
9450 if (warning_at (location
, OPT_Wreturn_type
,
9451 "control reaches end of non-void function"))
9452 TREE_NO_WARNING (fun
->decl
) = 1;
9456 /* The C++ FE turns fallthrough from the end of non-void function
9457 into __builtin_unreachable () call with BUILTINS_LOCATION.
9458 Recognize those too. */
9460 if (!TREE_NO_WARNING (fun
->decl
))
9461 FOR_EACH_BB_FN (bb
, fun
)
9462 if (EDGE_COUNT (bb
->succs
) == 0)
9464 gimple
*last
= last_stmt (bb
);
9465 const enum built_in_function ubsan_missing_ret
9466 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN
;
9468 && ((LOCATION_LOCUS (gimple_location (last
))
9469 == BUILTINS_LOCATION
9470 && gimple_call_builtin_p (last
, BUILT_IN_UNREACHABLE
))
9471 || gimple_call_builtin_p (last
, ubsan_missing_ret
)))
9473 gimple_stmt_iterator gsi
= gsi_for_stmt (last
);
9474 gsi_prev_nondebug (&gsi
);
9475 gimple
*prev
= gsi_stmt (gsi
);
9477 location
= UNKNOWN_LOCATION
;
9479 location
= gimple_location (prev
);
9480 if (LOCATION_LOCUS (location
) == UNKNOWN_LOCATION
)
9481 location
= fun
->function_end_locus
;
9482 if (warning_at (location
, OPT_Wreturn_type
,
9483 "control reaches end of non-void function"))
9484 TREE_NO_WARNING (fun
->decl
) = 1;
9495 make_pass_warn_function_return (gcc::context
*ctxt
)
9497 return new pass_warn_function_return (ctxt
);
9500 /* Walk a gimplified function and warn for functions whose return value is
9501 ignored and attribute((warn_unused_result)) is set. This is done before
9502 inlining, so we don't have to worry about that. */
9505 do_warn_unused_result (gimple_seq seq
)
9508 gimple_stmt_iterator i
;
9510 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
9512 gimple
*g
= gsi_stmt (i
);
9514 switch (gimple_code (g
))
9517 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
9520 do_warn_unused_result (gimple_try_eval (g
));
9521 do_warn_unused_result (gimple_try_cleanup (g
));
9524 do_warn_unused_result (gimple_catch_handler (
9525 as_a
<gcatch
*> (g
)));
9527 case GIMPLE_EH_FILTER
:
9528 do_warn_unused_result (gimple_eh_filter_failure (g
));
9532 if (gimple_call_lhs (g
))
9534 if (gimple_call_internal_p (g
))
9537 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9538 LHS. All calls whose value is ignored should be
9539 represented like this. Look for the attribute. */
9540 fdecl
= gimple_call_fndecl (g
);
9541 ftype
= gimple_call_fntype (g
);
9543 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
9545 location_t loc
= gimple_location (g
);
9548 warning_at (loc
, OPT_Wunused_result
,
9549 "ignoring return value of %qD "
9550 "declared with attribute %<warn_unused_result%>",
9553 warning_at (loc
, OPT_Wunused_result
,
9554 "ignoring return value of function "
9555 "declared with attribute %<warn_unused_result%>");
9560 /* Not a container, not a call, or a call whose value is used. */
9568 const pass_data pass_data_warn_unused_result
=
9570 GIMPLE_PASS
, /* type */
9571 "*warn_unused_result", /* name */
9572 OPTGROUP_NONE
, /* optinfo_flags */
9573 TV_NONE
, /* tv_id */
9574 PROP_gimple_any
, /* properties_required */
9575 0, /* properties_provided */
9576 0, /* properties_destroyed */
9577 0, /* todo_flags_start */
9578 0, /* todo_flags_finish */
9581 class pass_warn_unused_result
: public gimple_opt_pass
9584 pass_warn_unused_result (gcc::context
*ctxt
)
9585 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
9588 /* opt_pass methods: */
9589 virtual bool gate (function
*) { return flag_warn_unused_result
; }
9590 virtual unsigned int execute (function
*)
9592 do_warn_unused_result (gimple_body (current_function_decl
));
9596 }; // class pass_warn_unused_result
9601 make_pass_warn_unused_result (gcc::context
*ctxt
)
9603 return new pass_warn_unused_result (ctxt
);
9606 /* IPA passes, compilation of earlier functions or inlining
9607 might have changed some properties, such as marked functions nothrow,
9608 pure, const or noreturn.
9609 Remove redundant edges and basic blocks, and create new ones if necessary.
9611 This pass can't be executed as stand alone pass from pass manager, because
9612 in between inlining and this fixup the verify_flow_info would fail. */
9615 execute_fixup_cfg (void)
9618 gimple_stmt_iterator gsi
;
9620 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
9621 /* Same scaling is also done by ipa_merge_profiles. */
9622 profile_count num
= node
->count
;
9623 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
9624 bool scale
= num
.initialized_p () && !(num
== den
);
9628 profile_count::adjust_for_ipa_scaling (&num
, &den
);
9629 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
9630 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
9631 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
9634 FOR_EACH_BB_FN (bb
, cfun
)
9637 bb
->count
= bb
->count
.apply_scale (num
, den
);
9638 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
9640 gimple
*stmt
= gsi_stmt (gsi
);
9641 tree decl
= is_gimple_call (stmt
)
9642 ? gimple_call_fndecl (stmt
)
9646 int flags
= gimple_call_flags (stmt
);
9647 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9649 if (gimple_purge_dead_abnormal_call_edges (bb
))
9650 todo
|= TODO_cleanup_cfg
;
9652 if (gimple_in_ssa_p (cfun
))
9654 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9659 if (flags
& ECF_NORETURN
9660 && fixup_noreturn_call (stmt
))
9661 todo
|= TODO_cleanup_cfg
;
9664 /* Remove stores to variables we marked write-only.
9665 Keep access when store has side effect, i.e. in case when source
9667 if (gimple_store_p (stmt
)
9668 && !gimple_has_side_effects (stmt
)
9671 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9674 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9675 && varpool_node::get (lhs
)->writeonly
)
9677 unlink_stmt_vdef (stmt
);
9678 gsi_remove (&gsi
, true);
9679 release_defs (stmt
);
9680 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9684 /* For calls we can simply remove LHS when it is known
9685 to be write-only. */
9686 if (is_gimple_call (stmt
)
9687 && gimple_get_lhs (stmt
))
9689 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9692 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9693 && varpool_node::get (lhs
)->writeonly
)
9695 gimple_call_set_lhs (stmt
, NULL
);
9697 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9701 if (maybe_clean_eh_stmt (stmt
)
9702 && gimple_purge_dead_eh_edges (bb
))
9703 todo
|= TODO_cleanup_cfg
;
9707 /* If we have a basic block with no successors that does not
9708 end with a control statement or a noreturn call end it with
9709 a call to __builtin_unreachable. This situation can occur
9710 when inlining a noreturn call that does in fact return. */
9711 if (EDGE_COUNT (bb
->succs
) == 0)
9713 gimple
*stmt
= last_stmt (bb
);
9715 || (!is_ctrl_stmt (stmt
)
9716 && (!is_gimple_call (stmt
)
9717 || !gimple_call_noreturn_p (stmt
))))
9719 if (stmt
&& is_gimple_call (stmt
))
9720 gimple_call_set_ctrl_altering (stmt
, false);
9721 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
9722 stmt
= gimple_build_call (fndecl
, 0);
9723 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
9724 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
9725 if (!cfun
->after_inlining
)
9727 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
9728 node
->create_edge (cgraph_node::get_create (fndecl
),
9729 call_stmt
, bb
->count
);
9736 update_max_bb_count ();
9737 compute_function_frequency ();
9741 && (todo
& TODO_cleanup_cfg
))
9742 loops_state_set (LOOPS_NEED_FIXUP
);
9749 const pass_data pass_data_fixup_cfg
=
9751 GIMPLE_PASS
, /* type */
9752 "fixup_cfg", /* name */
9753 OPTGROUP_NONE
, /* optinfo_flags */
9754 TV_NONE
, /* tv_id */
9755 PROP_cfg
, /* properties_required */
9756 0, /* properties_provided */
9757 0, /* properties_destroyed */
9758 0, /* todo_flags_start */
9759 0, /* todo_flags_finish */
9762 class pass_fixup_cfg
: public gimple_opt_pass
9765 pass_fixup_cfg (gcc::context
*ctxt
)
9766 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
9769 /* opt_pass methods: */
9770 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
9771 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
9773 }; // class pass_fixup_cfg
9778 make_pass_fixup_cfg (gcc::context
*ctxt
)
9780 return new pass_fixup_cfg (ctxt
);
9783 /* Garbage collection support for edge_def. */
9785 extern void gt_ggc_mx (tree
&);
9786 extern void gt_ggc_mx (gimple
*&);
9787 extern void gt_ggc_mx (rtx
&);
9788 extern void gt_ggc_mx (basic_block
&);
9791 gt_ggc_mx (rtx_insn
*& x
)
9794 gt_ggc_mx_rtx_def ((void *) x
);
9798 gt_ggc_mx (edge_def
*e
)
9800 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9802 gt_ggc_mx (e
->dest
);
9803 if (current_ir_type () == IR_GIMPLE
)
9804 gt_ggc_mx (e
->insns
.g
);
9806 gt_ggc_mx (e
->insns
.r
);
9810 /* PCH support for edge_def. */
9812 extern void gt_pch_nx (tree
&);
9813 extern void gt_pch_nx (gimple
*&);
9814 extern void gt_pch_nx (rtx
&);
9815 extern void gt_pch_nx (basic_block
&);
9818 gt_pch_nx (rtx_insn
*& x
)
9821 gt_pch_nx_rtx_def ((void *) x
);
9825 gt_pch_nx (edge_def
*e
)
9827 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9829 gt_pch_nx (e
->dest
);
9830 if (current_ir_type () == IR_GIMPLE
)
9831 gt_pch_nx (e
->insns
.g
);
9833 gt_pch_nx (e
->insns
.r
);
9838 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
9840 tree block
= LOCATION_BLOCK (e
->goto_locus
);
9841 op (&(e
->src
), cookie
);
9842 op (&(e
->dest
), cookie
);
9843 if (current_ir_type () == IR_GIMPLE
)
9844 op (&(e
->insns
.g
), cookie
);
9846 op (&(e
->insns
.r
), cookie
);
9847 op (&(block
), cookie
);
9852 namespace selftest
{
9854 /* Helper function for CFG selftests: create a dummy function decl
9855 and push it as cfun. */
9858 push_fndecl (const char *name
)
9860 tree fn_type
= build_function_type_array (integer_type_node
, 0, NULL
);
9861 /* FIXME: this uses input_location: */
9862 tree fndecl
= build_fn_decl (name
, fn_type
);
9863 tree retval
= build_decl (UNKNOWN_LOCATION
, RESULT_DECL
,
9864 NULL_TREE
, integer_type_node
);
9865 DECL_RESULT (fndecl
) = retval
;
9866 push_struct_function (fndecl
);
9867 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9868 ASSERT_TRUE (fun
!= NULL
);
9869 init_empty_tree_cfg_for_function (fun
);
9870 ASSERT_EQ (2, n_basic_blocks_for_fn (fun
));
9871 ASSERT_EQ (0, n_edges_for_fn (fun
));
9875 /* These tests directly create CFGs.
9876 Compare with the static fns within tree-cfg.c:
9878 - make_blocks: calls create_basic_block (seq, bb);
9881 /* Verify a simple cfg of the form:
9882 ENTRY -> A -> B -> C -> EXIT. */
9885 test_linear_chain ()
9887 gimple_register_cfg_hooks ();
9889 tree fndecl
= push_fndecl ("cfg_test_linear_chain");
9890 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9892 /* Create some empty blocks. */
9893 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9894 basic_block bb_b
= create_empty_bb (bb_a
);
9895 basic_block bb_c
= create_empty_bb (bb_b
);
9897 ASSERT_EQ (5, n_basic_blocks_for_fn (fun
));
9898 ASSERT_EQ (0, n_edges_for_fn (fun
));
9900 /* Create some edges: a simple linear chain of BBs. */
9901 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9902 make_edge (bb_a
, bb_b
, 0);
9903 make_edge (bb_b
, bb_c
, 0);
9904 make_edge (bb_c
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9906 /* Verify the edges. */
9907 ASSERT_EQ (4, n_edges_for_fn (fun
));
9908 ASSERT_EQ (NULL
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->preds
);
9909 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
->length ());
9910 ASSERT_EQ (1, bb_a
->preds
->length ());
9911 ASSERT_EQ (1, bb_a
->succs
->length ());
9912 ASSERT_EQ (1, bb_b
->preds
->length ());
9913 ASSERT_EQ (1, bb_b
->succs
->length ());
9914 ASSERT_EQ (1, bb_c
->preds
->length ());
9915 ASSERT_EQ (1, bb_c
->succs
->length ());
9916 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
->length ());
9917 ASSERT_EQ (NULL
, EXIT_BLOCK_PTR_FOR_FN (fun
)->succs
);
9919 /* Verify the dominance information
9920 Each BB in our simple chain should be dominated by the one before
9922 calculate_dominance_info (CDI_DOMINATORS
);
9923 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9924 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9925 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
9926 ASSERT_EQ (1, dom_by_b
.length ());
9927 ASSERT_EQ (bb_c
, dom_by_b
[0]);
9928 free_dominance_info (CDI_DOMINATORS
);
9929 dom_by_b
.release ();
9931 /* Similarly for post-dominance: each BB in our chain is post-dominated
9932 by the one after it. */
9933 calculate_dominance_info (CDI_POST_DOMINATORS
);
9934 ASSERT_EQ (bb_b
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
9935 ASSERT_EQ (bb_c
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
9936 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
9937 ASSERT_EQ (1, postdom_by_b
.length ());
9938 ASSERT_EQ (bb_a
, postdom_by_b
[0]);
9939 free_dominance_info (CDI_POST_DOMINATORS
);
9940 postdom_by_b
.release ();
9945 /* Verify a simple CFG of the form:
9961 gimple_register_cfg_hooks ();
9963 tree fndecl
= push_fndecl ("cfg_test_diamond");
9964 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
9966 /* Create some empty blocks. */
9967 basic_block bb_a
= create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
9968 basic_block bb_b
= create_empty_bb (bb_a
);
9969 basic_block bb_c
= create_empty_bb (bb_a
);
9970 basic_block bb_d
= create_empty_bb (bb_b
);
9972 ASSERT_EQ (6, n_basic_blocks_for_fn (fun
));
9973 ASSERT_EQ (0, n_edges_for_fn (fun
));
9975 /* Create the edges. */
9976 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), bb_a
, EDGE_FALLTHRU
);
9977 make_edge (bb_a
, bb_b
, EDGE_TRUE_VALUE
);
9978 make_edge (bb_a
, bb_c
, EDGE_FALSE_VALUE
);
9979 make_edge (bb_b
, bb_d
, 0);
9980 make_edge (bb_c
, bb_d
, 0);
9981 make_edge (bb_d
, EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
9983 /* Verify the edges. */
9984 ASSERT_EQ (6, n_edges_for_fn (fun
));
9985 ASSERT_EQ (1, bb_a
->preds
->length ());
9986 ASSERT_EQ (2, bb_a
->succs
->length ());
9987 ASSERT_EQ (1, bb_b
->preds
->length ());
9988 ASSERT_EQ (1, bb_b
->succs
->length ());
9989 ASSERT_EQ (1, bb_c
->preds
->length ());
9990 ASSERT_EQ (1, bb_c
->succs
->length ());
9991 ASSERT_EQ (2, bb_d
->preds
->length ());
9992 ASSERT_EQ (1, bb_d
->succs
->length ());
9994 /* Verify the dominance information. */
9995 calculate_dominance_info (CDI_DOMINATORS
);
9996 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_b
));
9997 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_c
));
9998 ASSERT_EQ (bb_a
, get_immediate_dominator (CDI_DOMINATORS
, bb_d
));
9999 vec
<basic_block
> dom_by_a
= get_dominated_by (CDI_DOMINATORS
, bb_a
);
10000 ASSERT_EQ (3, dom_by_a
.length ()); /* B, C, D, in some order. */
10001 dom_by_a
.release ();
10002 vec
<basic_block
> dom_by_b
= get_dominated_by (CDI_DOMINATORS
, bb_b
);
10003 ASSERT_EQ (0, dom_by_b
.length ());
10004 dom_by_b
.release ();
10005 free_dominance_info (CDI_DOMINATORS
);
10007 /* Similarly for post-dominance. */
10008 calculate_dominance_info (CDI_POST_DOMINATORS
);
10009 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_a
));
10010 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_b
));
10011 ASSERT_EQ (bb_d
, get_immediate_dominator (CDI_POST_DOMINATORS
, bb_c
));
10012 vec
<basic_block
> postdom_by_d
= get_dominated_by (CDI_POST_DOMINATORS
, bb_d
);
10013 ASSERT_EQ (3, postdom_by_d
.length ()); /* A, B, C in some order. */
10014 postdom_by_d
.release ();
10015 vec
<basic_block
> postdom_by_b
= get_dominated_by (CDI_POST_DOMINATORS
, bb_b
);
10016 ASSERT_EQ (0, postdom_by_b
.length ());
10017 postdom_by_b
.release ();
10018 free_dominance_info (CDI_POST_DOMINATORS
);
10023 /* Verify that we can handle a CFG containing a "complete" aka
10024 fully-connected subgraph (where A B C D below all have edges
10025 pointing to each other node, also to themselves).
10043 test_fully_connected ()
10045 gimple_register_cfg_hooks ();
10047 tree fndecl
= push_fndecl ("cfg_fully_connected");
10048 function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
10052 /* Create some empty blocks. */
10053 auto_vec
<basic_block
> subgraph_nodes
;
10054 for (int i
= 0; i
< n
; i
++)
10055 subgraph_nodes
.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
10057 ASSERT_EQ (n
+ 2, n_basic_blocks_for_fn (fun
));
10058 ASSERT_EQ (0, n_edges_for_fn (fun
));
10060 /* Create the edges. */
10061 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun
), subgraph_nodes
[0], EDGE_FALLTHRU
);
10062 make_edge (subgraph_nodes
[0], EXIT_BLOCK_PTR_FOR_FN (fun
), 0);
10063 for (int i
= 0; i
< n
; i
++)
10064 for (int j
= 0; j
< n
; j
++)
10065 make_edge (subgraph_nodes
[i
], subgraph_nodes
[j
], 0);
10067 /* Verify the edges. */
10068 ASSERT_EQ (2 + (n
* n
), n_edges_for_fn (fun
));
10069 /* The first one is linked to ENTRY/EXIT as well as itself and
10070 everything else. */
10071 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->preds
->length ());
10072 ASSERT_EQ (n
+ 1, subgraph_nodes
[0]->succs
->length ());
10073 /* The other ones in the subgraph are linked to everything in
10074 the subgraph (including themselves). */
10075 for (int i
= 1; i
< n
; i
++)
10077 ASSERT_EQ (n
, subgraph_nodes
[i
]->preds
->length ());
10078 ASSERT_EQ (n
, subgraph_nodes
[i
]->succs
->length ());
10081 /* Verify the dominance information. */
10082 calculate_dominance_info (CDI_DOMINATORS
);
10083 /* The initial block in the subgraph should be dominated by ENTRY. */
10084 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun
),
10085 get_immediate_dominator (CDI_DOMINATORS
,
10086 subgraph_nodes
[0]));
10087 /* Every other block in the subgraph should be dominated by the
10089 for (int i
= 1; i
< n
; i
++)
10090 ASSERT_EQ (subgraph_nodes
[0],
10091 get_immediate_dominator (CDI_DOMINATORS
,
10092 subgraph_nodes
[i
]));
10093 free_dominance_info (CDI_DOMINATORS
);
10095 /* Similarly for post-dominance. */
10096 calculate_dominance_info (CDI_POST_DOMINATORS
);
10097 /* The initial block in the subgraph should be postdominated by EXIT. */
10098 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun
),
10099 get_immediate_dominator (CDI_POST_DOMINATORS
,
10100 subgraph_nodes
[0]));
10101 /* Every other block in the subgraph should be postdominated by the
10102 initial block, since that leads to EXIT. */
10103 for (int i
= 1; i
< n
; i
++)
10104 ASSERT_EQ (subgraph_nodes
[0],
10105 get_immediate_dominator (CDI_POST_DOMINATORS
,
10106 subgraph_nodes
[i
]));
10107 free_dominance_info (CDI_POST_DOMINATORS
);
10112 /* Run all of the selftests within this file. */
10115 tree_cfg_c_tests ()
10117 test_linear_chain ();
10119 test_fully_connected ();
10122 } // namespace selftest
10124 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10127 - switch statement (a block with many out-edges)
10128 - something that jumps to itself
10131 #endif /* CHECKING_P */