/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
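
/* Editor's sketch (operand shapes illustrative, not taken from a dump):
   a loop preceded by "#pragma GCC ivdep" reaches the function above
   roughly as

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
     if (_2 != 0) goto <loop body>; else goto <exit>;

   and the code records loop->safelen = INT_MAX, then rewrites the
   internal call into the plain copy "_2 = _1".  */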
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
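
/* Editor's illustration: for the C fragment

     void *p = &&lab;
     goto *p;

   the "goto *p" is a GIMPLE_GOTO whose destination is the pointer p
   rather than a LABEL_DECL, so computed_goto_p returns true; a plain
   "goto lab;" keeps its LABEL_DECL destination and returns false.  */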
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
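
/* Editor's example (assumed shape): a sequence such as

     <lab>:
     x = {CLOBBER};
     __builtin_unreachable ();

   satisfies the predicate above, since labels, debug statements and
   clobbers are the only statements tolerated before the final call.  */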
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
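
/* Editor's illustration: a call such as "abort ()" carries ECF_NORETURN
   and is therefore marked control-altering here, which later makes
   stmt_ends_bb_p split the block right after it; a plain call with
   none of the properties tested above gets the flag cleared and may
   sit in the middle of a block.  */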
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }

  return bb;
}
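
/* Editor's sketch of the LHS rewrite above (identifiers invented): if
   a call "x = foo ();" can make an abnormal goto, it becomes

     tmp = foo ();
     x = tmp;

   so the previous value of x is still available along the abnormal
   edge leaving the call, and the life-ranges of the two definitions
   of x do not overlap.  */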
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      int freq = 0;
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count.initialized_p ())
	    cnt += e->count;
	  else
	    all = false;
	  freq += EDGE_FREQUENCY (e);
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;
      bb->frequency = freq;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = bb->count.apply_probability (e->probability);

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
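
/* Editor's example: for "if (c) x = f (); else x = g ();" written on a
   single source line, the then- and else-blocks share that line's
   locus; handing them distinct discriminators lets a sample-based
   profiler such as AutoFDO attribute counts to each block
   separately.  */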
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
1430 cleanup_dead_labels_eh (void)
1437 if (cfun
->eh
== NULL
)
1440 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1441 if (lp
&& lp
->post_landing_pad
)
1443 lab
= main_block_label (lp
->post_landing_pad
);
1444 if (lab
!= lp
->post_landing_pad
)
1446 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1447 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1451 FOR_ALL_EH_REGION (r
)
1455 case ERT_MUST_NOT_THROW
:
1461 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1465 c
->label
= main_block_label (lab
);
1470 case ERT_ALLOWED_EXCEPTIONS
:
1471 lab
= r
->u
.allowed
.label
;
1473 r
->u
.allowed
.label
= main_block_label (lab
);
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  gimple *stmt;
  edge e;
  use_operand_p use;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = a->count.merge (b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}
2124 notice_special_calls (gcall
*call
)
2126 int flags
= gimple_call_flags (call
);
2128 if (flags
& ECF_MAY_BE_ALLOCA
)
2129 cfun
->calls_alloca
= true;
2130 if (flags
& ECF_RETURNS_TWICE
)
2131 cfun
->calls_setjmp
= true;
2135 /* Clear flags set by notice_special_calls. Used by dead code removal
2136 to update the flags. */
2139 clear_special_calls (void)
2141 cfun
->calls_alloca
= false;
2142 cfun
->calls_setjmp
= false;
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple *stmt;

  stmt = last_stmt (bb);
  gcc_assert (is_ctrl_stmt (stmt));

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
         not a label then we can not construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if (val
          && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
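
/* Usage sketch (hypothetical caller, kept here for illustration): a
   cleanup that has proven the controlling value of BB to be the
   constant VAL can keep only the selected edge:

     edge taken = find_taken_edge (bb, val);
     if (taken)
       {
         edge e;
         edge_iterator ei;
         for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
           if (e != taken)
             remove_edge (e);
           else
             ei_next (&ei);
       }

   This mirrors what the CFG cleanup code does once a condition or
   switch index folds to a constant.  */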
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}
/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  if (val == NULL
      || TREE_CODE (val) != INTEGER_CST)
    return NULL;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  return (integer_zerop (val) ? false_edge : true_edge);
}
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
                             tree val)
{
  basic_block dest_bb;
  edge e;
  tree taken_case;

  if (gimple_switch_num_labels (switch_stmt) == 1)
    taken_case = gimple_switch_default_label (switch_stmt);
  else if (! val || TREE_CODE (val) != INTEGER_CST)
    return NULL;
  else
    taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range.  We can only handle integer ranges.  */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  return default_case;
}
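
/* Worked example (illustrative): for

     switch (x) { default: ...; case 1: ...; case 3 ... 7: ...; case 9: ...; }

   the sorted label vector is { default, 1, 3...7, 9 }, so N is 4.
   Looking up VAL == 5 starts with low = 0, high = 4 and probes i = 2,
   the range label 3...7: the CASE_LOW comparison yields cmp = -1, and
   since CASE_HIGH (7) >= 5 the range label is returned after a single
   iteration.  */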
/* Dump a basic block on stderr.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
/* Dump basic block with index N on stderr.  */

basic_block
gimple_debug_bb_n (int n)
{
  gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
  return BASIC_BLOCK_FOR_FN (cfun, n);
}
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (dump_flags_t flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   dumpfile.h).  */

void
gimple_dump_cfg (FILE *file, dump_flags_t flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
               n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
               last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
           SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
           LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
           cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
/*---------------------------------------------------------------------------
                             Miscellaneous helpers
---------------------------------------------------------------------------*/

/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
   flow.  Transfers of control flow associated with EH are excluded.  */

static bool
call_can_make_abnormal_goto (gimple *t)
{
  /* If the function has no non-local labels, then a call cannot make an
     abnormal transfer of control.  */
  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  /* Likewise if the call has no side effects.  */
  if (!gimple_has_side_effects (t))
    return false;

  /* Likewise if the called function is leaf.  */
  if (gimple_call_flags (t) & ECF_LEAF)
    return false;

  return true;
}
/* Return true if T can make an abnormal transfer of control flow.
   Transfers of control flow associated with EH are excluded.  */

bool
stmt_can_make_abnormal_goto (gimple *t)
{
  if (computed_goto_p (t))
    return true;
  if (is_gimple_call (t))
    return call_can_make_abnormal_goto (t);
  return false;
}
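
/* Illustrative example of such an abnormal transfer (user-level C,
   not compiler code):

     #include <setjmp.h>
     jmp_buf env;
     void g (void) { longjmp (env, 1); }
     int f (void)
     {
       if (setjmp (env) == 0)
         g ();
       return 0;
     }

   Within f, cfun->calls_setjmp is set, so the otherwise ordinary call
   to g is considered able to make an abnormal goto: control may
   reappear at the setjmp call instead of returning normally, and the
   CFG must contain abnormal edges to model that.  */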
/* Return true if T represents a stmt that always transfers control.  */

bool
is_ctrl_stmt (gimple *t)
{
  switch (gimple_code (t))
    {
    case GIMPLE_COND:
    case GIMPLE_SWITCH:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      return true;
    default:
      return false;
    }
}
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
         control flow.  */
      if (gimple_call_ctrl_altering_p (t))
        return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
         this level of a try or allowed-exceptions region.  It can
         fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
        return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
/* Return true if T is a simple local goto.  */

bool
simple_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
}
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
          || FORCED_LABEL (gimple_label_label (label_stmt)))
        return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
        {
          if (DECL_NONLOCAL (gimple_label_label (
                               as_a <glabel *> (prev_stmt))))
            return true;

          cfg_stats.num_merged_labels++;
          return false;
        }
      else
        return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
        /* setjmp acts similar to a nonlocal GOTO target and thus should
           start a new block.  */
        return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
          && prev_stmt
          && gimple_code (prev_stmt) != GIMPLE_LABEL
          && (gimple_code (prev_stmt) != GIMPLE_CALL
              || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
        /* PHI nodes start a new block unless preceded by a label
           or another PHI.  */
        return true;
    }

  return false;
}
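
/* For instance (illustrative GIMPLE fragment):

     L1:
     L2:
     L3:
       x_1 = a_2 + b_3;

   only L1 opens a new basic block; L2 and L3 are folded into the same
   block and accounted in cfg_stats.num_merged_labels.  A FORCED_LABEL
   or DECL_NONLOCAL label in the same position would still force a
   block of its own.  */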
/* Return true if T should end a basic block.  */

bool
stmt_ends_bb_p (gimple *t)
{
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
/* Return the virtual phi in BB.  */

gphi *
get_virtual_phi (basic_block bb)
{
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      if (virtual_operand_p (PHI_RESULT (phi)))
        return phi;
    }

  return NULL;
}
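
/* Usage sketch (hypothetical, for illustration): a pass that needs the
   virtual operand state live on entry to BB can query

     if (gphi *vphi = get_virtual_phi (bb))
       vuse = gimple_phi_result (vphi);

   and fall back to walking dominators when BB carries no virtual
   PHI node.  */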
/* Return the first statement in basic block BB.  */

gimple *
first_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  gimple *stmt = NULL;

  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_next (&i);
      stmt = NULL;
    }
  return stmt;
}
/* Return the first non-label statement in basic block BB.  */

static gimple *
first_non_label_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
    gsi_next (&i);
  return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
}
/* Return the last statement in basic block BB.  */

gimple *
last_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_bb (bb);
  gimple *stmt = NULL;

  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_prev (&i);
      stmt = NULL;
    }
  return stmt;
}
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

gimple *
last_and_only_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
  gimple *last, *prev;

  if (gsi_end_p (i))
    return NULL;

  last = gsi_stmt (i);
  gsi_prev_nondebug (&i);
  if (gsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just gsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = gsi_stmt (i);
  if (gimple_code (prev) == GIMPLE_LABEL)
    return last;
  else
    return NULL;
}
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (old_edge);
}
/* Returns the basic block after which the new basic block created
   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
   near its "logical" location.  This is of most help to humans looking
   at debugging dumps.  */

basic_block
split_edge_bb_loc (edge edge_in)
{
  basic_block dest = edge_in->dest;
  basic_block dest_prev = dest->prev_bb;

  if (dest_prev)
    {
      edge e = find_edge (dest_prev, dest);
      if (e && !(e->flags & EDGE_COMPLEX))
        return edge_in->src;
    }
  return dest_prev;
}
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;

  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);

  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
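
/* Illustrative sketch of the classic use: splitting every critical
   edge (an edge from a block with several successors to a block with
   several predecessors) so that statements can be inserted "on" an
   edge:

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       {
         edge e;
         edge_iterator ei;
         FOR_EACH_EDGE (e, ei, bb->succs)
           if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
             split_edge (e);
       }

   split_edge dispatches through the CFG hooks to gimple_split_edge for
   GIMPLE bodies; the real pass doing this lives elsewhere in this file
   (split_critical_edges).  */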
/* Verify properties of the address expression T with base object BASE.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  if (!(VAR_P (base)
        || TREE_CODE (base) == PARM_DECL
        || TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
        {
          error ("SSA name in freelist but still referenced");
          return *tp;
        }
      break;

    case PARM_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      {
        tree context = decl_function_context (t);
        if (context != cfun->decl
            && !SCOPE_FILE_SCOPE_P (context)
            && !TREE_STATIC (t)
            && !DECL_EXTERNAL (t))
          {
            error ("Local declaration from a different function");
            return t;
          }
      }
      break;

    case INDIRECT_REF:
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
          || !is_gimple_mem_ref_addr (x))
        {
          error ("invalid first operand of MEM_REF");
          return x;
        }
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
          || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
        {
          error ("invalid offset operand of MEM_REF");
          return TREE_OPERAND (t, 1);
        }
      if (TREE_CODE (x) == ADDR_EXPR)
        {
          tree va = verify_address (x, TREE_OPERAND (x, 0));
          if (va)
            return va;
          x = TREE_OPERAND (x, 0);
        }
      walk_tree (&x, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
        {
          error ("ASSERT_EXPR with an always-false condition");
          return *tp;
        }
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
        tree tem;

        gcc_assert (is_gimple_address (t));

        /* Skip any references (they will be checked when we recurse down the
           tree) and ensure that any variable used as a prefix is marked
           addressable.  */
        for (x = TREE_OPERAND (t, 0);
             handled_component_p (x);
             x = TREE_OPERAND (x, 0))
          ;

        if ((tem = verify_address (t, x)))
          return tem;

        if (!(VAR_P (x)
              || TREE_CODE (x) == PARM_DECL
              || TREE_CODE (x) == RESULT_DECL))
          return NULL;

        if (!TREE_ADDRESSABLE (x))
          {
            error ("address taken, but ADDRESSABLE bit not set");
            return x;
          }

        break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
        {
          error ("non-integral used in condition");
          return x;
        }
      if (!is_gimple_condexpr (x))
        {
          error ("invalid conditional operand");
          return x;
        }
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
        {
          error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
          return t;
        }

      if (TREE_CODE (t) == BIT_FIELD_REF)
        {
          tree t0 = TREE_OPERAND (t, 0);
          tree t1 = TREE_OPERAND (t, 1);
          tree t2 = TREE_OPERAND (t, 2);
          if (!tree_fits_uhwi_p (t1)
              || !tree_fits_uhwi_p (t2)
              || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
              || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
            {
              error ("invalid position or size operand to BIT_FIELD_REF");
              return t;
            }
          if (INTEGRAL_TYPE_P (TREE_TYPE (t))
              && (TYPE_PRECISION (TREE_TYPE (t))
                  != tree_to_uhwi (t1)))
            {
              error ("integral result type precision does not match "
                     "field size of BIT_FIELD_REF");
              return t;
            }
          else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
                   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
                   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
                       != tree_to_uhwi (t1)))
            {
              error ("mode size of non-integral result does not "
                     "match field size of BIT_FIELD_REF");
              return t;
            }
          if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
              && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
                  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
            {
              error ("position plus size exceeds size of referenced object in "
                     "BIT_FIELD_REF");
              return t;
            }
        }
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
         that determine where to reference is either a constant or a variable,
         verify that the base is valid, and then show we've already checked
         the subtrees.  */
      while (handled_component_p (t))
        {
          if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
            CHECK_OP (2, "invalid COMPONENT_REF offset operator");
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              CHECK_OP (1, "invalid array index");
              if (TREE_OPERAND (t, 2))
                CHECK_OP (2, "invalid array lower bound");
              if (TREE_OPERAND (t, 3))
                CHECK_OP (3, "invalid array stride");
            }
          else if (TREE_CODE (t) == BIT_FIELD_REF
                   || TREE_CODE (t) == REALPART_EXPR
                   || TREE_CODE (t) == IMAGPART_EXPR)
            {
              error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
                     "REALPART_EXPR");
              return t;
            }

          t = TREE_OPERAND (t, 0);
        }

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
        {
          error ("invalid reference prefix");
          return t;
        }
      walk_tree (&t, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done
         using POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
        {
          error ("invalid operand to plus/minus, type is a pointer");
          return t;
        }
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type.  */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
        {
          error ("invalid operand to pointer plus, first operand is not a pointer");
          return t;
        }
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
        {
          error ("invalid operand to pointer plus, second operand is not an "
                 "integer type of appropriate width");
          return t;
        }
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        *walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
        {
          error ("invalid CASE_CHAIN");
          return t;
        }
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
          || TREE_CODE (expr) == ARRAY_RANGE_REF)
        {
          if (!is_gimple_val (TREE_OPERAND (expr, 1))
              || (TREE_OPERAND (expr, 2)
                  && !is_gimple_val (TREE_OPERAND (expr, 2)))
              || (TREE_OPERAND (expr, 3)
                  && !is_gimple_val (TREE_OPERAND (expr, 3))))
            {
              error ("invalid operands to array reference");
              debug_generic_stmt (expr);
              return true;
            }
        }

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
          && !useless_type_conversion_p (TREE_TYPE (expr),
                                         TREE_TYPE (TREE_TYPE (op))))
        {
          error ("type mismatch in array reference");
          debug_generic_stmt (TREE_TYPE (expr));
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
          return true;
        }
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
          && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
                                         TREE_TYPE (TREE_TYPE (op))))
        {
          error ("type mismatch in array range reference");
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
          return true;
        }

      if ((TREE_CODE (expr) == REALPART_EXPR
           || TREE_CODE (expr) == IMAGPART_EXPR)
          && !useless_type_conversion_p (TREE_TYPE (expr),
                                         TREE_TYPE (TREE_TYPE (op))))
        {
          error ("type mismatch in real/imagpart reference");
          debug_generic_stmt (TREE_TYPE (expr));
          debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
          return true;
        }

      if (TREE_CODE (expr) == COMPONENT_REF
          && !useless_type_conversion_p (TREE_TYPE (expr),
                                         TREE_TYPE (TREE_OPERAND (expr, 1))))
        {
          error ("type mismatch in component reference");
          debug_generic_stmt (TREE_TYPE (expr));
          debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
          return true;
        }

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
        {
          /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
             that their operand is not an SSA name or an invariant when
             requiring an lvalue (this usually means there is a SRA or IPA-SRA
             bug).  Otherwise there is nothing to verify, gross mismatches at
             most invoke undefined behavior.  */
          if (require_lvalue
              && (TREE_CODE (op) == SSA_NAME
                  || is_gimple_min_invariant (op)))
            {
              error ("conversion of an SSA_NAME on the left hand side");
              debug_generic_stmt (expr);
              return true;
            }
          else if (TREE_CODE (op) == SSA_NAME
                   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
            {
              error ("conversion of register to a different size");
              debug_generic_stmt (expr);
              return true;
            }
          else if (!handled_component_p (op))
            return false;
        }

      expr = op;
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
        {
          error ("invalid address operand in MEM_REF");
          debug_generic_stmt (expr);
          return true;
        }
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
          || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
        {
          error ("invalid offset operand in MEM_REF");
          debug_generic_stmt (expr);
          return true;
        }
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
          || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
        {
          error ("invalid address operand in TARGET_MEM_REF");
          return true;
        }
      if (!TMR_OFFSET (expr)
          || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
          || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
        {
          error ("invalid offset operand in TARGET_MEM_REF");
          debug_generic_stmt (expr);
          return true;
        }
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
          && verify_types_in_gimple_min_lval (expr));
}
/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
   list of pointer-to types that is trivially convertible to DEST.  */

static bool
one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
{
  tree src;

  if (!TYPE_POINTER_TO (src_obj))
    return true;

  for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
    if (useless_type_conversion_p (dest, src))
      return true;

  return false;
}
/* Return true if TYPE1 is a fixed-point type and if conversions to and
   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */

static bool
valid_fixed_convert_types_p (tree type1, tree type2)
{
  return (FIXED_POINT_TYPE_P (type1)
          && (INTEGRAL_TYPE_P (type2)
              || SCALAR_FLOAT_TYPE_P (type2)
              || FIXED_POINT_TYPE_P (type2)));
}
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  if (gimple_call_internal_p (stmt))
    {
      if (fn)
        {
          error ("gimple call has two targets");
          debug_generic_stmt (fn);
          return true;
        }
      /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
      else if (gimple_call_internal_fn (stmt) == IFN_PHI)
        {
          return false;
        }
    }
  else
    {
      if (!fn)
        {
          error ("gimple call has no target");
          return true;
        }
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
          || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
          || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ???  At least C++ misses conversions at assignments from
         void * call results.
         For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
           && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_UNREACHABLE:
        case BUILT_IN_TRAP:
          if (gimple_call_num_args (stmt) > 0)
            {
              /* Built-in unreachable with parameters might not be caught by
                 undefined behavior sanitizer.  Front-ends do check users do not
                 call them that way but we also produce calls to
                 __builtin_unreachable internally, for example when IPA figures
                 out a call cannot happen in a legal program.  In such cases,
                 we must make sure arguments are stripped off.  */
              error ("__builtin_unreachable or __builtin_trap call with "
                     "arguments");
              return true;
            }
          break;
        default:
          break;
        }
    }

  /* ???  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
           && !is_gimple_val (arg))
          || (!is_gimple_reg_type (TREE_TYPE (arg))
              && !is_gimple_lvalue (arg)))
        {
          error ("invalid argument to gimple call");
          debug_generic_expr (arg);
          return true;
        }
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1, comparison code is CODE.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
          || !POINTER_TYPE_P (op1_type)
          || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
          || TYPE_PRECISION (type) == 1))
    {
      if ((TREE_CODE (op0_type) == VECTOR_TYPE
           || TREE_CODE (op1_type) == VECTOR_TYPE)
          && code != EQ_EXPR && code != NE_EXPR
          && !VECTOR_BOOLEAN_TYPE_P (op0_type)
          && !VECTOR_INTEGER_TYPE_P (op0_type))
        {
          error ("unsupported operation or type for vector comparison"
                 " returning a boolean");
          debug_generic_expr (op0_type);
          debug_generic_expr (op1_type);
          return true;
        }
    }
  /* Or a boolean vector type with the same element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
           && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
          || TREE_CODE (op1_type) != VECTOR_TYPE)
        {
          error ("non-vector operands in vector comparison");
          debug_generic_expr (op0_type);
          debug_generic_expr (op1_type);
          return true;
        }

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
        {
          error ("invalid vector comparison resulting type");
          debug_generic_expr (type);
          return true;
        }
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
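
/* For instance (illustrative): given "int x, y;" and two 4-element
   integer vectors a and b, valid GIMPLE comparisons include

     _1 = x < y;    result: boolean (or single-bit integral) type
     _2 = a < b;    result: boolean vector with 4 elements

   while "a < y" is rejected for mismatching operand types, and a
   scalar boolean result for a vector comparison is only accepted for
   EQ_EXPR/NE_EXPR or boolean/integer vector operands.  */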
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
        /* Allow conversions from pointer type to integral type only if
           there is no sign or zero extension involved.
           For targets where the precision of ptrofftype doesn't match that
           of pointers we need to allow arbitrary conversions to ptrofftype.  */
        if ((POINTER_TYPE_P (lhs_type)
             && INTEGRAL_TYPE_P (rhs1_type))
            || (POINTER_TYPE_P (rhs1_type)
                && INTEGRAL_TYPE_P (lhs_type)
                && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
                    || ptrofftype_p (sizetype))))
          return false;

        /* Allow conversion from integral to offset type and vice versa.  */
        if ((TREE_CODE (lhs_type) == OFFSET_TYPE
             && INTEGRAL_TYPE_P (rhs1_type))
            || (INTEGRAL_TYPE_P (lhs_type)
                && TREE_CODE (rhs1_type) == OFFSET_TYPE))
          return false;

        /* Otherwise assert we are converting between types of the
           same kind.  */
        if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
          {
            error ("invalid types in nop conversion");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
        if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
            || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
                == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
          {
            error ("invalid types in address space conversion");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case FIXED_CONVERT_EXPR:
      {
        if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
            && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
          {
            error ("invalid types in fixed-point conversion");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case FLOAT_EXPR:
      {
        if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
            && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
                || !VECTOR_FLOAT_TYPE_P (lhs_type)))
          {
            error ("invalid types in conversion to floating point");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case FIX_TRUNC_EXPR:
      {
        if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
            && (!VECTOR_INTEGER_TYPE_P (lhs_type)
                || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
          {
            error ("invalid types in conversion to integer");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            return true;
          }

        return false;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      if (!VECTOR_TYPE_P (rhs1_type)
          || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
        {
          error ("reduction should convert from vector to element type");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          return true;
        }
      return false;

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
        if (TREE_CODE (lhs_type) != COMPLEX_TYPE
            || !(INTEGRAL_TYPE_P (rhs1_type)
                 || SCALAR_FLOAT_TYPE_P (rhs1_type))
            || !(INTEGRAL_TYPE_P (rhs2_type)
                 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
          {
            error ("type mismatch in complex expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
        /* Shifts and rotates are ok on integral types, fixed point
           types and integer vector types.  */
        if ((!INTEGRAL_TYPE_P (rhs1_type)
             && !FIXED_POINT_TYPE_P (rhs1_type)
             && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
                  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
            || (!INTEGRAL_TYPE_P (rhs2_type)
                /* Vector shifts of vectors are also ok.  */
                && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
                     && TREE_CODE (rhs2_type) == VECTOR_TYPE
                     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
            || !useless_type_conversion_p (lhs_type, rhs1_type))
          {
            error ("type mismatch in shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
        if (!INTEGRAL_TYPE_P (lhs_type)
            || !INTEGRAL_TYPE_P (rhs1_type)
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
            || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
                > TYPE_PRECISION (TREE_TYPE (lhs_type))))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
        tree lhs_etype = lhs_type;
        tree rhs1_etype = rhs1_type;
        tree rhs2_etype = rhs2_type;
        if (TREE_CODE (lhs_type) == VECTOR_TYPE)
          {
            if (TREE_CODE (rhs1_type) != VECTOR_TYPE
                || TREE_CODE (rhs2_type) != VECTOR_TYPE)
              {
                error ("invalid non-vector operands to vector valued plus");
                return true;
              }
            lhs_etype = TREE_TYPE (lhs_type);
            rhs1_etype = TREE_TYPE (rhs1_type);
            rhs2_etype = TREE_TYPE (rhs2_type);
          }
        if (POINTER_TYPE_P (lhs_etype)
            || POINTER_TYPE_P (rhs1_etype)
            || POINTER_TYPE_P (rhs2_etype))
          {
            error ("invalid (pointer) operands to plus/minus");
            return true;
          }

        /* Continue with generic binary expression handling.  */
        break;
      }

    case POINTER_PLUS_EXPR:
      {
        if (!POINTER_TYPE_P (rhs1_type)
            || !useless_type_conversion_p (lhs_type, rhs1_type)
            || !ptrofftype_p (rhs2_type))
          {
            error ("type mismatch in pointer plus expression");
            debug_generic_stmt (lhs_type);
            debug_generic_stmt (rhs1_type);
            debug_generic_stmt (rhs2_type);
            return true;
          }

        return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
         connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);

    case WIDEN_MULT_EXPR:
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
        return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
              || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
      {
        if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
              || TREE_CODE (lhs_type) != VECTOR_TYPE)
             && ((!INTEGRAL_TYPE_P (rhs1_type)
                  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
                 || (!INTEGRAL_TYPE_P (lhs_type)
                     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
            || !useless_type_conversion_p (lhs_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (rhs2_type))
                < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in widening sum reduction");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !types_compatible_p (rhs1_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (lhs_type))
                != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in vector widening multiplication");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case VEC_PACK_TRUNC_EXPR:
      /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concat
         vector boolean types.  */
      if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
          && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
          && types_compatible_p (rhs1_type, rhs2_type)
          && (TYPE_VECTOR_SUBPARTS (lhs_type)
              == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
        return false;

      /* Fallthru.  */
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
                  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
                  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
                 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
                     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
            || !types_compatible_p (rhs1_type, rhs2_type)
            || (GET_MODE_SIZE (element_mode (rhs1_type))
                != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
          {
            error ("type mismatch in vector pack expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if ((!INTEGRAL_TYPE_P (rhs1_type)
           && !FIXED_POINT_TYPE_P (rhs1_type))
          || !useless_type_conversion_p (rhs1_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type)
          || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
          || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
        {
          error ("type mismatch in widening multiply-accumulate expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case FMA_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
          || !useless_type_conversion_p (lhs_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type))
        {
          error ("type mismatch in fused multiply-add expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case VEC_COND_EXPR:
      if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
          || TYPE_VECTOR_SUBPARTS (rhs1_type)
             != TYPE_VECTOR_SUBPARTS (lhs_type))
        {
          error ("the first argument of a VEC_COND_EXPR must be of a "
                 "boolean vector type of the same number of elements "
                 "as the result");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          return true;
        }
      /* Fallthrough.  */
    case COND_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type))
        {
          error ("type mismatch in conditional expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }
      break;

    case VEC_PERM_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
          || !useless_type_conversion_p (lhs_type, rhs2_type))
        {
          error ("type mismatch in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
          || TREE_CODE (rhs2_type) != VECTOR_TYPE
          || TREE_CODE (rhs3_type) != VECTOR_TYPE)
        {
          error ("vector types expected in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
          || TYPE_VECTOR_SUBPARTS (rhs2_type)
             != TYPE_VECTOR_SUBPARTS (rhs3_type)
          || TYPE_VECTOR_SUBPARTS (rhs3_type)
             != TYPE_VECTOR_SUBPARTS (lhs_type))
        {
          error ("vectors with different element number found "
                 "in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
          || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
             != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
        {
          error ("invalid mask type in vector permute expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      return false;

    case SAD_EXPR:
      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
          || !useless_type_conversion_p (lhs_type, rhs3_type)
          || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
             > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
        {
          error ("type mismatch in sad expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
          || TREE_CODE (rhs2_type) != VECTOR_TYPE
          || TREE_CODE (rhs3_type) != VECTOR_TYPE)
        {
          error ("vector types expected in sad expression");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          debug_generic_expr (rhs3_type);
          return true;
        }

      return false;

    case BIT_INSERT_EXPR:
      if (! useless_type_conversion_p (lhs_type, rhs1_type))
        {
          error ("type mismatch in BIT_INSERT_EXPR");
          debug_generic_expr (lhs_type);
          debug_generic_expr (rhs1_type);
          return true;
        }
      if (! ((INTEGRAL_TYPE_P (rhs1_type)
              && INTEGRAL_TYPE_P (rhs2_type))
             || (VECTOR_TYPE_P (rhs1_type)
                 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
        {
          error ("not allowed type combination in BIT_INSERT_EXPR");
          debug_generic_expr (rhs1_type);
          debug_generic_expr (rhs2_type);
          return true;
        }
      if (! tree_fits_uhwi_p (rhs3)
          || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
          || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
        {
          error ("invalid position or size in BIT_INSERT_EXPR");
          return true;
        }
      if (INTEGRAL_TYPE_P (rhs1_type))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
          if (bitpos >= TYPE_PRECISION (rhs1_type)
              || (bitpos + TYPE_PRECISION (rhs2_type)
                  > TYPE_PRECISION (rhs1_type)))
            {
              error ("insertion out of range in BIT_INSERT_EXPR");
              return true;
            }
        }
      else if (VECTOR_TYPE_P (rhs1_type))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
          if (bitpos % bitsize != 0)
            {
              error ("vector insertion not at element boundary");
              return true;
            }
        }
      return false;

    case DOT_PROD_EXPR:
      {
        if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
              || TREE_CODE (lhs_type) != VECTOR_TYPE)
             && ((!INTEGRAL_TYPE_P (rhs1_type)
                  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
                 || (!INTEGRAL_TYPE_P (lhs_type)
                     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
            || !types_compatible_p (rhs1_type, rhs2_type)
            || !useless_type_conversion_p (lhs_type, rhs3_type)
            || (GET_MODE_SIZE (element_mode (rhs3_type))
                < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
          {
            error ("type mismatch in dot product reduction");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }
        return false;
      }

    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("non-decl/MEM_REF LHS in clobber statement");
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
        tree op = TREE_OPERAND (rhs1, 0);
        if (!is_gimple_addressable (op))
          {
            error ("invalid operand in unary expression");
            return true;
          }

        /* Technically there is no longer a need for matching types, but
           gimple hygiene asks for this check.  In LTO we can end up
           combining incompatible units and thus end up with addresses
           of globals that change their type to a common one.  */
        if (!in_lto_p
            && !types_compatible_p (TREE_TYPE (op),
                                    TREE_TYPE (TREE_TYPE (rhs1)))
            && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
                                                          TREE_TYPE (op)))
          {
            error ("type mismatch in address expression");
            debug_generic_stmt (TREE_TYPE (rhs1));
            debug_generic_stmt (TREE_TYPE (op));
            return true;
          }

        return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference  */
    case INDIRECT_REF:
      error ("INDIRECT_REF in gimple IL");
      return true;

    case COMPONENT_REF:
    case MEM_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
      if (!is_gimple_reg (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs)))
        {
          error ("invalid rhs for gimple memory store");
          debug_generic_stmt (lhs);
          debug_generic_stmt (rhs1);
          return true;
        }
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant  */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration  */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      if (!is_gimple_reg (lhs)
          && !is_gimple_reg (rhs1)
          && is_gimple_reg_type (TREE_TYPE (lhs)))
        {
          error ("invalid rhs for gimple memory store");
          debug_generic_stmt (lhs);
          debug_generic_stmt (rhs1);
          return true;
        }
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
        {
          unsigned int i;
          tree elt_i, elt_v, elt_t = NULL_TREE;

          if (CONSTRUCTOR_NELTS (rhs1) == 0)
            return res;
          /* For vector CONSTRUCTORs we require that either it is empty
             CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
             (then the element count must be correct to cover the whole
             outer vector and index must be NULL on all elements, or it is
             a CONSTRUCTOR of scalar elements, where we as an exception allow
             smaller number of elements (assuming zero filling) and
             consecutive indexes as compared to NULL indexes (such
             CONSTRUCTORs can appear in the IL from FEs).  */
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
            {
              if (elt_t == NULL_TREE)
                {
                  elt_t = TREE_TYPE (elt_v);
                  if (TREE_CODE (elt_t) == VECTOR_TYPE)
                    {
                      tree elt_t = TREE_TYPE (elt_v);
                      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
                                                      TREE_TYPE (elt_t)))
                        {
                          error ("incorrect type of vector CONSTRUCTOR"
                                 " elements");
                          debug_generic_stmt (rhs1);
                          return true;
                        }
                      else if (CONSTRUCTOR_NELTS (rhs1)
                               * TYPE_VECTOR_SUBPARTS (elt_t)
                               != TYPE_VECTOR_SUBPARTS (rhs1_type))
                        {
                          error ("incorrect number of vector CONSTRUCTOR"
                                 " elements");
                          debug_generic_stmt (rhs1);
                          return true;
                        }
                    }
                  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
                                                       elt_t))
                    {
                      error ("incorrect type of vector CONSTRUCTOR elements");
                      debug_generic_stmt (rhs1);
                      return true;
                    }
                  else if (CONSTRUCTOR_NELTS (rhs1)
                           > TYPE_VECTOR_SUBPARTS (rhs1_type))
                    {
                      error ("incorrect number of vector CONSTRUCTOR elements");
                      debug_generic_stmt (rhs1);
                      return true;
                    }
                }
              else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
                {
                  error ("incorrect type of vector CONSTRUCTOR elements");
                  debug_generic_stmt (rhs1);
                  return true;
                }
              if (elt_i != NULL_TREE
                  && (TREE_CODE (elt_t) == VECTOR_TYPE
                      || TREE_CODE (elt_i) != INTEGER_CST
                      || compare_tree_int (elt_i, i) != 0))
                {
                  error ("vector CONSTRUCTOR with non-NULL element index");
                  debug_generic_stmt (rhs1);
                  return true;
                }
              if (!is_gimple_val (elt_v))
                {
                  error ("vector CONSTRUCTOR element is not a GIMPLE value");
                  debug_generic_stmt (rhs1);
                  return true;
                }
            }
        }
      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
        {
          error ("non-vector CONSTRUCTOR with elements");
          debug_generic_stmt (rhs1);
          return true;
        }
      return res;

    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_assign (gassign *stmt)
{
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      return verify_gimple_assign_single (stmt);

    case GIMPLE_UNARY_RHS:
      return verify_gimple_assign_unary (stmt);

    case GIMPLE_BINARY_RHS:
      return verify_gimple_assign_binary (stmt);

    case GIMPLE_TERNARY_RHS:
      return verify_gimple_assign_ternary (stmt);

    default:
      gcc_unreachable ();
    }
}
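
/* Illustrative mapping of the rhs classes dispatched above (not
   exhaustive):

     GIMPLE_SINGLE_RHS    x = y;          x = *p;       x = CONSTRUCTOR
     GIMPLE_UNARY_RHS     x = -y;         x = (int) y;
     GIMPLE_BINARY_RHS    x = y + z;      x = y < z;
     GIMPLE_TERNARY_RHS   x = c ? y : z;  fused multiply-add

   gimple_assign_rhs_class derives the class from the rhs code, so each
   verifier above only has to handle the matching operand count.  */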
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (greturn *stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
          && SSA_NAME_VAR (op)
          && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
          && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_goto (ggoto *stmt)
{
  tree dest = gimple_goto_dest (stmt);

  /* ???  We have two canonical forms of direct goto destinations, a
     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
  if (TREE_CODE (dest) != LABEL_DECL
      && (!is_gimple_val (dest)
          || !POINTER_TYPE_P (TREE_TYPE (dest))))
    {
      error ("goto destination is neither a label nor a pointer");
      return true;
    }

  return false;
}
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
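/* For reference, the invariants checked above mean a well-formed case
   vector looks like this (a sketch; L0..L2 are hypothetical labels):

     slot 0:  default        CASE_LOW == CASE_HIGH == NULL  -> L0
     slot 1:  case 1         CASE_LOW only                  -> L1
     slot 2:  case 5 ... 9   CASE_LOW < CASE_HIGH           -> L2

   with all case labels sharing one type no wider than the index type,
   and each CASE_LOW strictly above the previous slot's upper bound.  */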
/* Verify a gimple debug statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
{
  /* There isn't much that could be wrong in a gimple debug stmt.  A
     gimple debug bind stmt, for example, maps a tree, that's usually
     a VAR_DECL or a PARM_DECL, but that could also be some scalarized
     component or member of an aggregate type, to another tree, that
     can be an arbitrary expression.  These stmts expand into debug
     insns, and are converted to debug notes by var-tracking.c.  */
  return false;
}
/* Verify a gimple label statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_label (glabel *stmt)
{
  tree decl = gimple_label_label (stmt);
  int uid;
  bool err = false;

  if (TREE_CODE (decl) != LABEL_DECL)
    return true;
  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
      && DECL_CONTEXT (decl) != current_function_decl)
    {
      error ("label's context is not the current function decl");
      err |= true;
    }

  uid = LABEL_DECL_UID (decl);
  if (cfun->cfg
      && (uid == -1
	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
    {
      error ("incorrect entry in label_to_block_map");
      err |= true;
    }

  uid = EH_LANDING_PAD_NR (decl);
  if (uid)
    {
      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
      if (decl != lp->post_landing_pad)
	{
	  error ("incorrect setting of landing pad number");
	  err |= true;
	}
    }

  return err;
}
/* Verify a gimple cond statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_cond (gcond *stmt)
{
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    {
      error ("invalid comparison code in gimple cond");
      return true;
    }
  if (!(!gimple_cond_true_label (stmt)
	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
      || !(!gimple_cond_false_label (stmt)
	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
    {
      error ("invalid labels in gimple cond");
      return true;
    }

  return verify_gimple_comparison (boolean_type_node,
				   gimple_cond_lhs (stmt),
				   gimple_cond_rhs (stmt),
				   gimple_cond_code (stmt));
}
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple *phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
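/* A minimal PHI that passes the checks above can be built with the real
   construction APIs (a sketch; RES, VAL, E and BB are assumed to exist):

     gphi *phi = create_phi_node (res, bb);          res: an SSA_NAME
     add_phi_arg (phi, val, e, UNKNOWN_LOCATION);    val: a gimple value

   A virtual PHI must instead use names rooted at gimple_vop (cfun) for
   both the result and every argument.  */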
/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple *stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
		   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
/* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
   is a problem, otherwise false.  */

static bool
verify_gimple_transaction (gtransaction *stmt)
{
  tree lab;

  lab = gimple_transaction_label_norm (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_uninst (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_over (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;

  return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
}
/* Verify the GIMPLE statements inside the statement list STMTS.  */

DEBUG_FUNCTION void
verify_gimple_in_seq (gimple_seq stmts)
{
  timevar_push (TV_TREE_STMT_VERIFY);
  if (verify_gimple_in_seq_2 (stmts))
    internal_error ("verify_gimple failed");
  timevar_pop (TV_TREE_STMT_VERIFY);
}
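/* Usage sketch (illustrative): callers typically guard this with
   flag_checking since it aborts via internal_error instead of returning
   a status, e.g.

     if (flag_checking)
       verify_gimple_in_seq (gimple_body (fndecl));    FNDECL assumed

   gimple_body is the real accessor for a function's lowered body.  */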
/* Return true when the T can be shared.  */

static bool
tree_node_can_be_shared (tree t)
{
  if (IS_TYPE_OR_DECL_P (t)
      || is_gimple_min_invariant (t)
      || TREE_CODE (t) == SSA_NAME
      || t == error_mark_node
      || TREE_CODE (t) == IDENTIFIER_NODE)
    return true;

  if (TREE_CODE (t) == CASE_LABEL_EXPR)
    return true;

  return false;
}
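/* E.g. integer_zero_node, types, decls and SSA_NAMEs may legitimately
   appear as operands of many statements at once, whereas expression
   trees such as an ARRAY_REF must be unshared per statement; the walker
   below flags the latter when the same node is reached twice.  */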
/* Called via walk_tree.  Verify tree sharing.  */

static tree
verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<void *> *visited = (hash_set<void *> *) data;

  if (tree_node_can_be_shared (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  if (visited->add (*tp))
    return *tp;

  return NULL;
}
/* Called via walk_gimple_stmt.  Verify tree sharing.  */

static tree
verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
}
static bool eh_error_found;
bool
verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
			   hash_set<gimple *> *visited)
{
  if (!visited->contains (stmt))
    {
      error ("dead STMT in EH table");
      debug_gimple_stmt (stmt);
      eh_error_found = true;
    }
  return true;
}
/* Verify if the location LOCs block is in BLOCKS.  */

static bool
verify_location (hash_set<tree> *blocks, location_t loc)
{
  tree block = LOCATION_BLOCK (loc);
  if (block != NULL_TREE
      && !blocks->contains (block))
    {
      error ("location references block not in block tree");
      return true;
    }
  if (block != NULL_TREE)
    return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
  return false;
}
/* Called via walk_tree.  Verify that expressions have no blocks.  */

static tree
verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
{
  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (LOCATION_BLOCK (loc) != NULL)
    return *tp;

  return NULL;
}
/* Called via walk_tree.  Verify locations of expressions.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;

  if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
    {
      tree t = DECL_DEBUG_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  if ((VAR_P (*tp)
       || TREE_CODE (*tp) == PARM_DECL
       || TREE_CODE (*tp) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (*tp))
    {
      tree t = DECL_VALUE_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (verify_location (blocks, loc))
    return *tp;

  return NULL;
}
/* Called via walk_gimple_op.  Verify locations of expressions.  */

static tree
verify_expr_location (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_expr_location_1 (tp, walk_subtrees, wi->info);
}
/* Insert all subblocks of BLOCK into BLOCKS and recurse.  */

static void
collect_subblocks (hash_set<tree> *blocks, tree block)
{
  tree t;
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    {
      blocks->add (t);
      collect_subblocks (blocks, t);
    }
}
/* Verify the GIMPLE statements in the CFG of FN.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  hash_set<void *> visited;
  hash_set<gimple *> visited_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  visited_stmts.add (phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  visited_stmts.add (stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  eh_error_found = false;
  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
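/* Usage sketch (illustrative): the pass manager effectively runs

     if (flag_checking)
       verify_gimple_in_cfg (cfun, true);

   after passes when checking is enabled; passing false as
   VERIFY_NOTHROW suppresses only the "marked for throw, but doesn't"
   diagnostic.  */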
/* Verifies that the flow information is OK.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt));
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* fallthru */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
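/* Usage sketch: redirection code uses this to get a stable jump target
   in DEST, e.g. (CASE_ELT being some CASE_LABEL_EXPR to retarget):

     tree label = gimple_block_label (dest);
     CASE_LABEL (case_elt) = label;

   as done for switch and transaction statements in
   gimple_redirect_edge_and_branch below.  */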
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      if (e->flags & EDGE_TM_ABORT)
	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      else if (e->flags & EDGE_TM_UNINSTRUMENTED)
	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
					     gimple_block_label (dest));
      else
	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
gimple_can_remove_branch_p (const_edge e)
{
  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  return true;
}
/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = gimple_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
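/* Usage sketch: this is installed as the split_block hook, so clients
   go through the generic cfghooks wrapper, e.g.

     edge fallthru = split_block (bb, stmt);    STMT assumed, may be NULL
     basic_block rest = fallthru->dest;

   where the wrapper also creates the fallthrough edge between the two
   halves.  */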
/* Moves basic block BB after block AFTER.  */

static bool
gimple_move_block_after (basic_block bb, basic_block after)
{
  if (bb->prev_bb == after)
    return true;

  unlink_block (bb);
  link_block (bb, after);

  return true;
}
/* Return TRUE if block BB has no executable statements, otherwise return
   FALSE.  */

static bool
gimple_empty_block_p (basic_block bb)
{
  /* BB must have no executable statements.  */
  gimple_stmt_iterator gsi = gsi_after_labels (bb);
  if (phi_nodes (bb))
    return false;
  if (gsi_end_p (gsi))
    return true;
  if (is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  return gsi_end_p (gsi);
}
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple *last, *split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  gsi_prev (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
/* Return true if basic_block can be duplicated.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
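/* Usage sketch: as the duplicate_block hook this is normally reached
   via the generic cfghooks wrapper, e.g.

     basic_block copy = duplicate_block (bb, NULL, NULL);

   after which SSA form must be repaired by the caller (see
   add_phi_args_after_copy below and update_ssa), since the copy does
   not preserve it.  */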
/* Adds phi node arguments for edge E_COPY after basic block duplication.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
/* Basic block BB_COPY was created by code duplication.  Add phi node
   arguments for edges going out of BB_COPY.  The blocks that were
   duplicated have BB_DUPLICATED set.  */

static void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
  edge e_copy;
  edge_iterator ei;

  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
    {
      add_phi_args_after_copy_edge (e_copy);
    }
}
/* Blocks in REGION_COPY array of length N_REGION were created by
   duplication of basic blocks.  Add phi node arguments for edges
   going from these blocks.  If E_COPY is not NULL, also add
   phi node arguments for its destination.  */

static void
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
			 edge e_copy)
{
  unsigned i;

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n_region; i++)
    add_phi_args_after_copy_bb (region_copy[i]);
  if (e_copy)
    add_phi_args_after_copy_edge (e_copy);

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags &= ~BB_DUPLICATED;
}
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  profile_count total_count = profile_count::uninitialized ();
  profile_count entry_count = profile_count::uninitialized ();

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count.initialized_p ())
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  if (!(total_count > 0) || !(entry_count > 0))
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count > 0 && entry_count > 0)
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - entry_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
					   total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
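/* Usage sketch: the primary client is loop-header copying, roughly

     edge entry = loop_preheader_edge (loop);
     gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
				   copied_bbs, true);

   where EXIT, BBS and N_BBS describe the header region being peeled;
   their computation is loop-specific and elided here.  */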
/* Checks if BB is part of the region defined by N_REGION BBS.  */
static bool
bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
{
  unsigned int n;

  for (n = 0; n < n_region; n++)
    {
      if (bb == bbs[n])
	return true;
    }
  return false;
}
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.  */

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count > 0)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  sorig->count = exits[1]->count;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;
  snew->count = exits[1]->count;

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]),
				      exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e,
			 gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
   adding blocks when the dominator traversal reaches EXIT.  This
   function silently assumes that ENTRY strictly dominates EXIT.  */

void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
			      vec<basic_block> *bbs_p)
{
  basic_block son;

  for (son = first_dom_son (CDI_DOMINATORS, entry);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      bbs_p->safe_push (son);
      if (son != exit)
	gather_blocks_in_sese_region (son, exit, bbs_p);
    }
}
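/* Usage sketch: callers seed the vector with the entry block and let
   this routine add the dominated blocks, e.g.

     auto_vec<basic_block> bbs;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   ENTRY_BB itself is deliberately not pushed here.  */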
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Data to pass to move_stmt_r and its helpers when moving statements
   to another function.  */
struct move_stmt_d
{
  tree orig_block;
  tree new_block;
  tree from_context;
  tree to_context;
  hash_map<tree, tree> *vars_map;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	TREE_SET_BLOCK (t, p->new_block);
      else if (flag_checking)
	{
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = move_stmt_eh_region_nr (old_nr, p);

  return build_int_cst (integer_type_node, new_nr);
}
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Tree walker to replace the decls used inside value expressions by
   duplicates.  */

static tree
replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct replace_decls_d *rd = (struct replace_decls_d *)data;

  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
      break;
    default:
      break;
    }

  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = false;

  return NULL;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };

	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from FN1 to FN2.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  unsigned i;
  bitmap bbs = BITMAP_ALLOC (NULL);

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
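/* Example (an illustrative sketch, not part of the original source): a
   caller that wants to outline the region headed by ENTRY and ended by
   EXIT typically collects the blocks and verifies them first, mirroring
   what move_sese_region_to_fn below does:

     vec<basic_block> bbs = vNULL;
     bbs.safe_push (entry);
     gather_blocks_in_sese_region (entry, exit, &bbs);
     if (flag_checking)
       verify_sese (entry, exit, &bbs);

   Here ENTRY and EXIT are hypothetical placeholders for the caller's
   region boundaries.  */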
/* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */

static bool
gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
{
  bitmap release_names = (bitmap)data;

  if (TREE_CODE (from) != SSA_NAME)
    return true;

  bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
  return true;
}
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.

   TODO: investigate whether we can reuse gimple_duplicate_sese_region to
   reimplement move_sese_region_to_fn by duplicating the region rather than
   moving it.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  profile_probability *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  if (flag_checking)
    verify_sese (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (profile_probability, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (profile_probability, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    struct loops *l = loops_for_fn (saved_cfun);
	    loop_exit **slot
	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
					       NO_INSERT);
	    if (slot)
	      l->exits->clear_slot (slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  if (gimple_in_ssa_p (cfun))
    for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
      {
	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
	set_ssa_default_def (dest_cfun, arg, narg);
	vars_map.put (arg, narg);
      }

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vectorize)
	      dest_cfun->has_force_vectorize_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    &vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    delete eh_map;

  if (gimple_in_ssa_p (cfun))
    {
      /* We need to release ssa-names in a defined order, so first find them,
	 and then iterate in ascending version order.  */
      bitmap release_names = BITMAP_ALLOC (NULL);
      vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
	release_ssa_name (ssa_name (i));
      BITMAP_FREE (release_names);
    }

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
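/* Usage sketch (illustrative, not from this file): the OpenMP expander
   outlines a parallel region roughly like so, where CHILD_FN is the decl
   of the new function and the block names are placeholders:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   after which NEW_BB stands in for the whole region in the parent's
   CFG.  See omp-expand.c for the real caller.  */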
/* Dump default def DEF to file FILE using FLAGS and indentation
   SPC.  */

static void
dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
{
  for (int i = 0; i < spc; ++i)
    fprintf (file, " ");
  dump_ssaname_info_to_file (file, def, spc);

  print_generic_expr (file, TREE_TYPE (def), flags);
  fprintf (file, " ");
  print_generic_expr (file, def, flags);
  fprintf (file, " = ");
  print_generic_expr (file, SSA_NAME_VAR (def), flags);
  fprintf (file, ";\n");
}
/* Print no_sanitize attribute to FILE for a given attribute VALUE.  */

static void
print_no_sanitize_attr_value (FILE *file, tree value)
{
  unsigned int flags = tree_to_uhwi (value);
  bool first = true;
  for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
    {
      if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
	{
	  if (!first)
	    fprintf (file, " | ");
	  fprintf (file, "%s", sanitizer_opts[i].name);
	  first = false;
	}
    }
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
 */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
    {
      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = DECL_ATTRIBUTES (fndecl); chain;
	   first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  if (flags & TDF_GIMPLE)
    {
      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
    }
  else
    fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      if (gimple_in_ssa_p (cfun))
	FOR_EACH_SSA_NAME (ix, name, cfun)
	  {
	    if (!SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun->curr_properties & PROP_gimple_any)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  dump_function_to_file (fn, stderr, flags);
}
/* Print on FILE the indexes for the predecessors of basic_block BB.  */

static void
print_pred_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    fprintf (file, "bb_%d ", e->src->index);
}


/* Print on FILE the indexes for the successors of basic_block BB.  */

static void
print_succ_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    fprintf (file, "bb_%d ", e->dest->index);
}
/* Print to FILE the basic block BB following the VERBOSITY level.  */

void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s  {\n", s_indent);
      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
      fprintf (file, "%s  }\n", s_indent);
    }
}

static void print_loop_and_siblings (FILE *, struct loop *, int, int);

/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Following VERBOSITY level this outputs the contents of the
   loop, or just its structure.  */

static void
print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
			 int verbosity)
{
  if (loop == NULL)
    return;

  print_loop (file, loop, indent, verbosity);
  print_loop_and_siblings (file, loop->next, indent, verbosity);
}
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE.  */

void
print_loops (FILE *file, int verbosity)
{
  basic_block bb;

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  fprintf (file, "\nLoops in function: %s\n", current_function_name ());
  if (bb && bb->loop_father)
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
/* Dump a loop.  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}

DEBUG_FUNCTION void
debug (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump a loop verbosely.  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}

DEBUG_FUNCTION void
debug_verbose (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}

/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}

/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
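/* Example (illustrative): the DEBUG_FUNCTION entry points above are
   convenient from a debugger session, e.g.

     (gdb) call debug_loops (1)
     (gdb) call debug_loop_num (2, 3)

   which print the loop tree of the current cfun at increasing levels of
   detail; see print_loop above for what each verbosity level adds.  */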
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_call_p (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
}


/* Return true if BB ends with a conditional branch.  Return false,
   otherwise.  */

static bool
gimple_block_ends_with_condjump_p (const_basic_block bb)
{
  gimple *stmt = last_stmt (CONST_CAST_BB (bb));
  return (stmt && gimple_code (stmt) == GIMPLE_COND);
}
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not.  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		  e->count = profile_count::guessed_zero ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
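/* Example (illustrative): given a block whose last statement is a call
   that may not return,

     <bb 5>:
       foo ();    ;; may call exit ()

   the pass adds a fake edge 5 -> EXIT so that instrumentation such as
   -fprofile-arcs sees that control can leave the function here.  A
   caller typically instruments the whole CFG by passing a null bitmap,
   dispatching through the generic cfghooks wrapper:

     flow_call_edges_add (NULL);

   (sketch; the wrapper name is from cfghooks, where this function is
   installed via the hook table below).  */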
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
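/* Worked example (illustrative): in the diamond CFG

     A -> B,  A -> C,  B -> D,  C -> D

   removing edge A->B deletes B, since B's only other potential
   predecessors would have to be dominated by B itself.  D lies in the
   dominance frontier of the removed block, so its old immediate
   dominator A is recorded in DF_IDOM, and the final
   iterate_fix_dominators call recomputes idom(D) = C.  */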
/* Purge dead EH edges from basic block BB.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead EH edges from basic block listed in BLOCKS.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_eh_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block listed in BLOCKS.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
/* This function is called whenever a new edge is created or
   redirected.  */

static void
gimple_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (!gimple_seq_empty_p (phi_nodes (bb)))
    reserve_phi_args_for_new_edge (bb);
}

/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
gimple_execute_on_shrinking_pred (edge e)
{
  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
    remove_phi_args (e);
}
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, an additional edge 'e' was created to
   connect 'new_head' and 'first'.  This routine adds, on that additional
   edge 'e', the phi args that the new_head-to-second edge received as part
   of the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */
static void
gimple_account_profile_record (basic_block bb, int after_pass,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size[after_pass]
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (bb->count.initialized_p ())
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->frequency;
    }
}
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
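/* Note (illustrative): this table is installed by
   gimple_register_cfg_hooks (cfghooks.c), so IR-independent code can
   manipulate the CFG through the generic entry points, roughly:

     gimple_register_cfg_hooks ();
     edge e = split_block (bb, stmt);  // dispatches to gimple_split_block
     basic_block n = split_edge (e);   // dispatches to gimple_split_edge

   See cfghooks.h for the full generic API.  */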
/* Split all critical edges.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}

namespace {
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
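/* Note (illustrative): the pass is instantiated from passes.def via
   make_pass_split_crit_edges and its work shows up in the
   -fdump-tree-crited dump.  A later pass that needs no critical edges
   simply requests PROP_no_crit_edges in its properties_required, which
   this pass provides above.  */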
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  e->count = bb->count.apply_probability (prob);
  new_bb->count = e->count;
  new_bb->frequency = prob.apply (bb->frequency);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->count -= e->count;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
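/* Usage sketch (illustrative; VAL, BB and STMT are hypothetical): to
   guard some new code so that it executes rarely, a pass can do

     gcond *cmp = gimple_build_cond (EQ_EXPR, val, null_pointer_node,
				     NULL_TREE, NULL_TREE);
     basic_block guard_bb
       = insert_cond_bb (bb, stmt, cmp,
			 profile_probability::very_unlikely ());

   The new block hangs off the TRUE edge, so statements appended to
   GUARD_BB run only when VAL is NULL.  */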
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
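/* Usage sketch (illustrative): lowering passes use these helpers to emit
   folded scalar computations ahead of an existing statement, e.g.

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Each call folds the expression and forces it into a GIMPLE value,
   inserting any intermediate statements before GSI.  */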
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
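/* Usage sketch (illustrative): given COND_BB ending in a GIMPLE_COND,

     edge true_e, false_e;
     extract_true_false_edges_from_block (cond_bb, &true_e, &false_e);

   after which true_e->dest is the block reached when the condition
   holds, regardless of the order of the successor edges.  */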
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else return false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
				     edge *true_controlled_edge,
				     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
/* Generate a range test LHS CODE RHS that determines whether INDEX is in the
   range [low, high].  Place the associated statements at the end of BB.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = unsigned_type_for (type);

  low = fold_convert (type, low);
  high = fold_convert (type, high);

  tree tmp = make_ssa_name (type);
  gassign *sub1
    = gimple_build_assign (tmp, MINUS_EXPR, index, low);

  *lhs = make_ssa_name (utype);
  gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);

  *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, a, GSI_SAME_STMT);
}
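/* For example (illustrative), for INDEX of type int and the range
   [3, 10], the emitted sequence is equivalent to

     _1 = index - 3;
     _2 = (unsigned int) _1;

   with *LHS = _2 and *RHS = 7, so the caller can test _2 <= 7: a single
   unsigned comparison implementing the whole range check, relying on
   wraparound to reject values below the lower bound.  */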
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple *last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
9241 /* IPA passes, compilation of earlier functions or inlining
9242 might have changed some properties, such as marked functions nothrow,
9243 pure, const or noreturn.
9244 Remove redundant edges and basic blocks, and create new ones if necessary.
9246 This pass can't be executed as stand alone pass from pass manager, because
9247 in between inlining and this fixup the verify_flow_info would fail. */
9250 execute_fixup_cfg (void)
9253 gimple_stmt_iterator gsi
;
9257 cgraph_node
*node
= cgraph_node::get (current_function_decl
);
9258 profile_count num
= node
->count
;
9259 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
9260 bool scale
= num
.initialized_p ()
9261 && (den
> 0 || num
== profile_count::zero ())
9266 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= node
->count
;
9267 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
9268 = EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
.apply_scale (num
, den
);
9270 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
9271 e
->count
= e
->count
.apply_scale (num
, den
);
9274 FOR_EACH_BB_FN (bb
, cfun
)
9277 bb
->count
= bb
->count
.apply_scale (num
, den
);
9278 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
9280 gimple
*stmt
= gsi_stmt (gsi
);
9281 tree decl
= is_gimple_call (stmt
)
9282 ? gimple_call_fndecl (stmt
)
9286 int flags
= gimple_call_flags (stmt
);
9287 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
9289 if (gimple_purge_dead_abnormal_call_edges (bb
))
9290 todo
|= TODO_cleanup_cfg
;
9292 if (gimple_in_ssa_p (cfun
))
9294 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9299 if (flags
& ECF_NORETURN
9300 && fixup_noreturn_call (stmt
))
9301 todo
|= TODO_cleanup_cfg
;
9304 /* Remove stores to variables we marked write-only.
9305 Keep access when store has side effect, i.e. in case when source
9307 if (gimple_store_p (stmt
)
9308 && !gimple_has_side_effects (stmt
))
9310 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9313 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9314 && varpool_node::get (lhs
)->writeonly
)
9316 unlink_stmt_vdef (stmt
);
9317 gsi_remove (&gsi
, true);
9318 release_defs (stmt
);
9319 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9323 /* For calls we can simply remove LHS when it is known
9324 to be write-only. */
9325 if (is_gimple_call (stmt
)
9326 && gimple_get_lhs (stmt
))
9328 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
9331 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
9332 && varpool_node::get (lhs
)->writeonly
)
9334 gimple_call_set_lhs (stmt
, NULL
);
9336 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
9340 if (maybe_clean_eh_stmt (stmt
)
9341 && gimple_purge_dead_eh_edges (bb
))
9342 todo
|= TODO_cleanup_cfg
;
9347 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
9348 e
->count
= e
->count
.apply_scale (num
, den
);
      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  int freq
                    = compute_call_stmt_bb_frequency (current_function_decl,
                                                      bb);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count, freq);
                }
            }
        }
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
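
/* Illustration only, not part of GCC: source that triggers the
   __builtin_unreachable fixup above is a noreturn function that in fact
   returns, e.g.

     __attribute__ ((noreturn)) void dies (void) { }  // falls off the end

     int g (void) { dies (); return 1; }

   After DIES is inlined into G, the inlined body falls through into a
   block with no successor edges and no control statement, so
   execute_fixup_cfg terminates that block with a call to
   __builtin_unreachable ().  */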
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg
} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
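
/* Note (illustration, not part of GCC internals documentation): unlike
   pass_warn_unused_result above, this pass overrides clone ().  The pass
   manager requires that of any pass instantiated more than once, and
   fixup_cfg appears at several points in passes.def; conceptually:

     opt_pass *first  = make_pass_fixup_cfg (g);
     opt_pass *second = first->clone ();  // distinct instance, same ctxt
*/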
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
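
/* Aside (illustration, not part of GCC): the three walkers above all
   visit the same pointers an edge owns -- source block, destination
   block, the pending insns (gimple or rtl depending on the current IR),
   and the BLOCK wrapped inside goto_locus.  The BLOCK must be pulled out
   explicitly because a location_t is not itself a pointer the collector
   can see:

     tree block = LOCATION_BLOCK (e->goto_locus);
     gt_ggc_mx (block);   // keep the lexical block live
*/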
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
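
/* Illustration only, not part of GCC: each test below opens with

     tree fndecl = push_fndecl ("cfg_test_...");
     function *fun = DECL_STRUCT_FUNCTION (fndecl);

   which leaves the dummy function pushed as cfun with just the two
   implicit blocks (ENTRY and EXIT) and no edges, and must balance the
   push with pop_cfun () before returning.  */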
/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */
static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}
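
/* Aside (illustration, not part of GCC): post-dominance is dominance on
   the reversed CFG, which is why the chain's relations mirror each
   other -- A idom B idom C walking forward from ENTRY, while C is the
   immediate post-dominator of B, and B of A, walking back from EXIT.  */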
/* Verify a simple CFG of the form:
       ENTRY
         |
         A
        / \
       t   f
      /     \
     B       C
      \     /
       \   /
         D
         |
        EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph, where every node in the subgraph has edges
   pointing to every other node, and also to itself.  */

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  const int n = 13;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
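
/* Aside (illustration, not part of GCC): the edge arithmetic checked
   above is 2 + n * n -- one edge from ENTRY into the subgraph, one from
   the subgraph to EXIT, and one edge for each ordered pair of subgraph
   nodes, self-loops included.  Node 0 additionally sees the ENTRY/EXIT
   pair, hence its n + 1 preds and n + 1 succs.  */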
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc.  */

#endif /* CHECKING_P */