1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
30 #include "double-int.h"
37 #include "fold-const.h"
38 #include "trans-mem.h"
39 #include "stor-layout.h"
40 #include "print-tree.h"
43 #include "hard-reg-set.h"
46 #include "dominance.h"
49 #include "basic-block.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-alias.h"
53 #include "internal-fn.h"
54 #include "gimple-fold.h"
56 #include "gimple-expr.h"
59 #include "gimple-iterator.h"
60 #include "gimplify-me.h"
61 #include "gimple-walk.h"
62 #include "gimple-ssa.h"
63 #include "plugin-api.h"
67 #include "tree-phinodes.h"
68 #include "ssa-iterators.h"
69 #include "stringpool.h"
70 #include "tree-ssanames.h"
71 #include "tree-ssa-loop-manip.h"
72 #include "tree-ssa-loop-niter.h"
73 #include "tree-into-ssa.h"
77 #include "tree-dump.h"
78 #include "tree-pass.h"
79 #include "diagnostic-core.h"
82 #include "tree-ssa-propagate.h"
83 #include "value-prof.h"
84 #include "tree-inline.h"
86 #include "tree-ssa-live.h"
88 #include "tree-cfgcleanup.h"
90 #include "wide-int-print.h"
92 /* This file contains functions for building the Control Flow Graph (CFG)
93 for a function tree. */
95 /* Local declarations. */
97 /* Initial capacity for the basic block array. */
98 static const int initial_cfg_capacity
= 20;
100 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
101 which use a particular edge. The CASE_LABEL_EXPRs are chained together
102 via their CASE_CHAIN field, which we clear after we're done with the
103 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
105 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
106 update the case vector in response to edge redirections.
108 Right now this table is set up and torn down at key points in the
109 compilation process. It would be nice if we could make the table
110 more persistent. The key is getting notification of changes to
111 the CFG (particularly edge removal, creation and redirection). */
113 static hash_map
<edge
, tree
> *edge_to_cases
;
115 /* If we record edge_to_cases, this bitmap will hold indexes
116 of basic blocks that end in a GIMPLE_SWITCH which we touched
117 due to edge manipulations. */
119 static bitmap touched_switch_bbs
;
121 /* CFG statistics. */
124 long num_merged_labels
;
127 static struct cfg_stats_d cfg_stats
;
129 /* Hash table to store last discriminator assigned for each locus. */
130 struct locus_discrim_map
136 /* Hashtable helpers. */
138 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
140 typedef locus_discrim_map value_type
;
141 typedef locus_discrim_map compare_type
;
142 static inline hashval_t
hash (const value_type
*);
143 static inline bool equal (const value_type
*, const compare_type
*);
146 /* Trivial hash function for a location_t. ITEM is a pointer to
147 a hash table entry that maps a location_t to a discriminator. */
150 locus_discrim_hasher::hash (const value_type
*item
)
152 return LOCATION_LINE (item
->locus
);
155 /* Equality function for the locus-to-discriminator map. A and B
156 point to the two hash table entries to compare. */
159 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
161 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
164 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
166 /* Basic blocks and flowgraphs. */
167 static void make_blocks (gimple_seq
);
170 static void make_edges (void);
171 static void assign_discriminators (void);
172 static void make_cond_expr_edges (basic_block
);
173 static void make_gimple_switch_edges (gswitch
*, basic_block
);
174 static bool make_goto_expr_edges (basic_block
);
175 static void make_gimple_asm_edges (basic_block
);
176 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
177 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
179 /* Various helpers. */
180 static inline bool stmt_starts_bb_p (gimple
, gimple
);
181 static int gimple_verify_flow_info (void);
182 static void gimple_make_forwarder_block (edge
);
183 static gimple
first_non_label_stmt (basic_block
);
184 static bool verify_gimple_transaction (gtransaction
*);
185 static bool call_can_make_abnormal_goto (gimple
);
187 /* Flowgraph optimization and cleanup. */
188 static void gimple_merge_blocks (basic_block
, basic_block
);
189 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
190 static void remove_bb (basic_block
);
191 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
192 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
193 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
194 static tree
find_case_label_for_value (gswitch
*, tree
);
197 init_empty_tree_cfg_for_function (struct function
*fn
)
199 /* Initialize the basic block array. */
201 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
202 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
203 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
204 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
205 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
206 initial_cfg_capacity
);
208 /* Build a mapping of labels to their associated blocks. */
209 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
210 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
211 initial_cfg_capacity
);
213 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
214 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
216 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
217 = EXIT_BLOCK_PTR_FOR_FN (fn
);
218 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
219 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
223 init_empty_tree_cfg (void)
225 init_empty_tree_cfg_for_function (cfun
);
228 /*---------------------------------------------------------------------------
230 ---------------------------------------------------------------------------*/
232 /* Entry point to the CFG builder for trees. SEQ is the sequence of
233 statements to be added to the flowgraph. */
236 build_gimple_cfg (gimple_seq seq
)
238 /* Register specific gimple functions. */
239 gimple_register_cfg_hooks ();
241 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
243 init_empty_tree_cfg ();
247 /* Make sure there is always at least one block, even if it's empty. */
248 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
249 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
251 /* Adjust the size of the array. */
252 if (basic_block_info_for_fn (cfun
)->length ()
253 < (size_t) n_basic_blocks_for_fn (cfun
))
254 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
255 n_basic_blocks_for_fn (cfun
));
257 /* To speed up statement iterator walks, we first purge dead labels. */
258 cleanup_dead_labels ();
260 /* Group case nodes to reduce the number of edges.
261 We do this after cleaning up dead labels because otherwise we miss
262 a lot of obvious case merging opportunities. */
263 group_case_labels ();
265 /* Create the edges of the flowgraph. */
266 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
268 assign_discriminators ();
269 cleanup_dead_labels ();
270 delete discriminator_per_locus
;
271 discriminator_per_locus
= NULL
;
274 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
275 them and propagate the information to LOOP. We assume that the annotations
276 come immediately before the condition in BB, if any. */
279 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
281 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
282 gimple stmt
= gsi_stmt (gsi
);
284 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
287 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
289 stmt
= gsi_stmt (gsi
);
290 if (gimple_code (stmt
) != GIMPLE_CALL
)
292 if (!gimple_call_internal_p (stmt
)
293 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
296 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
298 case annot_expr_ivdep_kind
:
299 loop
->safelen
= INT_MAX
;
301 case annot_expr_no_vector_kind
:
302 loop
->dont_vectorize
= true;
304 case annot_expr_vector_kind
:
305 loop
->force_vectorize
= true;
306 cfun
->has_force_vectorize_loops
= true;
312 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
313 gimple_call_arg (stmt
, 0));
314 gsi_replace (&gsi
, stmt
, true);
318 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
319 them and propagate the information to the loop. We assume that the
320 annotations come immediately before the condition of the loop. */
323 replace_loop_annotate (void)
327 gimple_stmt_iterator gsi
;
330 FOR_EACH_LOOP (loop
, 0)
332 /* First look into the header. */
333 replace_loop_annotate_in_block (loop
->header
, loop
);
335 /* Then look into the latch, if any. */
337 replace_loop_annotate_in_block (loop
->latch
, loop
);
340 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
341 FOR_EACH_BB_FN (bb
, cfun
)
343 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
345 stmt
= gsi_stmt (gsi
);
346 if (gimple_code (stmt
) != GIMPLE_CALL
)
348 if (!gimple_call_internal_p (stmt
)
349 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
352 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
354 case annot_expr_ivdep_kind
:
355 case annot_expr_no_vector_kind
:
356 case annot_expr_vector_kind
:
362 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
363 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
364 gimple_call_arg (stmt
, 0));
365 gsi_replace (&gsi
, stmt
, true);
372 execute_build_cfg (void)
374 gimple_seq body
= gimple_body (current_function_decl
);
376 build_gimple_cfg (body
);
377 gimple_set_body (current_function_decl
, NULL
);
378 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
380 fprintf (dump_file
, "Scope blocks:\n");
381 dump_scope_blocks (dump_file
, dump_flags
);
384 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
385 replace_loop_annotate ();
391 const pass_data pass_data_build_cfg
=
393 GIMPLE_PASS
, /* type */
395 OPTGROUP_NONE
, /* optinfo_flags */
396 TV_TREE_CFG
, /* tv_id */
397 PROP_gimple_leh
, /* properties_required */
398 ( PROP_cfg
| PROP_loops
), /* properties_provided */
399 0, /* properties_destroyed */
400 0, /* todo_flags_start */
401 0, /* todo_flags_finish */
404 class pass_build_cfg
: public gimple_opt_pass
407 pass_build_cfg (gcc::context
*ctxt
)
408 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
411 /* opt_pass methods: */
412 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
414 }; // class pass_build_cfg
419 make_pass_build_cfg (gcc::context
*ctxt
)
421 return new pass_build_cfg (ctxt
);
425 /* Return true if T is a computed goto. */
428 computed_goto_p (gimple t
)
430 return (gimple_code (t
) == GIMPLE_GOTO
431 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
434 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
435 the other edge points to a bb with just __builtin_unreachable ().
436 I.e. return true for C->M edge in:
444 __builtin_unreachable ();
448 assert_unreachable_fallthru_edge_p (edge e
)
450 basic_block pred_bb
= e
->src
;
451 gimple last
= last_stmt (pred_bb
);
452 if (last
&& gimple_code (last
) == GIMPLE_COND
)
454 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
455 if (other_bb
== e
->dest
)
456 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
457 if (EDGE_COUNT (other_bb
->succs
) == 0)
459 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
464 stmt
= gsi_stmt (gsi
);
465 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
470 stmt
= gsi_stmt (gsi
);
472 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
479 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
480 could alter control flow except via eh. We initialize the flag at
481 CFG build time and only ever clear it later. */
484 gimple_call_initialize_ctrl_altering (gimple stmt
)
486 int flags
= gimple_call_flags (stmt
);
488 /* A call alters control flow if it can make an abnormal goto. */
489 if (call_can_make_abnormal_goto (stmt
)
490 /* A call also alters control flow if it does not return. */
491 || flags
& ECF_NORETURN
492 /* TM ending statements have backedges out of the transaction.
493 Return true so we split the basic block containing them.
494 Note that the TM_BUILTIN test is merely an optimization. */
495 || ((flags
& ECF_TM_BUILTIN
)
496 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
497 /* BUILT_IN_RETURN call is same as return statement. */
498 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
499 gimple_call_set_ctrl_altering (stmt
, true);
501 gimple_call_set_ctrl_altering (stmt
, false);
505 /* Build a flowgraph for the sequence of stmts SEQ. */
508 make_blocks (gimple_seq seq
)
510 gimple_stmt_iterator i
= gsi_start (seq
);
512 bool start_new_block
= true;
513 bool first_stmt_of_seq
= true;
514 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
516 while (!gsi_end_p (i
))
523 if (stmt
&& is_gimple_call (stmt
))
524 gimple_call_initialize_ctrl_altering (stmt
);
526 /* If the statement starts a new basic block or if we have determined
527 in a previous pass that we need to create a new block for STMT, do
529 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
531 if (!first_stmt_of_seq
)
532 gsi_split_seq_before (&i
, &seq
);
533 bb
= create_basic_block (seq
, NULL
, bb
);
534 start_new_block
= false;
537 /* Now add STMT to BB and create the subgraphs for special statement
539 gimple_set_bb (stmt
, bb
);
541 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
543 if (stmt_ends_bb_p (stmt
))
545 /* If the stmt can make abnormal goto use a new temporary
546 for the assignment to the LHS. This makes sure the old value
547 of the LHS is available on the abnormal edge. Otherwise
548 we will end up with overlapping life-ranges for abnormal
550 if (gimple_has_lhs (stmt
)
551 && stmt_can_make_abnormal_goto (stmt
)
552 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
554 tree lhs
= gimple_get_lhs (stmt
);
555 tree tmp
= create_tmp_var (TREE_TYPE (lhs
));
556 gimple s
= gimple_build_assign (lhs
, tmp
);
557 gimple_set_location (s
, gimple_location (stmt
));
558 gimple_set_block (s
, gimple_block (stmt
));
559 gimple_set_lhs (stmt
, tmp
);
560 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
561 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
562 DECL_GIMPLE_REG_P (tmp
) = 1;
563 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
565 start_new_block
= true;
569 first_stmt_of_seq
= false;
574 /* Create and return a new empty basic block after bb AFTER. */
577 create_bb (void *h
, void *e
, basic_block after
)
583 /* Create and initialize a new basic block. Since alloc_block uses
584 GC allocation that clears memory to allocate a basic block, we do
585 not have to clear the newly allocated basic block here. */
588 bb
->index
= last_basic_block_for_fn (cfun
);
590 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
592 /* Add the new block to the linked list of blocks. */
593 link_block (bb
, after
);
595 /* Grow the basic block array if needed. */
596 if ((size_t) last_basic_block_for_fn (cfun
)
597 == basic_block_info_for_fn (cfun
)->length ())
600 (last_basic_block_for_fn (cfun
)
601 + (last_basic_block_for_fn (cfun
) + 3) / 4);
602 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
605 /* Add the newly created block to the array. */
606 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
608 n_basic_blocks_for_fn (cfun
)++;
609 last_basic_block_for_fn (cfun
)++;
615 /*---------------------------------------------------------------------------
617 ---------------------------------------------------------------------------*/
619 /* Fold COND_EXPR_COND of each COND_EXPR. */
622 fold_cond_expr_cond (void)
626 FOR_EACH_BB_FN (bb
, cfun
)
628 gimple stmt
= last_stmt (bb
);
630 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
632 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
633 location_t loc
= gimple_location (stmt
);
637 fold_defer_overflow_warnings ();
638 cond
= fold_binary_loc (loc
, gimple_cond_code (cond_stmt
),
640 gimple_cond_lhs (cond_stmt
),
641 gimple_cond_rhs (cond_stmt
));
644 zerop
= integer_zerop (cond
);
645 onep
= integer_onep (cond
);
648 zerop
= onep
= false;
650 fold_undefer_overflow_warnings (zerop
|| onep
,
652 WARN_STRICT_OVERFLOW_CONDITIONAL
);
654 gimple_cond_make_false (cond_stmt
);
656 gimple_cond_make_true (cond_stmt
);
661 /* If basic block BB has an abnormal edge to a basic block
662 containing IFN_ABNORMAL_DISPATCHER internal call, return
663 that the dispatcher's basic block, otherwise return NULL. */
666 get_abnormal_succ_dispatcher (basic_block bb
)
671 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
672 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
674 gimple_stmt_iterator gsi
675 = gsi_start_nondebug_after_labels_bb (e
->dest
);
676 gimple g
= gsi_stmt (gsi
);
678 && is_gimple_call (g
)
679 && gimple_call_internal_p (g
)
680 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
686 /* Helper function for make_edges. Create a basic block with
687 with ABNORMAL_DISPATCHER internal call in it if needed, and
688 create abnormal edges from BBS to it and from it to FOR_BB
689 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
692 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
693 basic_block for_bb
, int *bb_to_omp_idx
,
694 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
696 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
697 unsigned int idx
= 0;
703 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
704 if (bb_to_omp_idx
[for_bb
->index
] != 0)
708 /* If the dispatcher has been created already, then there are basic
709 blocks with abnormal edges to it, so just make a new edge to
711 if (*dispatcher
== NULL
)
713 /* Check if there are any basic blocks that need to have
714 abnormal edges to this dispatcher. If there are none, return
716 if (bb_to_omp_idx
== NULL
)
718 if (bbs
->is_empty ())
723 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
724 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
730 /* Create the dispatcher bb. */
731 *dispatcher
= create_basic_block (NULL
, NULL
, for_bb
);
734 /* Factor computed gotos into a common computed goto site. Also
735 record the location of that site so that we can un-factor the
736 gotos after we have converted back to normal form. */
737 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
739 /* Create the destination of the factored goto. Each original
740 computed goto will put its desired destination into this
741 variable and jump to the label we create immediately below. */
742 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
744 /* Build a label for the new block which will contain the
745 factored computed goto. */
746 tree factored_label_decl
747 = create_artificial_label (UNKNOWN_LOCATION
);
748 gimple factored_computed_goto_label
749 = gimple_build_label (factored_label_decl
);
750 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
752 /* Build our new computed goto. */
753 gimple factored_computed_goto
= gimple_build_goto (var
);
754 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
756 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
759 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
762 gsi
= gsi_last_bb (bb
);
763 gimple last
= gsi_stmt (gsi
);
765 gcc_assert (computed_goto_p (last
));
767 /* Copy the original computed goto's destination into VAR. */
769 = gimple_build_assign (var
, gimple_goto_dest (last
));
770 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
772 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
773 e
->goto_locus
= gimple_location (last
);
774 gsi_remove (&gsi
, true);
779 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
780 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
782 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
783 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
785 /* Create predecessor edges of the dispatcher. */
786 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
789 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
791 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
796 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
799 /* Join all the blocks in the flowgraph. */
805 struct omp_region
*cur_region
= NULL
;
806 auto_vec
<basic_block
> ab_edge_goto
;
807 auto_vec
<basic_block
> ab_edge_call
;
808 int *bb_to_omp_idx
= NULL
;
809 int cur_omp_region_idx
= 0;
811 /* Create an edge from entry to the first block with executable
813 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
814 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
817 /* Traverse the basic block array placing edges. */
818 FOR_EACH_BB_FN (bb
, cfun
)
820 gimple last
= last_stmt (bb
);
824 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
828 enum gimple_code code
= gimple_code (last
);
832 if (make_goto_expr_edges (bb
))
833 ab_edge_goto
.safe_push (bb
);
838 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
839 e
->goto_locus
= gimple_location (last
);
844 make_cond_expr_edges (bb
);
848 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
852 make_eh_edges (last
);
855 case GIMPLE_EH_DISPATCH
:
856 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
860 /* If this function receives a nonlocal goto, then we need to
861 make edges from this call site to all the nonlocal goto
863 if (stmt_can_make_abnormal_goto (last
))
864 ab_edge_call
.safe_push (bb
);
866 /* If this statement has reachable exception handlers, then
867 create abnormal edges to them. */
868 make_eh_edges (last
);
870 /* BUILTIN_RETURN is really a return statement. */
871 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
873 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
876 /* Some calls are known not to return. */
878 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
882 /* A GIMPLE_ASSIGN may throw internally and thus be considered
884 if (is_ctrl_altering_stmt (last
))
885 make_eh_edges (last
);
890 make_gimple_asm_edges (bb
);
895 fallthru
= make_gimple_omp_edges (bb
, &cur_region
,
896 &cur_omp_region_idx
);
897 if (cur_region
&& bb_to_omp_idx
== NULL
)
898 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
901 case GIMPLE_TRANSACTION
:
904 = gimple_transaction_label (as_a
<gtransaction
*> (last
));
906 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
912 gcc_assert (!stmt_ends_bb_p (last
));
920 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
923 /* Computed gotos are hell to deal with, especially if there are
924 lots of them with a large number of destinations. So we factor
925 them to a common computed goto location before we build the
926 edge list. After we convert back to normal form, we will un-factor
927 the computed gotos since factoring introduces an unwanted jump.
928 For non-local gotos and abnormal edges from calls to calls that return
929 twice or forced labels, factor the abnormal edges too, by having all
930 abnormal edges from the calls go to a common artificial basic block
931 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
932 basic block to all forced labels and calls returning twice.
933 We do this per-OpenMP structured block, because those regions
934 are guaranteed to be single entry single exit by the standard,
935 so it is not allowed to enter or exit such regions abnormally this way,
936 thus all computed gotos, non-local gotos and setjmp/longjmp calls
937 must not transfer control across SESE region boundaries. */
938 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
940 gimple_stmt_iterator gsi
;
941 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
942 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
943 int count
= n_basic_blocks_for_fn (cfun
);
946 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
948 FOR_EACH_BB_FN (bb
, cfun
)
950 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
952 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
958 target
= gimple_label_label (label_stmt
);
960 /* Make an edge to every label block that has been marked as a
961 potential target for a computed goto or a non-local goto. */
962 if (FORCED_LABEL (target
))
963 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
964 &ab_edge_goto
, true);
965 if (DECL_NONLOCAL (target
))
967 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
968 &ab_edge_call
, false);
973 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
974 gsi_next_nondebug (&gsi
);
975 if (!gsi_end_p (gsi
))
977 /* Make an edge to every setjmp-like call. */
978 gimple call_stmt
= gsi_stmt (gsi
);
979 if (is_gimple_call (call_stmt
)
980 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
981 || gimple_call_builtin_p (call_stmt
,
982 BUILT_IN_SETJMP_RECEIVER
)))
983 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
984 &ab_edge_call
, false);
989 XDELETE (dispatcher_bbs
);
992 XDELETE (bb_to_omp_idx
);
996 /* Fold COND_EXPR_COND of each COND_EXPR. */
997 fold_cond_expr_cond ();
1000 /* Find the next available discriminator value for LOCUS. The
1001 discriminator distinguishes among several basic blocks that
1002 share a common locus, allowing for more accurate sample-based
1006 next_discriminator_for_locus (location_t locus
)
1008 struct locus_discrim_map item
;
1009 struct locus_discrim_map
**slot
;
1012 item
.discriminator
= 0;
1013 slot
= discriminator_per_locus
->find_slot_with_hash (
1014 &item
, LOCATION_LINE (locus
), INSERT
);
1016 if (*slot
== HTAB_EMPTY_ENTRY
)
1018 *slot
= XNEW (struct locus_discrim_map
);
1020 (*slot
)->locus
= locus
;
1021 (*slot
)->discriminator
= 0;
1023 (*slot
)->discriminator
++;
1024 return (*slot
)->discriminator
;
1027 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1030 same_line_p (location_t locus1
, location_t locus2
)
1032 expanded_location from
, to
;
1034 if (locus1
== locus2
)
1037 from
= expand_location (locus1
);
1038 to
= expand_location (locus2
);
1040 if (from
.line
!= to
.line
)
1042 if (from
.file
== to
.file
)
1044 return (from
.file
!= NULL
1046 && filename_cmp (from
.file
, to
.file
) == 0);
1049 /* Assign discriminators to each basic block. */
1052 assign_discriminators (void)
1056 FOR_EACH_BB_FN (bb
, cfun
)
1060 gimple last
= last_stmt (bb
);
1061 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1063 if (locus
== UNKNOWN_LOCATION
)
1066 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1068 gimple first
= first_non_label_stmt (e
->dest
);
1069 gimple last
= last_stmt (e
->dest
);
1070 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1071 || (last
&& same_line_p (locus
, gimple_location (last
))))
1073 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1074 bb
->discriminator
= next_discriminator_for_locus (locus
);
1076 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1082 /* Create the edges for a GIMPLE_COND starting at block BB. */
1085 make_cond_expr_edges (basic_block bb
)
1087 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1088 gimple then_stmt
, else_stmt
;
1089 basic_block then_bb
, else_bb
;
1090 tree then_label
, else_label
;
1094 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1096 /* Entry basic blocks for each component. */
1097 then_label
= gimple_cond_true_label (entry
);
1098 else_label
= gimple_cond_false_label (entry
);
1099 then_bb
= label_to_block (then_label
);
1100 else_bb
= label_to_block (else_label
);
1101 then_stmt
= first_stmt (then_bb
);
1102 else_stmt
= first_stmt (else_bb
);
1104 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1105 e
->goto_locus
= gimple_location (then_stmt
);
1106 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1108 e
->goto_locus
= gimple_location (else_stmt
);
1110 /* We do not need the labels anymore. */
1111 gimple_cond_set_true_label (entry
, NULL_TREE
);
1112 gimple_cond_set_false_label (entry
, NULL_TREE
);
1116 /* Called for each element in the hash table (P) as we delete the
1117 edge to cases hash table.
1119 Clear all the TREE_CHAINs to prevent problems with copying of
1120 SWITCH_EXPRs and structure sharing rules, then free the hash table
1124 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1128 for (t
= value
; t
; t
= next
)
1130 next
= CASE_CHAIN (t
);
1131 CASE_CHAIN (t
) = NULL
;
1137 /* Start recording information mapping edges to case labels. */
1140 start_recording_case_labels (void)
1142 gcc_assert (edge_to_cases
== NULL
);
1143 edge_to_cases
= new hash_map
<edge
, tree
>;
1144 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1147 /* Return nonzero if we are recording information for case labels. */
1150 recording_case_labels_p (void)
1152 return (edge_to_cases
!= NULL
);
1155 /* Stop recording information mapping edges to case labels and
1156 remove any information we have recorded. */
1158 end_recording_case_labels (void)
1162 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1163 delete edge_to_cases
;
1164 edge_to_cases
= NULL
;
1165 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1167 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1170 gimple stmt
= last_stmt (bb
);
1171 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1172 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1175 BITMAP_FREE (touched_switch_bbs
);
1178 /* If we are inside a {start,end}_recording_cases block, then return
1179 a chain of CASE_LABEL_EXPRs from T which reference E.
1181 Otherwise return NULL. */
1184 get_cases_for_edge (edge e
, gswitch
*t
)
1189 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1190 chains available. Return NULL so the caller can detect this case. */
1191 if (!recording_case_labels_p ())
1194 slot
= edge_to_cases
->get (e
);
1198 /* If we did not find E in the hash table, then this must be the first
1199 time we have been queried for information about E & T. Add all the
1200 elements from T to the hash table then perform the query again. */
1202 n
= gimple_switch_num_labels (t
);
1203 for (i
= 0; i
< n
; i
++)
1205 tree elt
= gimple_switch_label (t
, i
);
1206 tree lab
= CASE_LABEL (elt
);
1207 basic_block label_bb
= label_to_block (lab
);
1208 edge this_edge
= find_edge (e
->src
, label_bb
);
1210 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1212 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1213 CASE_CHAIN (elt
) = s
;
1217 return *edge_to_cases
->get (e
);
1220 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1223 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1227 n
= gimple_switch_num_labels (entry
);
1229 for (i
= 0; i
< n
; ++i
)
1231 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1232 basic_block label_bb
= label_to_block (lab
);
1233 make_edge (bb
, label_bb
, 0);
1238 /* Return the basic block holding label DEST. */
1241 label_to_block_fn (struct function
*ifun
, tree dest
)
1243 int uid
= LABEL_DECL_UID (dest
);
1245 /* We would die hard when faced by an undefined label. Emit a label to
1246 the very first basic block. This will hopefully make even the dataflow
1247 and undefined variable warnings quite right. */
1248 if (seen_error () && uid
< 0)
1250 gimple_stmt_iterator gsi
=
1251 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1254 stmt
= gimple_build_label (dest
);
1255 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1256 uid
= LABEL_DECL_UID (dest
);
1258 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1260 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1263 /* Create edges for a goto statement at block BB. Returns true
1264 if abnormal edges should be created. */
1267 make_goto_expr_edges (basic_block bb
)
1269 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1270 gimple goto_t
= gsi_stmt (last
);
1272 /* A simple GOTO creates normal edges. */
1273 if (simple_goto_p (goto_t
))
1275 tree dest
= gimple_goto_dest (goto_t
);
1276 basic_block label_bb
= label_to_block (dest
);
1277 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1278 e
->goto_locus
= gimple_location (goto_t
);
1279 gsi_remove (&last
, true);
1283 /* A computed GOTO creates abnormal edges. */
1287 /* Create edges for an asm statement with labels at block BB. */
1290 make_gimple_asm_edges (basic_block bb
)
1292 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1293 int i
, n
= gimple_asm_nlabels (stmt
);
1295 for (i
= 0; i
< n
; ++i
)
1297 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1298 basic_block label_bb
= label_to_block (label
);
1299 make_edge (bb
, label_bb
, 0);
1303 /*---------------------------------------------------------------------------
1305 ---------------------------------------------------------------------------*/
1307 /* Cleanup useless labels in basic blocks. This is something we wish
1308 to do early because it allows us to group case labels before creating
1309 the edges for the CFG, and it speeds up block statement iterators in
1310 all passes later on.
1311 We rerun this pass after CFG is created, to get rid of the labels that
1312 are no longer referenced. After then we do not run it any more, since
1313 (almost) no new labels should be created. */
1315 /* A map from basic block index to the leading label of that block. */
1316 static struct label_record
1321 /* True if the label is referenced from somewhere. */
1325 /* Given LABEL return the first label in the same basic block. */
1328 main_block_label (tree label
)
1330 basic_block bb
= label_to_block (label
);
1331 tree main_label
= label_for_bb
[bb
->index
].label
;
1333 /* label_to_block possibly inserted undefined label into the chain. */
1336 label_for_bb
[bb
->index
].label
= label
;
1340 label_for_bb
[bb
->index
].used
= true;
1344 /* Clean up redundant labels within the exception tree. */
1347 cleanup_dead_labels_eh (void)
1354 if (cfun
->eh
== NULL
)
1357 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1358 if (lp
&& lp
->post_landing_pad
)
1360 lab
= main_block_label (lp
->post_landing_pad
);
1361 if (lab
!= lp
->post_landing_pad
)
1363 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1364 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1368 FOR_ALL_EH_REGION (r
)
1372 case ERT_MUST_NOT_THROW
:
1378 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1382 c
->label
= main_block_label (lab
);
1387 case ERT_ALLOWED_EXCEPTIONS
:
1388 lab
= r
->u
.allowed
.label
;
1390 r
->u
.allowed
.label
= main_block_label (lab
);
1396 /* Cleanup redundant labels. This is a three-step process:
1397 1) Find the leading label for each block.
1398 2) Redirect all references to labels to the leading labels.
1399 3) Cleanup all useless labels. */
1402 cleanup_dead_labels (void)
1405 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1407 /* Find a suitable label for each block. We use the first user-defined
1408 label if there is one, or otherwise just the first label we see. */
1409 FOR_EACH_BB_FN (bb
, cfun
)
1411 gimple_stmt_iterator i
;
1413 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1416 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1421 label
= gimple_label_label (label_stmt
);
1423 /* If we have not yet seen a label for the current block,
1424 remember this one and see if there are more labels. */
1425 if (!label_for_bb
[bb
->index
].label
)
1427 label_for_bb
[bb
->index
].label
= label
;
1431 /* If we did see a label for the current block already, but it
1432 is an artificially created label, replace it if the current
1433 label is a user defined label. */
1434 if (!DECL_ARTIFICIAL (label
)
1435 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1437 label_for_bb
[bb
->index
].label
= label
;
1443 /* Now redirect all jumps/branches to the selected label.
1444 First do so for each block ending in a control statement. */
1445 FOR_EACH_BB_FN (bb
, cfun
)
1447 gimple stmt
= last_stmt (bb
);
1448 tree label
, new_label
;
1453 switch (gimple_code (stmt
))
1457 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1458 label
= gimple_cond_true_label (cond_stmt
);
1461 new_label
= main_block_label (label
);
1462 if (new_label
!= label
)
1463 gimple_cond_set_true_label (cond_stmt
, new_label
);
1466 label
= gimple_cond_false_label (cond_stmt
);
1469 new_label
= main_block_label (label
);
1470 if (new_label
!= label
)
1471 gimple_cond_set_false_label (cond_stmt
, new_label
);
1478 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1479 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1481 /* Replace all destination labels. */
1482 for (i
= 0; i
< n
; ++i
)
1484 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1485 label
= CASE_LABEL (case_label
);
1486 new_label
= main_block_label (label
);
1487 if (new_label
!= label
)
1488 CASE_LABEL (case_label
) = new_label
;
1495 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1496 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1498 for (i
= 0; i
< n
; ++i
)
1500 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1501 tree label
= main_block_label (TREE_VALUE (cons
));
1502 TREE_VALUE (cons
) = label
;
1507 /* We have to handle gotos until they're removed, and we don't
1508 remove them until after we've created the CFG edges. */
1510 if (!computed_goto_p (stmt
))
1512 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1513 label
= gimple_goto_dest (goto_stmt
);
1514 new_label
= main_block_label (label
);
1515 if (new_label
!= label
)
1516 gimple_goto_set_dest (goto_stmt
, new_label
);
1520 case GIMPLE_TRANSACTION
:
1522 gtransaction
*trans_stmt
= as_a
<gtransaction
*> (stmt
);
1523 tree label
= gimple_transaction_label (trans_stmt
);
1526 tree new_label
= main_block_label (label
);
1527 if (new_label
!= label
)
1528 gimple_transaction_set_label (trans_stmt
, new_label
);
1538 /* Do the same for the exception region tree labels. */
1539 cleanup_dead_labels_eh ();
1541 /* Finally, purge dead labels. All user-defined labels and labels that
1542 can be the target of non-local gotos and labels which have their
1543 address taken are preserved. */
1544 FOR_EACH_BB_FN (bb
, cfun
)
1546 gimple_stmt_iterator i
;
1547 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1549 if (!label_for_this_bb
)
1552 /* If the main label of the block is unused, we may still remove it. */
1553 if (!label_for_bb
[bb
->index
].used
)
1554 label_for_this_bb
= NULL
;
1556 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1559 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1564 label
= gimple_label_label (label_stmt
);
1566 if (label
== label_for_this_bb
1567 || !DECL_ARTIFICIAL (label
)
1568 || DECL_NONLOCAL (label
)
1569 || FORCED_LABEL (label
))
1572 gsi_remove (&i
, true);
1576 free (label_for_bb
);
1579 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1580 the ones jumping to the same label.
1581 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1584 group_case_labels_stmt (gswitch
*stmt
)
1586 int old_size
= gimple_switch_num_labels (stmt
);
1587 int i
, j
, new_size
= old_size
;
1588 basic_block default_bb
= NULL
;
1590 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1592 /* Look for possible opportunities to merge cases. */
1594 while (i
< old_size
)
1596 tree base_case
, base_high
;
1597 basic_block base_bb
;
1599 base_case
= gimple_switch_label (stmt
, i
);
1601 gcc_assert (base_case
);
1602 base_bb
= label_to_block (CASE_LABEL (base_case
));
1604 /* Discard cases that have the same destination as the
1606 if (base_bb
== default_bb
)
1608 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1614 base_high
= CASE_HIGH (base_case
)
1615 ? CASE_HIGH (base_case
)
1616 : CASE_LOW (base_case
);
1619 /* Try to merge case labels. Break out when we reach the end
1620 of the label vector or when we cannot merge the next case
1621 label with the current one. */
1622 while (i
< old_size
)
1624 tree merge_case
= gimple_switch_label (stmt
, i
);
1625 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1626 wide_int bhp1
= wi::add (base_high
, 1);
1628 /* Merge the cases if they jump to the same place,
1629 and their ranges are consecutive. */
1630 if (merge_bb
== base_bb
1631 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1633 base_high
= CASE_HIGH (merge_case
) ?
1634 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1635 CASE_HIGH (base_case
) = base_high
;
1636 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1645 /* Compress the case labels in the label vector, and adjust the
1646 length of the vector. */
1647 for (i
= 0, j
= 0; i
< new_size
; i
++)
1649 while (! gimple_switch_label (stmt
, j
))
1651 gimple_switch_set_label (stmt
, i
,
1652 gimple_switch_label (stmt
, j
++));
1655 gcc_assert (new_size
<= old_size
);
1656 gimple_switch_set_num_labels (stmt
, new_size
);
1659 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1660 and scan the sorted vector of cases. Combine the ones jumping to the
1664 group_case_labels (void)
1668 FOR_EACH_BB_FN (bb
, cfun
)
1670 gimple stmt
= last_stmt (bb
);
1671 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1672 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1676 /* Checks whether we can merge block B into block A. */
1679 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1683 if (!single_succ_p (a
))
1686 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1689 if (single_succ (a
) != b
)
1692 if (!single_pred_p (b
))
1695 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1698 /* If A ends by a statement causing exceptions or something similar, we
1699 cannot merge the blocks. */
1700 stmt
= last_stmt (a
);
1701 if (stmt
&& stmt_ends_bb_p (stmt
))
1704 /* Do not allow a block with only a non-local label to be merged. */
1706 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1707 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1710 /* Examine the labels at the beginning of B. */
1711 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1715 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1718 lab
= gimple_label_label (label_stmt
);
1720 /* Do not remove user forced labels or for -O0 any user labels. */
1721 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1725 /* Protect simple loop latches. We only want to avoid merging
1726 the latch with the loop header in this case. */
1728 && b
->loop_father
->latch
== b
1729 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1730 && b
->loop_father
->header
== a
)
1733 /* It must be possible to eliminate all phi nodes in B. If ssa form
1734 is not up-to-date and a name-mapping is registered, we cannot eliminate
1735 any phis. Symbols marked for renaming are never a problem though. */
1736 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1739 gphi
*phi
= gsi
.phi ();
1740 /* Technically only new names matter. */
1741 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1745 /* When not optimizing, don't merge if we'd lose goto_locus. */
1747 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1749 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1750 gimple_stmt_iterator prev
, next
;
1751 prev
= gsi_last_nondebug_bb (a
);
1752 next
= gsi_after_labels (b
);
1753 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1754 gsi_next_nondebug (&next
);
1755 if ((gsi_end_p (prev
)
1756 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1757 && (gsi_end_p (next
)
1758 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1765 /* Replaces all uses of NAME by VAL. */
1768 replace_uses_by (tree name
, tree val
)
1770 imm_use_iterator imm_iter
;
1775 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1777 /* Mark the block if we change the last stmt in it. */
1778 if (cfgcleanup_altered_bbs
1779 && stmt_ends_bb_p (stmt
))
1780 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1782 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1784 replace_exp (use
, val
);
1786 if (gimple_code (stmt
) == GIMPLE_PHI
)
1788 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1789 PHI_ARG_INDEX_FROM_USE (use
));
1790 if (e
->flags
& EDGE_ABNORMAL
1791 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
))
1793 /* This can only occur for virtual operands, since
1794 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1795 would prevent replacement. */
1796 gcc_checking_assert (virtual_operand_p (name
));
1797 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1802 if (gimple_code (stmt
) != GIMPLE_PHI
)
1804 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1805 gimple orig_stmt
= stmt
;
1808 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1809 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1810 only change sth from non-invariant to invariant, and only
1811 when propagating constants. */
1812 if (is_gimple_min_invariant (val
))
1813 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1815 tree op
= gimple_op (stmt
, i
);
1816 /* Operands may be empty here. For example, the labels
1817 of a GIMPLE_COND are nulled out following the creation
1818 of the corresponding CFG edges. */
1819 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1820 recompute_tree_invariant_for_addr_expr (op
);
1823 if (fold_stmt (&gsi
))
1824 stmt
= gsi_stmt (gsi
);
1826 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1827 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1833 gcc_checking_assert (has_zero_uses (name
));
1835 /* Also update the trees stored in loop structures. */
1840 FOR_EACH_LOOP (loop
, 0)
1842 substitute_in_loop_info (loop
, name
, val
);
1847 /* Merge block B into block A. */
1850 gimple_merge_blocks (basic_block a
, basic_block b
)
1852 gimple_stmt_iterator last
, gsi
;
1856 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1858 /* Remove all single-valued PHI nodes from block B of the form
1859 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1860 gsi
= gsi_last_bb (a
);
1861 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1863 gimple phi
= gsi_stmt (psi
);
1864 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1866 bool may_replace_uses
= (virtual_operand_p (def
)
1867 || may_propagate_copy (def
, use
));
1869 /* In case we maintain loop closed ssa form, do not propagate arguments
1870 of loop exit phi nodes. */
1872 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1873 && !virtual_operand_p (def
)
1874 && TREE_CODE (use
) == SSA_NAME
1875 && a
->loop_father
!= b
->loop_father
)
1876 may_replace_uses
= false;
1878 if (!may_replace_uses
)
1880 gcc_assert (!virtual_operand_p (def
));
1882 /* Note that just emitting the copies is fine -- there is no problem
1883 with ordering of phi nodes. This is because A is the single
1884 predecessor of B, therefore results of the phi nodes cannot
1885 appear as arguments of the phi nodes. */
1886 copy
= gimple_build_assign (def
, use
);
1887 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1888 remove_phi_node (&psi
, false);
1892 /* If we deal with a PHI for virtual operands, we can simply
1893 propagate these without fussing with folding or updating
1895 if (virtual_operand_p (def
))
1897 imm_use_iterator iter
;
1898 use_operand_p use_p
;
1901 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1902 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1903 SET_USE (use_p
, use
);
1905 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1906 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1909 replace_uses_by (def
, use
);
1911 remove_phi_node (&psi
, true);
1915 /* Ensure that B follows A. */
1916 move_block_after (b
, a
);
1918 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1919 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1921 /* Remove labels from B and set gimple_bb to A for other statements. */
1922 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1924 gimple stmt
= gsi_stmt (gsi
);
1925 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1927 tree label
= gimple_label_label (label_stmt
);
1930 gsi_remove (&gsi
, false);
1932 /* Now that we can thread computed gotos, we might have
1933 a situation where we have a forced label in block B
1934 However, the label at the start of block B might still be
1935 used in other ways (think about the runtime checking for
1936 Fortran assigned gotos). So we can not just delete the
1937 label. Instead we move the label to the start of block A. */
1938 if (FORCED_LABEL (label
))
1940 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1941 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1943 /* Other user labels keep around in a form of a debug stmt. */
1944 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1946 gimple dbg
= gimple_build_debug_bind (label
,
1949 gimple_debug_bind_reset_value (dbg
);
1950 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1953 lp_nr
= EH_LANDING_PAD_NR (label
);
1956 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1957 lp
->post_landing_pad
= NULL
;
1962 gimple_set_bb (stmt
, a
);
1967 /* When merging two BBs, if their counts are different, the larger count
1968 is selected as the new bb count. This is to handle inconsistent
1970 if (a
->loop_father
== b
->loop_father
)
1972 a
->count
= MAX (a
->count
, b
->count
);
1973 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
1976 /* Merge the sequences. */
1977 last
= gsi_last_bb (a
);
1978 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1979 set_bb_seq (b
, NULL
);
1981 if (cfgcleanup_altered_bbs
)
1982 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1986 /* Return the one of two successors of BB that is not reachable by a
1987 complex edge, if there is one. Else, return BB. We use
1988 this in optimizations that use post-dominators for their heuristics,
1989 to catch the cases in C++ where function calls are involved. */
1992 single_noncomplex_succ (basic_block bb
)
1995 if (EDGE_COUNT (bb
->succs
) != 2)
1998 e0
= EDGE_SUCC (bb
, 0);
1999 e1
= EDGE_SUCC (bb
, 1);
2000 if (e0
->flags
& EDGE_COMPLEX
)
2002 if (e1
->flags
& EDGE_COMPLEX
)
2008 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2011 notice_special_calls (gcall
*call
)
2013 int flags
= gimple_call_flags (call
);
2015 if (flags
& ECF_MAY_BE_ALLOCA
)
2016 cfun
->calls_alloca
= true;
2017 if (flags
& ECF_RETURNS_TWICE
)
2018 cfun
->calls_setjmp
= true;
2022 /* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2026 clear_special_calls (void)
2028 cfun
->calls_alloca
= false;
2029 cfun
->calls_setjmp
= false;
2032 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2035 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2037 /* Since this block is no longer reachable, we can just delete all
2038 of its PHI nodes. */
2039 remove_phi_nodes (bb
);
2041 /* Remove edges to BB's successors. */
2042 while (EDGE_COUNT (bb
->succs
) > 0)
2043 remove_edge (EDGE_SUCC (bb
, 0));
2047 /* Remove statements of basic block BB. */
2050 remove_bb (basic_block bb
)
2052 gimple_stmt_iterator i
;
2056 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2057 if (dump_flags
& TDF_DETAILS
)
2059 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2060 fprintf (dump_file
, "\n");
2066 struct loop
*loop
= bb
->loop_father
;
2068 /* If a loop gets removed, clean up the information associated
2070 if (loop
->latch
== bb
2071 || loop
->header
== bb
)
2072 free_numbers_of_iterations_estimates_loop (loop
);
2075 /* Remove all the instructions in the block. */
2076 if (bb_seq (bb
) != NULL
)
2078 /* Walk backwards so as to get a chance to substitute all
2079 released DEFs into debug stmts. See
2080 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2082 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2084 gimple stmt
= gsi_stmt (i
);
2085 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2087 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2088 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2091 gimple_stmt_iterator new_gsi
;
2093 /* A non-reachable non-local label may still be referenced.
2094 But it no longer needs to carry the extra semantics of
2096 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2098 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2099 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
2102 new_bb
= bb
->prev_bb
;
2103 new_gsi
= gsi_start_bb (new_bb
);
2104 gsi_remove (&i
, false);
2105 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2109 /* Release SSA definitions if we are in SSA. Note that we
2110 may be called when not in SSA. For example,
2111 final_cleanup calls this function via
2112 cleanup_tree_cfg. */
2113 if (gimple_in_ssa_p (cfun
))
2114 release_defs (stmt
);
2116 gsi_remove (&i
, true);
2120 i
= gsi_last_bb (bb
);
2126 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2127 bb
->il
.gimple
.seq
= NULL
;
2128 bb
->il
.gimple
.phi_nodes
= NULL
;
2132 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2133 predicate VAL, return the edge that will be taken out of the block.
2134 If VAL does not match a unique edge, NULL is returned. */
2137 find_taken_edge (basic_block bb
, tree val
)
2141 stmt
= last_stmt (bb
);
2144 gcc_assert (is_ctrl_stmt (stmt
));
2149 if (!is_gimple_min_invariant (val
))
2152 if (gimple_code (stmt
) == GIMPLE_COND
)
2153 return find_taken_edge_cond_expr (bb
, val
);
2155 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2156 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2158 if (computed_goto_p (stmt
))
2160 /* Only optimize if the argument is a label, if the argument is
2161 not a label then we can not construct a proper CFG.
2163 It may be the case that we only need to allow the LABEL_REF to
2164 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2165 appear inside a LABEL_EXPR just to be safe. */
2166 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2167 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2168 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2175 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2176 statement, determine which of the outgoing edges will be taken out of the
2177 block. Return NULL if either edge may be taken. */
2180 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2185 dest
= label_to_block (val
);
2188 e
= find_edge (bb
, dest
);
2189 gcc_assert (e
!= NULL
);
2195 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2196 statement, determine which of the two edges will be taken out of the
2197 block. Return NULL if either edge may be taken. */
2200 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2202 edge true_edge
, false_edge
;
2204 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2206 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2207 return (integer_zerop (val
) ? false_edge
: true_edge
);
2210 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2211 statement, determine which edge will be taken out of the block. Return
2212 NULL if any edge may be taken. */
2215 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2218 basic_block dest_bb
;
2222 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2223 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2225 e
= find_edge (bb
, dest_bb
);
2231 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2232 We can make optimal use here of the fact that the case labels are
2233 sorted: We can do a binary search for a case matching VAL. */
2236 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2238 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2239 tree default_case
= gimple_switch_default_label (switch_stmt
);
2241 for (low
= 0, high
= n
; high
- low
> 1; )
2243 size_t i
= (high
+ low
) / 2;
2244 tree t
= gimple_switch_label (switch_stmt
, i
);
2247 /* Cache the result of comparing CASE_LOW and val. */
2248 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2255 if (CASE_HIGH (t
) == NULL
)
2257 /* A singe-valued case label. */
2263 /* A case range. We can only handle integer ranges. */
2264 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2269 return default_case
;
2273 /* Dump a basic block on stderr. */
2276 gimple_debug_bb (basic_block bb
)
2278 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2282 /* Dump basic block with index N on stderr. */
2285 gimple_debug_bb_n (int n
)
2287 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2288 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2292 /* Dump the CFG on stderr.
2294 FLAGS are the same used by the tree dumping functions
2295 (see TDF_* in dumpfile.h). */
2298 gimple_debug_cfg (int flags
)
2300 gimple_dump_cfg (stderr
, flags
);
2304 /* Dump the program showing basic block boundaries on the given FILE.
2306 FLAGS are the same used by the tree dumping functions (see TDF_* in
2310 gimple_dump_cfg (FILE *file
, int flags
)
2312 if (flags
& TDF_DETAILS
)
2314 dump_function_header (file
, current_function_decl
, flags
);
2315 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2316 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2317 last_basic_block_for_fn (cfun
));
2319 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2320 fprintf (file
, "\n");
2323 if (flags
& TDF_STATS
)
2324 dump_cfg_stats (file
);
2326 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2330 /* Dump CFG statistics on FILE. */
2333 dump_cfg_stats (FILE *file
)
2335 static long max_num_merged_labels
= 0;
2336 unsigned long size
, total
= 0;
2339 const char * const fmt_str
= "%-30s%-13s%12s\n";
2340 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2341 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2342 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2343 const char *funcname
= current_function_name ();
2345 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2347 fprintf (file
, "---------------------------------------------------------\n");
2348 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2349 fprintf (file
, fmt_str
, "", " instances ", "used ");
2350 fprintf (file
, "---------------------------------------------------------\n");
2352 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2354 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2355 SCALE (size
), LABEL (size
));
2358 FOR_EACH_BB_FN (bb
, cfun
)
2359 num_edges
+= EDGE_COUNT (bb
->succs
);
2360 size
= num_edges
* sizeof (struct edge_def
);
2362 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2364 fprintf (file
, "---------------------------------------------------------\n");
2365 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2367 fprintf (file
, "---------------------------------------------------------\n");
2368 fprintf (file
, "\n");
2370 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2371 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2373 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2374 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2376 fprintf (file
, "\n");
2380 /* Dump CFG statistics on stderr. Keep extern so that it's always
2381 linked in the final executable. */
2384 debug_cfg_stats (void)
2386 dump_cfg_stats (stderr
);
2389 /*---------------------------------------------------------------------------
2390 Miscellaneous helpers
2391 ---------------------------------------------------------------------------*/
2393 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2394 flow. Transfers of control flow associated with EH are excluded. */
2397 call_can_make_abnormal_goto (gimple t
)
2399 /* If the function has no non-local labels, then a call cannot make an
2400 abnormal transfer of control. */
2401 if (!cfun
->has_nonlocal_label
2402 && !cfun
->calls_setjmp
)
2405 /* Likewise if the call has no side effects. */
2406 if (!gimple_has_side_effects (t
))
2409 /* Likewise if the called function is leaf. */
2410 if (gimple_call_flags (t
) & ECF_LEAF
)
2417 /* Return true if T can make an abnormal transfer of control flow.
2418 Transfers of control flow associated with EH are excluded. */
2421 stmt_can_make_abnormal_goto (gimple t
)
2423 if (computed_goto_p (t
))
2425 if (is_gimple_call (t
))
2426 return call_can_make_abnormal_goto (t
);
2431 /* Return true if T represents a stmt that always transfers control. */
2434 is_ctrl_stmt (gimple t
)
2436 switch (gimple_code (t
))
2450 /* Return true if T is a statement that may alter the flow of control
2451 (e.g., a call to a non-returning function). */
2454 is_ctrl_altering_stmt (gimple t
)
2458 switch (gimple_code (t
))
2461 /* Per stmt call flag indicates whether the call could alter
2463 if (gimple_call_ctrl_altering_p (t
))
2467 case GIMPLE_EH_DISPATCH
:
2468 /* EH_DISPATCH branches to the individual catch handlers at
2469 this level of a try or allowed-exceptions region. It can
2470 fallthru to the next statement as well. */
2474 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2479 /* OpenMP directives alter control flow. */
2482 case GIMPLE_TRANSACTION
:
2483 /* A transaction start alters control flow. */
2490 /* If a statement can throw, it alters control flow. */
2491 return stmt_can_throw_internal (t
);
2495 /* Return true if T is a simple local goto. */
2498 simple_goto_p (gimple t
)
2500 return (gimple_code (t
) == GIMPLE_GOTO
2501 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2505 /* Return true if STMT should start a new basic block. PREV_STMT is
2506 the statement preceding STMT. It is used when STMT is a label or a
2507 case label. Labels should only start a new basic block if their
2508 previous statement wasn't a label. Otherwise, sequence of labels
2509 would generate unnecessary basic blocks that only contain a single
2513 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2518 /* Labels start a new basic block only if the preceding statement
2519 wasn't a label of the same type. This prevents the creation of
2520 consecutive blocks that have nothing but a single label. */
2521 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2523 /* Nonlocal and computed GOTO targets always start a new block. */
2524 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2525 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2528 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2530 if (DECL_NONLOCAL (gimple_label_label (
2531 as_a
<glabel
*> (prev_stmt
))))
2534 cfg_stats
.num_merged_labels
++;
2540 else if (gimple_code (stmt
) == GIMPLE_CALL
2541 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2542 /* setjmp acts similar to a nonlocal GOTO target and thus should
2543 start a new block. */
2550 /* Return true if T should end a basic block. */
2553 stmt_ends_bb_p (gimple t
)
2555 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2558 /* Remove block annotations and other data structures. */
2561 delete_tree_cfg_annotations (void)
2563 vec_free (label_to_block_map_for_fn (cfun
));
2567 /* Return the first statement in basic block BB. */
2570 first_stmt (basic_block bb
)
2572 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2575 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2583 /* Return the first non-label statement in basic block BB. */
2586 first_non_label_stmt (basic_block bb
)
2588 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2589 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2591 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2594 /* Return the last statement in basic block BB. */
2597 last_stmt (basic_block bb
)
2599 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2602 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2610 /* Return the last statement of an otherwise empty block. Return NULL
2611 if the block is totally empty, or if it contains more than one
2615 last_and_only_stmt (basic_block bb
)
2617 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2623 last
= gsi_stmt (i
);
2624 gsi_prev_nondebug (&i
);
2628 /* Empty statements should no longer appear in the instruction stream.
2629 Everything that might have appeared before should be deleted by
2630 remove_useless_stmts, and the optimizers should just gsi_remove
2631 instead of smashing with build_empty_stmt.
2633 Thus the only thing that should appear here in a block containing
2634 one executable statement is a label. */
2635 prev
= gsi_stmt (i
);
2636 if (gimple_code (prev
) == GIMPLE_LABEL
)
2642 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2645 reinstall_phi_args (edge new_edge
, edge old_edge
)
2651 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2655 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2656 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2657 i
++, gsi_next (&phis
))
2659 gphi
*phi
= phis
.phi ();
2660 tree result
= redirect_edge_var_map_result (vm
);
2661 tree arg
= redirect_edge_var_map_def (vm
);
2663 gcc_assert (result
== gimple_phi_result (phi
));
2665 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2668 redirect_edge_var_map_clear (old_edge
);
2671 /* Returns the basic block after which the new basic block created
2672 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2673 near its "logical" location. This is of most help to humans looking
2674 at debugging dumps. */
2677 split_edge_bb_loc (edge edge_in
)
2679 basic_block dest
= edge_in
->dest
;
2680 basic_block dest_prev
= dest
->prev_bb
;
2684 edge e
= find_edge (dest_prev
, dest
);
2685 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2686 return edge_in
->src
;
2691 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2692 Abort on abnormal edges. */
2695 gimple_split_edge (edge edge_in
)
2697 basic_block new_bb
, after_bb
, dest
;
2700 /* Abnormal edges cannot be split. */
2701 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2703 dest
= edge_in
->dest
;
2705 after_bb
= split_edge_bb_loc (edge_in
);
2707 new_bb
= create_empty_bb (after_bb
);
2708 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2709 new_bb
->count
= edge_in
->count
;
2710 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2711 new_edge
->probability
= REG_BR_PROB_BASE
;
2712 new_edge
->count
= edge_in
->count
;
2714 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2715 gcc_assert (e
== edge_in
);
2716 reinstall_phi_args (new_edge
, e
);
2722 /* Verify properties of the address expression T with base object BASE. */
2725 verify_address (tree t
, tree base
)
2728 bool old_side_effects
;
2730 bool new_side_effects
;
2732 old_constant
= TREE_CONSTANT (t
);
2733 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2735 recompute_tree_invariant_for_addr_expr (t
);
2736 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2737 new_constant
= TREE_CONSTANT (t
);
2739 if (old_constant
!= new_constant
)
2741 error ("constant not recomputed when ADDR_EXPR changed");
2744 if (old_side_effects
!= new_side_effects
)
2746 error ("side effects not recomputed when ADDR_EXPR changed");
2750 if (!(TREE_CODE (base
) == VAR_DECL
2751 || TREE_CODE (base
) == PARM_DECL
2752 || TREE_CODE (base
) == RESULT_DECL
))
2755 if (DECL_GIMPLE_REG_P (base
))
2757 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2764 /* Callback for walk_tree, check that all elements with address taken are
2765 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2766 inside a PHI node. */
2769 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2776 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2777 #define CHECK_OP(N, MSG) \
2778 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2779 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2781 switch (TREE_CODE (t
))
2784 if (SSA_NAME_IN_FREE_LIST (t
))
2786 error ("SSA name in freelist but still referenced");
2792 error ("INDIRECT_REF in gimple IL");
2796 x
= TREE_OPERAND (t
, 0);
2797 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2798 || !is_gimple_mem_ref_addr (x
))
2800 error ("invalid first operand of MEM_REF");
2803 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2804 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2806 error ("invalid offset operand of MEM_REF");
2807 return TREE_OPERAND (t
, 1);
2809 if (TREE_CODE (x
) == ADDR_EXPR
2810 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2816 x
= fold (ASSERT_EXPR_COND (t
));
2817 if (x
== boolean_false_node
)
2819 error ("ASSERT_EXPR with an always-false condition");
2825 error ("MODIFY_EXPR not expected while having tuples");
2832 gcc_assert (is_gimple_address (t
));
2834 /* Skip any references (they will be checked when we recurse down the
2835 tree) and ensure that any variable used as a prefix is marked
2837 for (x
= TREE_OPERAND (t
, 0);
2838 handled_component_p (x
);
2839 x
= TREE_OPERAND (x
, 0))
2842 if ((tem
= verify_address (t
, x
)))
2845 if (!(TREE_CODE (x
) == VAR_DECL
2846 || TREE_CODE (x
) == PARM_DECL
2847 || TREE_CODE (x
) == RESULT_DECL
))
2850 if (!TREE_ADDRESSABLE (x
))
2852 error ("address taken, but ADDRESSABLE bit not set");
2860 x
= COND_EXPR_COND (t
);
2861 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2863 error ("non-integral used in condition");
2866 if (!is_gimple_condexpr (x
))
2868 error ("invalid conditional operand");
2873 case NON_LVALUE_EXPR
:
2874 case TRUTH_NOT_EXPR
:
2878 case FIX_TRUNC_EXPR
:
2883 CHECK_OP (0, "invalid operand to unary operator");
2889 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2891 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2895 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2897 tree t0
= TREE_OPERAND (t
, 0);
2898 tree t1
= TREE_OPERAND (t
, 1);
2899 tree t2
= TREE_OPERAND (t
, 2);
2900 if (!tree_fits_uhwi_p (t1
)
2901 || !tree_fits_uhwi_p (t2
))
2903 error ("invalid position or size operand to BIT_FIELD_REF");
2906 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2907 && (TYPE_PRECISION (TREE_TYPE (t
))
2908 != tree_to_uhwi (t1
)))
2910 error ("integral result type precision does not match "
2911 "field size of BIT_FIELD_REF");
2914 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2915 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2916 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2917 != tree_to_uhwi (t1
)))
2919 error ("mode precision of non-integral result does not "
2920 "match field size of BIT_FIELD_REF");
2923 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2924 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2925 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2927 error ("position plus size exceeds size of referenced object in "
2932 t
= TREE_OPERAND (t
, 0);
2937 case ARRAY_RANGE_REF
:
2938 case VIEW_CONVERT_EXPR
:
2939 /* We have a nest of references. Verify that each of the operands
2940 that determine where to reference is either a constant or a variable,
2941 verify that the base is valid, and then show we've already checked
2943 while (handled_component_p (t
))
2945 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2946 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2947 else if (TREE_CODE (t
) == ARRAY_REF
2948 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2950 CHECK_OP (1, "invalid array index");
2951 if (TREE_OPERAND (t
, 2))
2952 CHECK_OP (2, "invalid array lower bound");
2953 if (TREE_OPERAND (t
, 3))
2954 CHECK_OP (3, "invalid array stride");
2956 else if (TREE_CODE (t
) == BIT_FIELD_REF
2957 || TREE_CODE (t
) == REALPART_EXPR
2958 || TREE_CODE (t
) == IMAGPART_EXPR
)
2960 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2965 t
= TREE_OPERAND (t
, 0);
2968 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2970 error ("invalid reference prefix");
2977 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2978 POINTER_PLUS_EXPR. */
2979 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2981 error ("invalid operand to plus/minus, type is a pointer");
2984 CHECK_OP (0, "invalid operand to binary operator");
2985 CHECK_OP (1, "invalid operand to binary operator");
2988 case POINTER_PLUS_EXPR
:
2989 /* Check to make sure the first operand is a pointer or reference type. */
2990 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2992 error ("invalid operand to pointer plus, first operand is not a pointer");
2995 /* Check to make sure the second operand is a ptrofftype. */
2996 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2998 error ("invalid operand to pointer plus, second operand is not an "
2999 "integer type of appropriate width");
3009 case UNORDERED_EXPR
:
3018 case TRUNC_DIV_EXPR
:
3020 case FLOOR_DIV_EXPR
:
3021 case ROUND_DIV_EXPR
:
3022 case TRUNC_MOD_EXPR
:
3024 case FLOOR_MOD_EXPR
:
3025 case ROUND_MOD_EXPR
:
3027 case EXACT_DIV_EXPR
:
3037 CHECK_OP (0, "invalid operand to binary operator");
3038 CHECK_OP (1, "invalid operand to binary operator");
3042 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3046 case CASE_LABEL_EXPR
:
3049 error ("invalid CASE_CHAIN");
3063 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3064 Returns true if there is an error, otherwise false. */
3067 verify_types_in_gimple_min_lval (tree expr
)
3071 if (is_gimple_id (expr
))
3074 if (TREE_CODE (expr
) != TARGET_MEM_REF
3075 && TREE_CODE (expr
) != MEM_REF
)
3077 error ("invalid expression for min lvalue");
3081 /* TARGET_MEM_REFs are strange beasts. */
3082 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3085 op
= TREE_OPERAND (expr
, 0);
3086 if (!is_gimple_val (op
))
3088 error ("invalid operand in indirect reference");
3089 debug_generic_stmt (op
);
3092 /* Memory references now generally can involve a value conversion. */
3097 /* Verify if EXPR is a valid GIMPLE reference expression. If
3098 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3099 if there is an error, otherwise false. */
3102 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3104 while (handled_component_p (expr
))
3106 tree op
= TREE_OPERAND (expr
, 0);
3108 if (TREE_CODE (expr
) == ARRAY_REF
3109 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3111 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3112 || (TREE_OPERAND (expr
, 2)
3113 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3114 || (TREE_OPERAND (expr
, 3)
3115 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3117 error ("invalid operands to array reference");
3118 debug_generic_stmt (expr
);
3123 /* Verify if the reference array element types are compatible. */
3124 if (TREE_CODE (expr
) == ARRAY_REF
3125 && !useless_type_conversion_p (TREE_TYPE (expr
),
3126 TREE_TYPE (TREE_TYPE (op
))))
3128 error ("type mismatch in array reference");
3129 debug_generic_stmt (TREE_TYPE (expr
));
3130 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3133 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3134 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3135 TREE_TYPE (TREE_TYPE (op
))))
3137 error ("type mismatch in array range reference");
3138 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3139 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3143 if ((TREE_CODE (expr
) == REALPART_EXPR
3144 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3145 && !useless_type_conversion_p (TREE_TYPE (expr
),
3146 TREE_TYPE (TREE_TYPE (op
))))
3148 error ("type mismatch in real/imagpart reference");
3149 debug_generic_stmt (TREE_TYPE (expr
));
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3154 if (TREE_CODE (expr
) == COMPONENT_REF
3155 && !useless_type_conversion_p (TREE_TYPE (expr
),
3156 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3158 error ("type mismatch in component reference");
3159 debug_generic_stmt (TREE_TYPE (expr
));
3160 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3164 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3166 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3167 that their operand is not an SSA name or an invariant when
3168 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3169 bug). Otherwise there is nothing to verify, gross mismatches at
3170 most invoke undefined behavior. */
3172 && (TREE_CODE (op
) == SSA_NAME
3173 || is_gimple_min_invariant (op
)))
3175 error ("conversion of an SSA_NAME on the left hand side");
3176 debug_generic_stmt (expr
);
3179 else if (TREE_CODE (op
) == SSA_NAME
3180 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3182 error ("conversion of register to a different size");
3183 debug_generic_stmt (expr
);
3186 else if (!handled_component_p (op
))
3193 if (TREE_CODE (expr
) == MEM_REF
)
3195 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3197 error ("invalid address operand in MEM_REF");
3198 debug_generic_stmt (expr
);
3201 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3202 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3204 error ("invalid offset operand in MEM_REF");
3205 debug_generic_stmt (expr
);
3209 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3211 if (!TMR_BASE (expr
)
3212 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3214 error ("invalid address operand in TARGET_MEM_REF");
3217 if (!TMR_OFFSET (expr
)
3218 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3219 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3221 error ("invalid offset operand in TARGET_MEM_REF");
3222 debug_generic_stmt (expr
);
3227 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3228 && verify_types_in_gimple_min_lval (expr
));
3231 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3232 list of pointer-to types that is trivially convertible to DEST. */
3235 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3239 if (!TYPE_POINTER_TO (src_obj
))
3242 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3243 if (useless_type_conversion_p (dest
, src
))
3249 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3250 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3253 valid_fixed_convert_types_p (tree type1
, tree type2
)
3255 return (FIXED_POINT_TYPE_P (type1
)
3256 && (INTEGRAL_TYPE_P (type2
)
3257 || SCALAR_FLOAT_TYPE_P (type2
)
3258 || FIXED_POINT_TYPE_P (type2
)));
3261 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3262 is a problem, otherwise false. */
3265 verify_gimple_call (gcall
*stmt
)
3267 tree fn
= gimple_call_fn (stmt
);
3268 tree fntype
, fndecl
;
3271 if (gimple_call_internal_p (stmt
))
3275 error ("gimple call has two targets");
3276 debug_generic_stmt (fn
);
3284 error ("gimple call has no target");
3289 if (fn
&& !is_gimple_call_addr (fn
))
3291 error ("invalid function in gimple call");
3292 debug_generic_stmt (fn
);
3297 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3298 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3299 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3301 error ("non-function in gimple call");
3305 fndecl
= gimple_call_fndecl (stmt
);
3307 && TREE_CODE (fndecl
) == FUNCTION_DECL
3308 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3309 && !DECL_PURE_P (fndecl
)
3310 && !TREE_READONLY (fndecl
))
3312 error ("invalid pure const state for function");
3316 if (gimple_call_lhs (stmt
)
3317 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3318 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3320 error ("invalid LHS in gimple call");
3324 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3326 error ("LHS in noreturn call");
3330 fntype
= gimple_call_fntype (stmt
);
3332 && gimple_call_lhs (stmt
)
3333 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3335 /* ??? At least C++ misses conversions at assignments from
3336 void * call results.
3337 ??? Java is completely off. Especially with functions
3338 returning java.lang.Object.
3339 For now simply allow arbitrary pointer type conversions. */
3340 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3341 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3343 error ("invalid conversion in gimple call");
3344 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3345 debug_generic_stmt (TREE_TYPE (fntype
));
3349 if (gimple_call_chain (stmt
)
3350 && !is_gimple_val (gimple_call_chain (stmt
)))
3352 error ("invalid static chain in gimple call");
3353 debug_generic_stmt (gimple_call_chain (stmt
));
3357 /* If there is a static chain argument, the call should either be
3358 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3359 if (gimple_call_chain (stmt
)
3361 && !DECL_STATIC_CHAIN (fndecl
))
3363 error ("static chain with function that doesn%'t use one");
3367 /* ??? The C frontend passes unpromoted arguments in case it
3368 didn't see a function declaration before the call. So for now
3369 leave the call arguments mostly unverified. Once we gimplify
3370 unit-at-a-time we have a chance to fix this. */
3372 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3374 tree arg
= gimple_call_arg (stmt
, i
);
3375 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3376 && !is_gimple_val (arg
))
3377 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3378 && !is_gimple_lvalue (arg
)))
3380 error ("invalid argument to gimple call");
3381 debug_generic_expr (arg
);
3389 /* Verifies the gimple comparison with the result type TYPE and
3390 the operands OP0 and OP1. */
3393 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3395 tree op0_type
= TREE_TYPE (op0
);
3396 tree op1_type
= TREE_TYPE (op1
);
3398 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3400 error ("invalid operands in gimple comparison");
3404 /* For comparisons we do not have the operations type as the
3405 effective type the comparison is carried out in. Instead
3406 we require that either the first operand is trivially
3407 convertible into the second, or the other way around.
3408 Because we special-case pointers to void we allow
3409 comparisons of pointers with the same mode as well. */
3410 if (!useless_type_conversion_p (op0_type
, op1_type
)
3411 && !useless_type_conversion_p (op1_type
, op0_type
)
3412 && (!POINTER_TYPE_P (op0_type
)
3413 || !POINTER_TYPE_P (op1_type
)
3414 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3416 error ("mismatching comparison operand types");
3417 debug_generic_expr (op0_type
);
3418 debug_generic_expr (op1_type
);
3422 /* The resulting type of a comparison may be an effective boolean type. */
3423 if (INTEGRAL_TYPE_P (type
)
3424 && (TREE_CODE (type
) == BOOLEAN_TYPE
3425 || TYPE_PRECISION (type
) == 1))
3427 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3428 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3430 error ("vector comparison returning a boolean");
3431 debug_generic_expr (op0_type
);
3432 debug_generic_expr (op1_type
);
3436 /* Or an integer vector type with the same size and element count
3437 as the comparison operand types. */
3438 else if (TREE_CODE (type
) == VECTOR_TYPE
3439 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3441 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3442 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3444 error ("non-vector operands in vector comparison");
3445 debug_generic_expr (op0_type
);
3446 debug_generic_expr (op1_type
);
3450 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3451 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3452 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3453 /* The result of a vector comparison is of signed
3455 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3457 error ("invalid vector comparison resulting type");
3458 debug_generic_expr (type
);
3464 error ("bogus comparison result type");
3465 debug_generic_expr (type
);
3472 /* Verify a gimple assignment statement STMT with an unary rhs.
3473 Returns true if anything is wrong. */
3476 verify_gimple_assign_unary (gassign
*stmt
)
3478 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3479 tree lhs
= gimple_assign_lhs (stmt
);
3480 tree lhs_type
= TREE_TYPE (lhs
);
3481 tree rhs1
= gimple_assign_rhs1 (stmt
);
3482 tree rhs1_type
= TREE_TYPE (rhs1
);
3484 if (!is_gimple_reg (lhs
))
3486 error ("non-register as LHS of unary operation");
3490 if (!is_gimple_val (rhs1
))
3492 error ("invalid operand in unary operation");
3496 /* First handle conversions. */
3501 /* Allow conversions from pointer type to integral type only if
3502 there is no sign or zero extension involved.
3503 For targets were the precision of ptrofftype doesn't match that
3504 of pointers we need to allow arbitrary conversions to ptrofftype. */
3505 if ((POINTER_TYPE_P (lhs_type
)
3506 && INTEGRAL_TYPE_P (rhs1_type
))
3507 || (POINTER_TYPE_P (rhs1_type
)
3508 && INTEGRAL_TYPE_P (lhs_type
)
3509 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3510 || ptrofftype_p (sizetype
))))
3513 /* Allow conversion from integral to offset type and vice versa. */
3514 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3515 && INTEGRAL_TYPE_P (rhs1_type
))
3516 || (INTEGRAL_TYPE_P (lhs_type
)
3517 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3520 /* Otherwise assert we are converting between types of the
3522 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3524 error ("invalid types in nop conversion");
3525 debug_generic_expr (lhs_type
);
3526 debug_generic_expr (rhs1_type
);
3533 case ADDR_SPACE_CONVERT_EXPR
:
3535 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3536 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3537 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3539 error ("invalid types in address space conversion");
3540 debug_generic_expr (lhs_type
);
3541 debug_generic_expr (rhs1_type
);
3548 case FIXED_CONVERT_EXPR
:
3550 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3551 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3553 error ("invalid types in fixed-point conversion");
3554 debug_generic_expr (lhs_type
);
3555 debug_generic_expr (rhs1_type
);
3564 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3565 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3566 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3568 error ("invalid types in conversion to floating point");
3569 debug_generic_expr (lhs_type
);
3570 debug_generic_expr (rhs1_type
);
3577 case FIX_TRUNC_EXPR
:
3579 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3580 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3581 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3583 error ("invalid types in conversion to integer");
3584 debug_generic_expr (lhs_type
);
3585 debug_generic_expr (rhs1_type
);
3591 case REDUC_MAX_EXPR
:
3592 case REDUC_MIN_EXPR
:
3593 case REDUC_PLUS_EXPR
:
3594 if (!VECTOR_TYPE_P (rhs1_type
)
3595 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3597 error ("reduction should convert from vector to element type");
3598 debug_generic_expr (lhs_type
);
3599 debug_generic_expr (rhs1_type
);
3604 case VEC_UNPACK_HI_EXPR
:
3605 case VEC_UNPACK_LO_EXPR
:
3606 case VEC_UNPACK_FLOAT_HI_EXPR
:
3607 case VEC_UNPACK_FLOAT_LO_EXPR
:
3622 /* For the remaining codes assert there is no conversion involved. */
3623 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3625 error ("non-trivial conversion in unary operation");
3626 debug_generic_expr (lhs_type
);
3627 debug_generic_expr (rhs1_type
);
3634 /* Verify a gimple assignment statement STMT with a binary rhs.
3635 Returns true if anything is wrong. */
3638 verify_gimple_assign_binary (gassign
*stmt
)
3640 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3641 tree lhs
= gimple_assign_lhs (stmt
);
3642 tree lhs_type
= TREE_TYPE (lhs
);
3643 tree rhs1
= gimple_assign_rhs1 (stmt
);
3644 tree rhs1_type
= TREE_TYPE (rhs1
);
3645 tree rhs2
= gimple_assign_rhs2 (stmt
);
3646 tree rhs2_type
= TREE_TYPE (rhs2
);
3648 if (!is_gimple_reg (lhs
))
3650 error ("non-register as LHS of binary operation");
3654 if (!is_gimple_val (rhs1
)
3655 || !is_gimple_val (rhs2
))
3657 error ("invalid operands in binary operation");
3661 /* First handle operations that involve different types. */
3666 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3667 || !(INTEGRAL_TYPE_P (rhs1_type
)
3668 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3669 || !(INTEGRAL_TYPE_P (rhs2_type
)
3670 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3672 error ("type mismatch in complex expression");
3673 debug_generic_expr (lhs_type
);
3674 debug_generic_expr (rhs1_type
);
3675 debug_generic_expr (rhs2_type
);
3687 /* Shifts and rotates are ok on integral types, fixed point
3688 types and integer vector types. */
3689 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3690 && !FIXED_POINT_TYPE_P (rhs1_type
)
3691 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3692 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3693 || (!INTEGRAL_TYPE_P (rhs2_type
)
3694 /* Vector shifts of vectors are also ok. */
3695 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3696 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3697 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3698 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3699 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3701 error ("type mismatch in shift expression");
3702 debug_generic_expr (lhs_type
);
3703 debug_generic_expr (rhs1_type
);
3704 debug_generic_expr (rhs2_type
);
3711 case WIDEN_LSHIFT_EXPR
:
3713 if (!INTEGRAL_TYPE_P (lhs_type
)
3714 || !INTEGRAL_TYPE_P (rhs1_type
)
3715 || TREE_CODE (rhs2
) != INTEGER_CST
3716 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3718 error ("type mismatch in widening vector shift expression");
3719 debug_generic_expr (lhs_type
);
3720 debug_generic_expr (rhs1_type
);
3721 debug_generic_expr (rhs2_type
);
3728 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3729 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3731 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3732 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3733 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3734 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3735 || TREE_CODE (rhs2
) != INTEGER_CST
3736 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3737 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3739 error ("type mismatch in widening vector shift expression");
3740 debug_generic_expr (lhs_type
);
3741 debug_generic_expr (rhs1_type
);
3742 debug_generic_expr (rhs2_type
);
3752 tree lhs_etype
= lhs_type
;
3753 tree rhs1_etype
= rhs1_type
;
3754 tree rhs2_etype
= rhs2_type
;
3755 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3757 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3758 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3760 error ("invalid non-vector operands to vector valued plus");
3763 lhs_etype
= TREE_TYPE (lhs_type
);
3764 rhs1_etype
= TREE_TYPE (rhs1_type
);
3765 rhs2_etype
= TREE_TYPE (rhs2_type
);
3767 if (POINTER_TYPE_P (lhs_etype
)
3768 || POINTER_TYPE_P (rhs1_etype
)
3769 || POINTER_TYPE_P (rhs2_etype
))
3771 error ("invalid (pointer) operands to plus/minus");
3775 /* Continue with generic binary expression handling. */
3779 case POINTER_PLUS_EXPR
:
3781 if (!POINTER_TYPE_P (rhs1_type
)
3782 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3783 || !ptrofftype_p (rhs2_type
))
3785 error ("type mismatch in pointer plus expression");
3786 debug_generic_stmt (lhs_type
);
3787 debug_generic_stmt (rhs1_type
);
3788 debug_generic_stmt (rhs2_type
);
3795 case TRUTH_ANDIF_EXPR
:
3796 case TRUTH_ORIF_EXPR
:
3797 case TRUTH_AND_EXPR
:
3799 case TRUTH_XOR_EXPR
:
3809 case UNORDERED_EXPR
:
3817 /* Comparisons are also binary, but the result type is not
3818 connected to the operand types. */
3819 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3821 case WIDEN_MULT_EXPR
:
3822 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3824 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3825 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3827 case WIDEN_SUM_EXPR
:
3828 case VEC_WIDEN_MULT_HI_EXPR
:
3829 case VEC_WIDEN_MULT_LO_EXPR
:
3830 case VEC_WIDEN_MULT_EVEN_EXPR
:
3831 case VEC_WIDEN_MULT_ODD_EXPR
:
3832 case VEC_PACK_TRUNC_EXPR
:
3833 case VEC_PACK_SAT_EXPR
:
3834 case VEC_PACK_FIX_TRUNC_EXPR
:
3839 case MULT_HIGHPART_EXPR
:
3840 case TRUNC_DIV_EXPR
:
3842 case FLOOR_DIV_EXPR
:
3843 case ROUND_DIV_EXPR
:
3844 case TRUNC_MOD_EXPR
:
3846 case FLOOR_MOD_EXPR
:
3847 case ROUND_MOD_EXPR
:
3849 case EXACT_DIV_EXPR
:
3855 /* Continue with generic binary expression handling. */
3862 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3863 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3865 error ("type mismatch in binary expression");
3866 debug_generic_stmt (lhs_type
);
3867 debug_generic_stmt (rhs1_type
);
3868 debug_generic_stmt (rhs2_type
);
3875 /* Verify a gimple assignment statement STMT with a ternary rhs.
3876 Returns true if anything is wrong. */
3879 verify_gimple_assign_ternary (gassign
*stmt
)
3881 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3882 tree lhs
= gimple_assign_lhs (stmt
);
3883 tree lhs_type
= TREE_TYPE (lhs
);
3884 tree rhs1
= gimple_assign_rhs1 (stmt
);
3885 tree rhs1_type
= TREE_TYPE (rhs1
);
3886 tree rhs2
= gimple_assign_rhs2 (stmt
);
3887 tree rhs2_type
= TREE_TYPE (rhs2
);
3888 tree rhs3
= gimple_assign_rhs3 (stmt
);
3889 tree rhs3_type
= TREE_TYPE (rhs3
);
3891 if (!is_gimple_reg (lhs
))
3893 error ("non-register as LHS of ternary operation");
3897 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3898 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3899 || !is_gimple_val (rhs2
)
3900 || !is_gimple_val (rhs3
))
3902 error ("invalid operands in ternary operation");
3906 /* First handle operations that involve different types. */
3909 case WIDEN_MULT_PLUS_EXPR
:
3910 case WIDEN_MULT_MINUS_EXPR
:
3911 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3912 && !FIXED_POINT_TYPE_P (rhs1_type
))
3913 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3914 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3915 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3916 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3918 error ("type mismatch in widening multiply-accumulate expression");
3919 debug_generic_expr (lhs_type
);
3920 debug_generic_expr (rhs1_type
);
3921 debug_generic_expr (rhs2_type
);
3922 debug_generic_expr (rhs3_type
);
3928 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3929 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3930 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3932 error ("type mismatch in fused multiply-add expression");
3933 debug_generic_expr (lhs_type
);
3934 debug_generic_expr (rhs1_type
);
3935 debug_generic_expr (rhs2_type
);
3936 debug_generic_expr (rhs3_type
);
3943 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3944 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3946 error ("type mismatch in conditional expression");
3947 debug_generic_expr (lhs_type
);
3948 debug_generic_expr (rhs2_type
);
3949 debug_generic_expr (rhs3_type
);
3955 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3956 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3958 error ("type mismatch in vector permute expression");
3959 debug_generic_expr (lhs_type
);
3960 debug_generic_expr (rhs1_type
);
3961 debug_generic_expr (rhs2_type
);
3962 debug_generic_expr (rhs3_type
);
3966 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3967 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3968 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3970 error ("vector types expected in vector permute expression");
3971 debug_generic_expr (lhs_type
);
3972 debug_generic_expr (rhs1_type
);
3973 debug_generic_expr (rhs2_type
);
3974 debug_generic_expr (rhs3_type
);
3978 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3979 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3980 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3981 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3982 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3984 error ("vectors with different element number found "
3985 "in vector permute expression");
3986 debug_generic_expr (lhs_type
);
3987 debug_generic_expr (rhs1_type
);
3988 debug_generic_expr (rhs2_type
);
3989 debug_generic_expr (rhs3_type
);
3993 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3994 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3995 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3997 error ("invalid mask type in vector permute expression");
3998 debug_generic_expr (lhs_type
);
3999 debug_generic_expr (rhs1_type
);
4000 debug_generic_expr (rhs2_type
);
4001 debug_generic_expr (rhs3_type
);
4008 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4009 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4010 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4011 (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4012 > GET_MODE_BITSIZE (GET_MODE_INNER
4013 (TYPE_MODE (TREE_TYPE (lhs_type
)))))
4015 error ("type mismatch in sad expression");
4016 debug_generic_expr (lhs_type
);
4017 debug_generic_expr (rhs1_type
);
4018 debug_generic_expr (rhs2_type
);
4019 debug_generic_expr (rhs3_type
);
4023 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4024 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4025 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4027 error ("vector types expected in sad expression");
4028 debug_generic_expr (lhs_type
);
4029 debug_generic_expr (rhs1_type
);
4030 debug_generic_expr (rhs2_type
);
4031 debug_generic_expr (rhs3_type
);
4038 case REALIGN_LOAD_EXPR
:
4048 /* Verify a gimple assignment statement STMT with a single rhs.
4049 Returns true if anything is wrong. */
4052 verify_gimple_assign_single (gassign
*stmt
)
4054 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4055 tree lhs
= gimple_assign_lhs (stmt
);
4056 tree lhs_type
= TREE_TYPE (lhs
);
4057 tree rhs1
= gimple_assign_rhs1 (stmt
);
4058 tree rhs1_type
= TREE_TYPE (rhs1
);
4061 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4063 error ("non-trivial conversion at assignment");
4064 debug_generic_expr (lhs_type
);
4065 debug_generic_expr (rhs1_type
);
4069 if (gimple_clobber_p (stmt
)
4070 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4072 error ("non-decl/MEM_REF LHS in clobber statement");
4073 debug_generic_expr (lhs
);
4077 if (handled_component_p (lhs
)
4078 || TREE_CODE (lhs
) == MEM_REF
4079 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4080 res
|= verify_types_in_gimple_reference (lhs
, true);
4082 /* Special codes we cannot handle via their class. */
4087 tree op
= TREE_OPERAND (rhs1
, 0);
4088 if (!is_gimple_addressable (op
))
4090 error ("invalid operand in unary expression");
4094 /* Technically there is no longer a need for matching types, but
4095 gimple hygiene asks for this check. In LTO we can end up
4096 combining incompatible units and thus end up with addresses
4097 of globals that change their type to a common one. */
4099 && !types_compatible_p (TREE_TYPE (op
),
4100 TREE_TYPE (TREE_TYPE (rhs1
)))
4101 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4104 error ("type mismatch in address expression");
4105 debug_generic_stmt (TREE_TYPE (rhs1
));
4106 debug_generic_stmt (TREE_TYPE (op
));
4110 return verify_types_in_gimple_reference (op
, true);
4115 error ("INDIRECT_REF in gimple IL");
4121 case ARRAY_RANGE_REF
:
4122 case VIEW_CONVERT_EXPR
:
4125 case TARGET_MEM_REF
:
4127 if (!is_gimple_reg (lhs
)
4128 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4130 error ("invalid rhs for gimple memory store");
4131 debug_generic_stmt (lhs
);
4132 debug_generic_stmt (rhs1
);
4135 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4147 /* tcc_declaration */
4152 if (!is_gimple_reg (lhs
)
4153 && !is_gimple_reg (rhs1
)
4154 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4156 error ("invalid rhs for gimple memory store");
4157 debug_generic_stmt (lhs
);
4158 debug_generic_stmt (rhs1
);
4164 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4167 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4169 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4171 /* For vector CONSTRUCTORs we require that either it is empty
4172 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4173 (then the element count must be correct to cover the whole
4174 outer vector and index must be NULL on all elements, or it is
4175 a CONSTRUCTOR of scalar elements, where we as an exception allow
4176 smaller number of elements (assuming zero filling) and
4177 consecutive indexes as compared to NULL indexes (such
4178 CONSTRUCTORs can appear in the IL from FEs). */
4179 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4181 if (elt_t
== NULL_TREE
)
4183 elt_t
= TREE_TYPE (elt_v
);
4184 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4186 tree elt_t
= TREE_TYPE (elt_v
);
4187 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4190 error ("incorrect type of vector CONSTRUCTOR"
4192 debug_generic_stmt (rhs1
);
4195 else if (CONSTRUCTOR_NELTS (rhs1
)
4196 * TYPE_VECTOR_SUBPARTS (elt_t
)
4197 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4199 error ("incorrect number of vector CONSTRUCTOR"
4201 debug_generic_stmt (rhs1
);
4205 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4208 error ("incorrect type of vector CONSTRUCTOR elements");
4209 debug_generic_stmt (rhs1
);
4212 else if (CONSTRUCTOR_NELTS (rhs1
)
4213 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4215 error ("incorrect number of vector CONSTRUCTOR elements");
4216 debug_generic_stmt (rhs1
);
4220 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4222 error ("incorrect type of vector CONSTRUCTOR elements");
4223 debug_generic_stmt (rhs1
);
4226 if (elt_i
!= NULL_TREE
4227 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4228 || TREE_CODE (elt_i
) != INTEGER_CST
4229 || compare_tree_int (elt_i
, i
) != 0))
4231 error ("vector CONSTRUCTOR with non-NULL element index");
4232 debug_generic_stmt (rhs1
);
4235 if (!is_gimple_val (elt_v
))
4237 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4238 debug_generic_stmt (rhs1
);
4243 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4245 error ("non-vector CONSTRUCTOR with elements");
4246 debug_generic_stmt (rhs1
);
4252 case WITH_SIZE_EXPR
:
4262 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4263 is a problem, otherwise false. */
4266 verify_gimple_assign (gassign
*stmt
)
/* Dispatch STMT to the verifier matching its right-hand-side class;
   each callee returns true if a problem was found.  */
4268 switch (gimple_assign_rhs_class (stmt
))
4270 case GIMPLE_SINGLE_RHS
:
4271 return verify_gimple_assign_single (stmt
);
4273 case GIMPLE_UNARY_RHS
:
4274 return verify_gimple_assign_unary (stmt
);
4276 case GIMPLE_BINARY_RHS
:
4277 return verify_gimple_assign_binary (stmt
);
4279 case GIMPLE_TERNARY_RHS
:
4280 return verify_gimple_assign_ternary (stmt
);
4287 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4288 is a problem, otherwise false. */
4291 verify_gimple_return (greturn
*stmt
)
/* OP is the returned value (may be absent); RESTYPE is the declared
   return type of the current function.  */
4293 tree op
= gimple_return_retval (stmt
);
4294 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4296 /* We cannot test for present return values as we do not fix up missing
4297 return values from the original source. */
/* The return operand must be a GIMPLE value or the bare RESULT_DECL.  */
4301 if (!is_gimple_val (op
)
4302 && TREE_CODE (op
) != RESULT_DECL
)
4304 error ("invalid operand in return statement");
4305 debug_generic_stmt (op
);
/* For a result returned by invisible reference (RESULT_DECL, or an
   SSA_NAME backed by one, with DECL_BY_REFERENCE), strip one level so
   the pointed-to type is what gets compared against RESTYPE.  */
4309 if ((TREE_CODE (op
) == RESULT_DECL
4310 && DECL_BY_REFERENCE (op
))
4311 || (TREE_CODE (op
) == SSA_NAME
4312 && SSA_NAME_VAR (op
)
4313 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4314 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4315 op
= TREE_TYPE (op
);
4317 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4319 error ("invalid conversion in return statement");
4320 debug_generic_stmt (restype
);
4321 debug_generic_stmt (TREE_TYPE (op
));
4329 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4330 is a problem, otherwise false. */
4333 verify_gimple_goto (ggoto
*stmt
)
4335 tree dest
= gimple_goto_dest (stmt
);
4337 /* ??? We have two canonical forms of direct goto destinations, a
4338 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* A computed goto destination must at least be a GIMPLE value of
   pointer type.  */
4339 if (TREE_CODE (dest
) != LABEL_DECL
4340 && (!is_gimple_val (dest
)
4341 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4343 error ("goto destination is neither a label nor a pointer");
4350 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4351 is a problem, otherwise false. */
4354 verify_gimple_switch (gswitch
*stmt
)
4357 tree elt
, prev_upper_bound
= NULL_TREE
;
4358 tree index_type
, elt_type
= NULL_TREE
;
/* The switch index must be a GIMPLE value of integral type.  */
4360 if (!is_gimple_val (gimple_switch_index (stmt
)))
4362 error ("invalid operand to switch statement");
4363 debug_generic_stmt (gimple_switch_index (stmt
));
4367 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4368 if (! INTEGRAL_TYPE_P (index_type
))
4370 error ("non-integral type switch statement");
4371 debug_generic_expr (index_type
);
/* Label 0 must be the default case: no CASE_LOW and no CASE_HIGH.  */
4375 elt
= gimple_switch_label (stmt
, 0);
4376 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4378 error ("invalid default case label in switch statement");
4379 debug_generic_expr (elt
);
/* Walk the remaining case labels, checking each one and its relation
   to its predecessor.  */
4383 n
= gimple_switch_num_labels (stmt
);
4384 for (i
= 1; i
< n
; i
++)
4386 elt
= gimple_switch_label (stmt
, i
);
/* Every non-default label needs a CASE_LOW.  */
4388 if (! CASE_LOW (elt
))
4390 error ("invalid case label in switch statement");
4391 debug_generic_expr (elt
);
/* A range case must have CASE_LOW < CASE_HIGH.  */
4395 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4397 error ("invalid case range in switch statement");
4398 debug_generic_expr (elt
);
/* All case labels must share one type (ELT_TYPE, latched below from
   the first label seen).  */
4404 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4405 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4407 error ("type mismatch for case label in switch statement");
4408 debug_generic_expr (elt
);
4414 elt_type
= TREE_TYPE (CASE_LOW (elt
));
/* The index type must be wide enough to represent the labels.  */
4415 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4417 error ("type precision mismatch in switch statement");
/* Labels must be sorted: the previous upper bound must lie strictly
   below this label's CASE_LOW.  */
4422 if (prev_upper_bound
)
4424 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4426 error ("case labels not sorted in switch statement");
/* Remember this label's upper bound (CASE_HIGH, or CASE_LOW for a
   single-value case) for the next iteration's ordering check.  */
4431 prev_upper_bound
= CASE_HIGH (elt
);
4432 if (! prev_upper_bound
)
4433 prev_upper_bound
= CASE_LOW (elt
);
4439 /* Verify a gimple debug statement STMT.
4440 Returns true if anything is wrong. */
4443 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4445 /* There isn't much that could be wrong in a gimple debug stmt. A
4446 gimple debug bind stmt, for example, maps a tree, that's usually
4447 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4448 component or member of an aggregate type, to another tree, that
4449 can be an arbitrary expression. These stmts expand into debug
4450 insns, and are converted to debug notes by var-tracking.c. */
/* No checks performed; presumably reports no error — the return
   statement is not visible in this extraction (TODO confirm).  */
4454 /* Verify a gimple label statement STMT.
4455 Returns true if anything is wrong. */
4458 verify_gimple_label (glabel
*stmt
)
4460 tree decl
= gimple_label_label (stmt
);
/* The label operand must be a LABEL_DECL.  */
4464 if (TREE_CODE (decl
) != LABEL_DECL
)
/* Ordinary (non-nonlocal, non-forced) labels must belong to the
   current function.  */
4466 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4467 && DECL_CONTEXT (decl
) != current_function_decl
)
4469 error ("label's context is not the current function decl");
/* The label_to_block map entry for this label's UID must name the
   block the label statement actually lives in.  */
4473 uid
= LABEL_DECL_UID (decl
)
4476 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4478 error ("incorrect entry in label_to_block_map");
/* If the label carries an EH landing-pad number, the corresponding
   landing pad must point back at this very label.  */
4482 uid
= EH_LANDING_PAD_NR (decl
);
4485 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4486 if (decl
!= lp
->post_landing_pad
)
4488 error ("incorrect setting of landing pad number");
4496 /* Verify a gimple cond statement STMT.
4497 Returns true if anything is wrong. */
4500 verify_gimple_cond (gcond
*stmt
)
/* The condition code must be a comparison (tcc_comparison class).  */
4502 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4504 error ("invalid comparison code in gimple cond");
/* Each branch label, when present, must be a LABEL_DECL.  */
4507 if (!(!gimple_cond_true_label (stmt
)
4508 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4509 || !(!gimple_cond_false_label (stmt
)
4510 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4512 error ("invalid labels in gimple cond");
/* Finally check the comparison itself; a cond always yields a
   boolean.  */
4516 return verify_gimple_comparison (boolean_type_node
,
4517 gimple_cond_lhs (stmt
),
4518 gimple_cond_rhs (stmt
));
4521 /* Verify the GIMPLE statement STMT. Returns true if there is an
4522 error, otherwise false. */
4525 verify_gimple_stmt (gimple stmt
)
/* Dispatch on the statement's code to the matching verifier; each
   callee returns true on error.  */
4527 switch (gimple_code (stmt
))
4530 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4533 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4536 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4539 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4542 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4545 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4548 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4553 case GIMPLE_TRANSACTION
:
4554 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4556 /* Tuples that do not have tree operands. */
4558 case GIMPLE_PREDICT
:
4560 case GIMPLE_EH_DISPATCH
:
4561 case GIMPLE_EH_MUST_NOT_THROW
:
4565 /* OpenMP directives are validated by the FE and never operated
4566 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4567 non-gimple expressions when the main index variable has had
4568 its address taken. This does not affect the loop itself
4569 because the header of an GIMPLE_OMP_FOR is merely used to determine
4570 how to setup the parallel iteration. */
4574 return verify_gimple_debug (stmt
);
4581 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4582 and false otherwise. */
4585 verify_gimple_phi (gimple phi
)
4589 tree phi_result
= gimple_phi_result (phi
)
4594 error ("invalid PHI result");
/* The result must be an SSA_NAME; for a virtual PHI its underlying
   variable must be the function's single virtual operand.  */
4598 virtual_p
= virtual_operand_p (phi_result
);
4599 if (TREE_CODE (phi_result
) != SSA_NAME
4601 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4603 error ("invalid PHI result");
/* Check each incoming argument.  */
4607 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4609 tree t
= gimple_phi_arg_def (phi
, i
)
4613 error ("missing PHI def");
4617 /* Addressable variables do have SSA_NAMEs but they
4618 are not considered gimple values. */
/* An argument's virtualness must match the result's, and a
   non-SSA argument must at least be a GIMPLE value.  */
4619 else if ((TREE_CODE (t
) == SSA_NAME
4620 && virtual_p
!= virtual_operand_p (t
))
4622 && (TREE_CODE (t
) != SSA_NAME
4623 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4625 && !is_gimple_val (t
)))
4627 error ("invalid PHI argument");
4628 debug_generic_expr (t
);
4631 #ifdef ENABLE_TYPES_CHECKING
/* With expensive type checking enabled, also require each argument's
   type to be trivially convertible to the result type.  */
4632 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4634 error ("incompatible types in PHI argument %u", i
);
4635 debug_generic_stmt (TREE_TYPE (phi_result
));
4636 debug_generic_stmt (TREE_TYPE (t
));
4645 /* Verify the GIMPLE statements inside the sequence STMTS. */
4648 verify_gimple_in_seq_2 (gimple_seq stmts
)
4650 gimple_stmt_iterator ittr
;
/* Walk every statement of the sequence, recursing into statements
   that themselves contain sub-sequences.  */
4653 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4655 gimple stmt
= gsi_stmt (ittr
);
4657 switch (gimple_code (stmt
))
/* GIMPLE_BIND: verify its body recursively.  */
4660 err
|= verify_gimple_in_seq_2 (
4661 gimple_bind_body (as_a
<gbind
*> (stmt
)));
/* GIMPLE_TRY: both the eval and cleanup sequences.  */
4665 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4666 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4669 case GIMPLE_EH_FILTER
:
4670 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4673 case GIMPLE_EH_ELSE
:
/* GIMPLE_EH_ELSE: verify both the normal and EH bodies.  */
4675 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4676 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4677 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
/* GIMPLE_CATCH: verify the handler sequence.  */
4682 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4683 as_a
<gcatch
*> (stmt
)));
4686 case GIMPLE_TRANSACTION
:
4687 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
/* Default: verify the statement itself and dump it on failure.  */
4692 bool err2
= verify_gimple_stmt (stmt
)
4694 debug_gimple_stmt (stmt
);
4703 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4704 is a problem, otherwise false. */
4707 verify_gimple_transaction (gtransaction
*stmt
)
/* The transaction label, if any, must be a LABEL_DECL; then verify
   the transaction body as a nested statement sequence.  */
4709 tree lab
= gimple_transaction_label (stmt
)
4710 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4712 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4716 /* Verify the GIMPLE statements inside the statement list STMTS. */
4719 verify_gimple_in_seq (gimple_seq stmts
)
/* Time the verification pass, and abort the compiler with an internal
   error if any statement in STMTS fails to verify.  */
4721 timevar_push (TV_TREE_STMT_VERIFY
);
4722 if (verify_gimple_in_seq_2 (stmts
))
4723 internal_error ("verify_gimple failed");
4724 timevar_pop (TV_TREE_STMT_VERIFY
);
4727 /* Return true when the T can be shared. */
4730 tree_node_can_be_shared (tree t
)
/* Types, declarations, invariants, SSA names, the error node and
   identifiers are inherently shareable tree nodes.  */
4732 if (IS_TYPE_OR_DECL_P (t
)
4733 || is_gimple_min_invariant (t
)
4734 || TREE_CODE (t
) == SSA_NAME
4735 || t
== error_mark_node
4736 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4739 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4748 /* Called via walk_tree. Verify tree sharing. */
4751 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA is the set of tree nodes already seen during the walk.  */
4753 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
/* Shareable nodes are allowed to appear multiple times; do not
   descend into them.  */
4755 if (tree_node_can_be_shared (*tp
))
4757 *walk_subtrees
= false;
/* hash_set::add returns true if the node was already present,
   i.e. the node is improperly shared.  */
4761 if (visited
->add (*tp
))
4767 /* Called via walk_gimple_stmt. Verify tree sharing. */
4770 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
/* Unwrap the walk_stmt_info wrapper and delegate to the tree-level
   sharing check with its embedded visited set.  */
4772 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4773 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4776 static bool eh_error_found
;
4778 verify_eh_throw_stmt_node (const gimple
&stmt
, const int &,
4779 hash_set
<gimple
> *visited
)
/* Called for each EH-table entry: a statement recorded in the EH
   table but never visited in the IL is dead; flag it globally.  */
4781 if (!visited
->contains (stmt
))
4783 error ("dead STMT in EH table");
4784 debug_gimple_stmt (stmt
);
4785 eh_error_found
= true;
4790 /* Verify if the location LOCs block is in BLOCKS. */
4793 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
/* The BLOCK attached to LOC, if any, must be a member of the
   function's block tree (BLOCKS).  */
4795 tree block
= LOCATION_BLOCK (loc
);
4796 if (block
!= NULL_TREE
4797 && !blocks
->contains (block
))
4799 error ("location references block not in block tree");
/* Recurse on the block's own source location to validate the whole
   ancestry chain.  */
4802 if (block
!= NULL_TREE
)
4803 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4807 /* Called via walk_tree. Verify that expressions have no blocks. */
4810 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4814 *walk_subtrees
= false;
/* Any expression here must carry a location with no BLOCK attached;
   used for DEBUG_EXPR/VALUE_EXPR trees that live outside the IL.  */
4818 location_t loc
= EXPR_LOCATION (*tp
);
4819 if (LOCATION_BLOCK (loc
) != NULL
)
4825 /* Called via walk_tree. Verify locations of expressions. */
4828 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
/* DATA is the set of BLOCKs belonging to the function's block tree.  */
4830 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
/* A variable's DEBUG_EXPR must not reference any block.  */
4832 if (TREE_CODE (*tp
) == VAR_DECL
4833 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4835 tree t
= DECL_DEBUG_EXPR (*tp
);
4836 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
/* Likewise for the VALUE_EXPR of variables, parameters and results.  */
4840 if ((TREE_CODE (*tp
) == VAR_DECL
4841 || TREE_CODE (*tp
) == PARM_DECL
4842 || TREE_CODE (*tp
) == RESULT_DECL
)
4843 && DECL_HAS_VALUE_EXPR_P (*tp
))
4845 tree t
= DECL_VALUE_EXPR (*tp
);
4846 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4853 *walk_subtrees
= false;
/* Finally validate this expression's own location against the block
   tree.  */
4857 location_t loc
= EXPR_LOCATION (*tp
);
4858 if (verify_location (blocks
, loc
))
4864 /* Called via walk_gimple_op. Verify locations of expressions. */
4867 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
/* Unwrap the walk_stmt_info wrapper and delegate to the tree-level
   location check with the block set it carries.  */
4869 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4870 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4873 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4876 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
/* Depth-first walk over BLOCK_SUBBLOCKS/BLOCK_CHAIN, adding every
   reachable block to BLOCKS.  */
4879 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4882 collect_subblocks (blocks
, t
);
4886 /* Verify the GIMPLE statements in the CFG of FN. */
4889 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4894 timevar_push (TV_TREE_STMT_VERIFY
);
4895 hash_set
<void *> visited
;
4896 hash_set
<gimple
> visited_stmts
;
4898 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4899 hash_set
<tree
> blocks
;
4900 if (DECL_INITIAL (fn
->decl
))
4902 blocks
.add (DECL_INITIAL (fn
->decl
));
4903 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
4906 FOR_EACH_BB_FN (bb
, fn
)
4908 gimple_stmt_iterator gsi
;
4910 for (gphi_iterator gpi
= gsi_start_phis (bb
);
4914 gphi
*phi
= gpi
.phi ();
4918 visited_stmts
.add (phi
);
4920 if (gimple_bb (phi
) != bb
)
4922 error ("gimple_bb (phi) is set to a wrong basic block");
4926 err2
|= verify_gimple_phi (phi
);
4928 /* Only PHI arguments have locations. */
4929 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4931 error ("PHI node with location");
4935 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4937 tree arg
= gimple_phi_arg_def (phi
, i
);
4938 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4942 error ("incorrect sharing of tree nodes");
4943 debug_generic_expr (addr
);
4946 location_t loc
= gimple_phi_arg_location (phi
, i
);
4947 if (virtual_operand_p (gimple_phi_result (phi
))
4948 && loc
!= UNKNOWN_LOCATION
)
4950 error ("virtual PHI with argument locations");
4953 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
4956 debug_generic_expr (addr
);
4959 err2
|= verify_location (&blocks
, loc
);
4963 debug_gimple_stmt (phi
);
4967 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4969 gimple stmt
= gsi_stmt (gsi
);
4971 struct walk_stmt_info wi
;
4975 visited_stmts
.add (stmt
);
4977 if (gimple_bb (stmt
) != bb
)
4979 error ("gimple_bb (stmt) is set to a wrong basic block");
4983 err2
|= verify_gimple_stmt (stmt
);
4984 err2
|= verify_location (&blocks
, gimple_location (stmt
));
4986 memset (&wi
, 0, sizeof (wi
));
4987 wi
.info
= (void *) &visited
;
4988 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4991 error ("incorrect sharing of tree nodes");
4992 debug_generic_expr (addr
);
4996 memset (&wi
, 0, sizeof (wi
));
4997 wi
.info
= (void *) &blocks
;
4998 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
5001 debug_generic_expr (addr
);
5005 /* ??? Instead of not checking these stmts at all the walker
5006 should know its context via wi. */
5007 if (!is_gimple_debug (stmt
)
5008 && !is_gimple_omp (stmt
))
5010 memset (&wi
, 0, sizeof (wi
));
5011 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5014 debug_generic_expr (addr
);
5015 inform (gimple_location (stmt
), "in statement");
5020 /* If the statement is marked as part of an EH region, then it is
5021 expected that the statement could throw. Verify that when we
5022 have optimizations that simplify statements such that we prove
5023 that they cannot throw, that we update other data structures
5025 lp_nr
= lookup_stmt_eh_lp (stmt
);
5028 if (!stmt_could_throw_p (stmt
))
5032 error ("statement marked for throw, but doesn%'t");
5036 else if (!gsi_one_before_end_p (gsi
))
5038 error ("statement marked for throw in middle of block");
5044 debug_gimple_stmt (stmt
);
5049 eh_error_found
= false;
5050 hash_map
<gimple
, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5052 eh_table
->traverse
<hash_set
<gimple
> *, verify_eh_throw_stmt_node
>
5055 if (err
|| eh_error_found
)
5056 internal_error ("verify_gimple failed");
5058 verify_histograms ();
5059 timevar_pop (TV_TREE_STMT_VERIFY
);
5063 /* Verifies that the flow information is OK. */
5066 gimple_verify_flow_info (void)
5070 gimple_stmt_iterator gsi
;
5075 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5076 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5078 error ("ENTRY_BLOCK has IL associated with it");
5082 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5083 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5085 error ("EXIT_BLOCK has IL associated with it");
5089 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5090 if (e
->flags
& EDGE_FALLTHRU
)
5092 error ("fallthru to exit from bb %d", e
->src
->index
);
5096 FOR_EACH_BB_FN (bb
, cfun
)
5098 bool found_ctrl_stmt
= false;
5102 /* Skip labels on the start of basic block. */
5103 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5106 gimple prev_stmt
= stmt
;
5108 stmt
= gsi_stmt (gsi
);
5110 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5113 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5114 if (prev_stmt
&& DECL_NONLOCAL (label
))
5116 error ("nonlocal label ");
5117 print_generic_expr (stderr
, label
, 0);
5118 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5123 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5125 error ("EH landing pad label ");
5126 print_generic_expr (stderr
, label
, 0);
5127 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5132 if (label_to_block (label
) != bb
)
5135 print_generic_expr (stderr
, label
, 0);
5136 fprintf (stderr
, " to block does not match in bb %d",
5141 if (decl_function_context (label
) != current_function_decl
)
5144 print_generic_expr (stderr
, label
, 0);
5145 fprintf (stderr
, " has incorrect context in bb %d",
5151 /* Verify that body of basic block BB is free of control flow. */
5152 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5154 gimple stmt
= gsi_stmt (gsi
);
5156 if (found_ctrl_stmt
)
5158 error ("control flow in the middle of basic block %d",
5163 if (stmt_ends_bb_p (stmt
))
5164 found_ctrl_stmt
= true;
5166 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5169 print_generic_expr (stderr
, gimple_label_label (label_stmt
), 0);
5170 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5175 gsi
= gsi_last_bb (bb
);
5176 if (gsi_end_p (gsi
))
5179 stmt
= gsi_stmt (gsi
);
5181 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5184 err
|= verify_eh_edges (stmt
);
5186 if (is_ctrl_stmt (stmt
))
5188 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5189 if (e
->flags
& EDGE_FALLTHRU
)
5191 error ("fallthru edge after a control statement in bb %d",
5197 if (gimple_code (stmt
) != GIMPLE_COND
)
5199 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5200 after anything else but if statement. */
5201 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5202 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5204 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5210 switch (gimple_code (stmt
))
5217 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5221 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5222 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5223 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5224 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5225 || EDGE_COUNT (bb
->succs
) >= 3)
5227 error ("wrong outgoing edge flags at end of bb %d",
5235 if (simple_goto_p (stmt
))
5237 error ("explicit goto at end of bb %d", bb
->index
);
5242 /* FIXME. We should double check that the labels in the
5243 destination blocks have their address taken. */
5244 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5245 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5246 | EDGE_FALSE_VALUE
))
5247 || !(e
->flags
& EDGE_ABNORMAL
))
5249 error ("wrong outgoing edge flags at end of bb %d",
5257 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5259 /* ... fallthru ... */
5261 if (!single_succ_p (bb
)
5262 || (single_succ_edge (bb
)->flags
5263 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5264 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5266 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5269 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5271 error ("return edge does not point to exit in bb %d",
5279 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5284 n
= gimple_switch_num_labels (switch_stmt
);
5286 /* Mark all the destination basic blocks. */
5287 for (i
= 0; i
< n
; ++i
)
5289 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5290 basic_block label_bb
= label_to_block (lab
);
5291 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5292 label_bb
->aux
= (void *)1;
5295 /* Verify that the case labels are sorted. */
5296 prev
= gimple_switch_label (switch_stmt
, 0);
5297 for (i
= 1; i
< n
; ++i
)
5299 tree c
= gimple_switch_label (switch_stmt
, i
);
5302 error ("found default case not at the start of "
5308 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5310 error ("case labels not sorted: ");
5311 print_generic_expr (stderr
, prev
, 0);
5312 fprintf (stderr
," is greater than ");
5313 print_generic_expr (stderr
, c
, 0);
5314 fprintf (stderr
," but comes before it.\n");
5319 /* VRP will remove the default case if it can prove it will
5320 never be executed. So do not verify there always exists
5321 a default case here. */
5323 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5327 error ("extra outgoing edge %d->%d",
5328 bb
->index
, e
->dest
->index
);
5332 e
->dest
->aux
= (void *)2;
5333 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5334 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5336 error ("wrong outgoing edge flags at end of bb %d",
5342 /* Check that we have all of them. */
5343 for (i
= 0; i
< n
; ++i
)
5345 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5346 basic_block label_bb
= label_to_block (lab
);
5348 if (label_bb
->aux
!= (void *)2)
5350 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5355 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5356 e
->dest
->aux
= (void *)0;
5360 case GIMPLE_EH_DISPATCH
:
5361 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
5369 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5370 verify_dominators (CDI_DOMINATORS
);
5376 /* Updates phi nodes after creating a forwarder block joined
5377 by edge FALLTHRU. */
5380 gimple_make_forwarder_block (edge fallthru
)
5384 basic_block dummy
, bb
;
5388 dummy
= fallthru
->src
;
5389 bb
= fallthru
->dest
;
5391 if (single_pred_p (bb
))
5394 /* If we redirected a branch we must create new PHI nodes at the
5396 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5398 gphi
*phi
, *new_phi
;
5401 var
= gimple_phi_result (phi
);
5402 new_phi
= create_phi_node (var
, bb
);
5403 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5404 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5408 /* Add the arguments we have stored on edges. */
5409 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5414 flush_pending_stmts (e
);
5419 /* Return a non-special label in the head of basic block BLOCK.
5420 Create one if it doesn't exist. */
5423 gimple_block_label (basic_block bb
)
5425 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5430 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5432 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5435 label
= gimple_label_label (stmt
);
5436 if (!DECL_NONLOCAL (label
))
5439 gsi_move_before (&i
, &s
);
5444 label
= create_artificial_label (UNKNOWN_LOCATION
);
5445 stmt
= gimple_build_label (label
);
5446 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5451 /* Attempt to perform edge redirection by replacing a possibly complex
5452 jump instruction by a goto or by removing the jump completely.
5453 This can apply only if all edges now point to the same block. The
5454 parameters and return values are equivalent to
5455 redirect_edge_and_branch. */
5458 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5460 basic_block src
= e
->src
;
5461 gimple_stmt_iterator i
;
5464 /* We can replace or remove a complex jump only when we have exactly
5466 if (EDGE_COUNT (src
->succs
) != 2
5467 /* Verify that all targets will be TARGET. Specifically, the
5468 edge that is not E must also go to TARGET. */
5469 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5472 i
= gsi_last_bb (src
);
5476 stmt
= gsi_stmt (i
);
5478 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5480 gsi_remove (&i
, true);
5481 e
= ssa_redirect_edge (e
, target
);
5482 e
->flags
= EDGE_FALLTHRU
;
5490 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5491 edge representing the redirected branch. */
5494 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5496 basic_block bb
= e
->src
;
5497 gimple_stmt_iterator gsi
;
5501 if (e
->flags
& EDGE_ABNORMAL
)
5504 if (e
->dest
== dest
)
5507 if (e
->flags
& EDGE_EH
)
5508 return redirect_eh_edge (e
, dest
);
5510 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5512 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5517 gsi
= gsi_last_bb (bb
);
5518 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5520 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5523 /* For COND_EXPR, we only need to redirect the edge. */
5527 /* No non-abnormal edges should lead from a non-simple goto, and
5528 simple ones should be represented implicitly. */
5533 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5534 tree label
= gimple_block_label (dest
);
5535 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5537 /* If we have a list of cases associated with E, then use it
5538 as it's a lot faster than walking the entire case vector. */
5541 edge e2
= find_edge (e
->src
, dest
);
5548 CASE_LABEL (cases
) = label
;
5549 cases
= CASE_CHAIN (cases
);
5552 /* If there was already an edge in the CFG, then we need
5553 to move all the cases associated with E to E2. */
5556 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5558 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5559 CASE_CHAIN (cases2
) = first
;
5561 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5565 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5567 for (i
= 0; i
< n
; i
++)
5569 tree elt
= gimple_switch_label (switch_stmt
, i
);
5570 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5571 CASE_LABEL (elt
) = label
;
5579 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5580 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5583 for (i
= 0; i
< n
; ++i
)
5585 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5586 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5589 label
= gimple_block_label (dest
);
5590 TREE_VALUE (cons
) = label
;
5594 /* If we didn't find any label matching the former edge in the
5595 asm labels, we must be redirecting the fallthrough
5597 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5602 gsi_remove (&gsi
, true);
5603 e
->flags
|= EDGE_FALLTHRU
;
5606 case GIMPLE_OMP_RETURN
:
5607 case GIMPLE_OMP_CONTINUE
:
5608 case GIMPLE_OMP_SECTIONS_SWITCH
:
5609 case GIMPLE_OMP_FOR
:
5610 /* The edges from OMP constructs can be simply redirected. */
5613 case GIMPLE_EH_DISPATCH
:
5614 if (!(e
->flags
& EDGE_FALLTHRU
))
5615 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5618 case GIMPLE_TRANSACTION
:
5619 /* The ABORT edge has a stored label associated with it, otherwise
5620 the edges are simply redirectable. */
5622 gimple_transaction_set_label (as_a
<gtransaction
*> (stmt
),
5623 gimple_block_label (dest
));
5627 /* Otherwise it must be a fallthru edge, and we don't need to
5628 do anything besides redirecting it. */
5629 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5633 /* Update/insert PHI nodes as necessary. */
5635 /* Now update the edges in the CFG. */
5636 e
= ssa_redirect_edge (e
, dest
);
5641 /* Returns true if it is possible to remove edge E by redirecting
5642 it to the destination of the other edge from E->src. */
5645 gimple_can_remove_branch_p (const_edge e
)
5647 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5653 /* Simple wrapper, as we can always redirect fallthru edges. */
5656 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5658 e
= gimple_redirect_edge_and_branch (e
, dest
);
5665 /* Splits basic block BB after statement STMT (but at least after the
5666 labels). If STMT is NULL, BB is split just after the labels. */
5669 gimple_split_block (basic_block bb
, void *stmt
)
5671 gimple_stmt_iterator gsi
;
5672 gimple_stmt_iterator gsi_tgt
;
5679 new_bb
= create_empty_bb (bb
);
5681 /* Redirect the outgoing edges. */
5682 new_bb
->succs
= bb
->succs
;
5684 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5687 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5690 /* Move everything from GSI to the new basic block. */
5691 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5693 act
= gsi_stmt (gsi
);
5694 if (gimple_code (act
) == GIMPLE_LABEL
)
5707 if (gsi_end_p (gsi
))
5710 /* Split the statement list - avoid re-creating new containers as this
5711 brings ugly quadratic memory consumption in the inliner.
5712 (We are still quadratic since we need to update stmt BB pointers,
5714 gsi_split_seq_before (&gsi
, &list
);
5715 set_bb_seq (new_bb
, list
);
5716 for (gsi_tgt
= gsi_start (list
);
5717 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5718 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5724 /* Moves basic block BB after block AFTER. */
5727 gimple_move_block_after (basic_block bb
, basic_block after
)
5729 if (bb
->prev_bb
== after
)
5733 link_block (bb
, after
);
5739 /* Return TRUE if block BB has no executable statements, otherwise return
5743 gimple_empty_block_p (basic_block bb
)
5745 /* BB must have no executable statements. */
5746 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5749 if (gsi_end_p (gsi
))
5751 if (is_gimple_debug (gsi_stmt (gsi
)))
5752 gsi_next_nondebug (&gsi
);
5753 return gsi_end_p (gsi
);
5757 /* Split a basic block if it ends with a conditional branch and if the
5758 other part of the block is not empty. */
5761 gimple_split_block_before_cond_jump (basic_block bb
)
5763 gimple last
, split_point
;
5764 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5765 if (gsi_end_p (gsi
))
5767 last
= gsi_stmt (gsi
);
5768 if (gimple_code (last
) != GIMPLE_COND
5769 && gimple_code (last
) != GIMPLE_SWITCH
)
5771 gsi_prev_nondebug (&gsi
);
5772 split_point
= gsi_stmt (gsi
);
5773 return split_block (bb
, split_point
)->dest
;
5777 /* Return true if basic_block can be duplicated. */
5780 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5785 /* Create a duplicate of the basic block BB. NOTE: This does not
5786 preserve SSA form. */
5789 gimple_duplicate_bb (basic_block bb
)
5792 gimple_stmt_iterator gsi_tgt
;
5794 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5796 /* Copy the PHI nodes. We ignore PHI node arguments here because
5797 the incoming edges have not been setup yet. */
5798 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5804 copy
= create_phi_node (NULL_TREE
, new_bb
);
5805 create_new_def_for (gimple_phi_result (phi
), copy
,
5806 gimple_phi_result_ptr (copy
));
5807 gimple_set_uid (copy
, gimple_uid (phi
));
5810 gsi_tgt
= gsi_start_bb (new_bb
);
5811 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5815 def_operand_p def_p
;
5816 ssa_op_iter op_iter
;
5820 stmt
= gsi_stmt (gsi
);
5821 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5824 /* Don't duplicate label debug stmts. */
5825 if (gimple_debug_bind_p (stmt
)
5826 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5830 /* Create a new copy of STMT and duplicate STMT's virtual
5832 copy
= gimple_copy (stmt
);
5833 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5835 maybe_duplicate_eh_stmt (copy
, stmt
);
5836 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5838 /* When copying around a stmt writing into a local non-user
5839 aggregate, make sure it won't share stack slot with other
5841 lhs
= gimple_get_lhs (stmt
);
5842 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5844 tree base
= get_base_address (lhs
);
5846 && (TREE_CODE (base
) == VAR_DECL
5847 || TREE_CODE (base
) == RESULT_DECL
)
5848 && DECL_IGNORED_P (base
)
5849 && !TREE_STATIC (base
)
5850 && !DECL_EXTERNAL (base
)
5851 && (TREE_CODE (base
) != VAR_DECL
5852 || !DECL_HAS_VALUE_EXPR_P (base
)))
5853 DECL_NONSHAREABLE (base
) = 1;
5856 /* Create new names for all the definitions created by COPY and
5857 add replacement mappings for each new name. */
5858 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5859 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5865 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5868 add_phi_args_after_copy_edge (edge e_copy
)
5870 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5873 gphi
*phi
, *phi_copy
;
5875 gphi_iterator psi
, psi_copy
;
5877 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5880 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5882 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5883 dest
= get_bb_original (e_copy
->dest
);
5885 dest
= e_copy
->dest
;
5887 e
= find_edge (bb
, dest
);
5890 /* During loop unrolling the target of the latch edge is copied.
5891 In this case we are not looking for edge to dest, but to
5892 duplicated block whose original was dest. */
5893 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5895 if ((e
->dest
->flags
& BB_DUPLICATED
)
5896 && get_bb_original (e
->dest
) == dest
)
5900 gcc_assert (e
!= NULL
);
5903 for (psi
= gsi_start_phis (e
->dest
),
5904 psi_copy
= gsi_start_phis (e_copy
->dest
);
5906 gsi_next (&psi
), gsi_next (&psi_copy
))
5909 phi_copy
= psi_copy
.phi ();
5910 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5911 add_phi_arg (phi_copy
, def
, e_copy
,
5912 gimple_phi_arg_location_from_edge (phi
, e
));
5917 /* Basic block BB_COPY was created by code duplication. Add phi node
5918 arguments for edges going out of BB_COPY. The blocks that were
5919 duplicated have BB_DUPLICATED set. */
5922 add_phi_args_after_copy_bb (basic_block bb_copy
)
5927 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5929 add_phi_args_after_copy_edge (e_copy
);
5933 /* Blocks in REGION_COPY array of length N_REGION were created by
5934 duplication of basic blocks. Add phi node arguments for edges
5935 going from these blocks. If E_COPY is not NULL, also add
5936 phi node arguments for its destination.*/
5939 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5944 for (i
= 0; i
< n_region
; i
++)
5945 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5947 for (i
= 0; i
< n_region
; i
++)
5948 add_phi_args_after_copy_bb (region_copy
[i
]);
5950 add_phi_args_after_copy_edge (e_copy
);
5952 for (i
= 0; i
< n_region
; i
++)
5953 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5956 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5957 important exit edge EXIT. By important we mean that no SSA name defined
5958 inside region is live over the other exit edges of the region. All entry
5959 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5960 to the duplicate of the region. Dominance and loop information is
5961 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5962 UPDATE_DOMINANCE is false then we assume that the caller will update the
5963 dominance information after calling this function. The new basic
5964 blocks are stored to REGION_COPY in the same order as they had in REGION,
5965 provided that REGION_COPY is not NULL.
5966 The function returns false if it is unable to copy the region,
5970 gimple_duplicate_sese_region (edge entry
, edge exit
,
5971 basic_block
*region
, unsigned n_region
,
5972 basic_block
*region_copy
,
5973 bool update_dominance
)
5976 bool free_region_copy
= false, copying_header
= false;
5977 struct loop
*loop
= entry
->dest
->loop_father
;
5979 vec
<basic_block
> doms
;
5981 int total_freq
= 0, entry_freq
= 0;
5982 gcov_type total_count
= 0, entry_count
= 0;
5984 if (!can_copy_bbs_p (region
, n_region
))
5987 /* Some sanity checking. Note that we do not check for all possible
5988 missuses of the functions. I.e. if you ask to copy something weird,
5989 it will work, but the state of structures probably will not be
5991 for (i
= 0; i
< n_region
; i
++)
5993 /* We do not handle subloops, i.e. all the blocks must belong to the
5995 if (region
[i
]->loop_father
!= loop
)
5998 if (region
[i
] != entry
->dest
5999 && region
[i
] == loop
->header
)
6003 /* In case the function is used for loop header copying (which is the primary
6004 use), ensure that EXIT and its copy will be new latch and entry edges. */
6005 if (loop
->header
== entry
->dest
)
6007 copying_header
= true;
6009 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6012 for (i
= 0; i
< n_region
; i
++)
6013 if (region
[i
] != exit
->src
6014 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6018 initialize_original_copy_tables ();
6021 set_loop_copy (loop
, loop_outer (loop
));
6023 set_loop_copy (loop
, loop
);
6027 region_copy
= XNEWVEC (basic_block
, n_region
);
6028 free_region_copy
= true;
6031 /* Record blocks outside the region that are dominated by something
6033 if (update_dominance
)
6036 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6039 if (entry
->dest
->count
)
6041 total_count
= entry
->dest
->count
;
6042 entry_count
= entry
->count
;
6043 /* Fix up corner cases, to avoid division by zero or creation of negative
6045 if (entry_count
> total_count
)
6046 entry_count
= total_count
;
6050 total_freq
= entry
->dest
->frequency
;
6051 entry_freq
= EDGE_FREQUENCY (entry
);
6052 /* Fix up corner cases, to avoid division by zero or creation of negative
6054 if (total_freq
== 0)
6056 else if (entry_freq
> total_freq
)
6057 entry_freq
= total_freq
;
6060 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6061 split_edge_bb_loc (entry
), update_dominance
);
6064 scale_bbs_frequencies_gcov_type (region
, n_region
,
6065 total_count
- entry_count
,
6067 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6072 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6074 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6079 loop
->header
= exit
->dest
;
6080 loop
->latch
= exit
->src
;
6083 /* Redirect the entry and add the phi node arguments. */
6084 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6085 gcc_assert (redirected
!= NULL
);
6086 flush_pending_stmts (entry
);
6088 /* Concerning updating of dominators: We must recount dominators
6089 for entry block and its copy. Anything that is outside of the
6090 region, but was dominated by something inside needs recounting as
6092 if (update_dominance
)
6094 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6095 doms
.safe_push (get_bb_original (entry
->dest
));
6096 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6100 /* Add the other PHI node arguments. */
6101 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6103 if (free_region_copy
)
6106 free_original_copy_tables ();
6110 /* Checks if BB is part of the region defined by N_REGION BBS. */
6112 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6116 for (n
= 0; n
< n_region
; n
++)
6124 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6125 are stored to REGION_COPY in the same order in that they appear
6126 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6127 the region, EXIT an exit from it. The condition guarding EXIT
6128 is moved to ENTRY. Returns true if duplication succeeds, false
6154 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6155 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6156 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6159 bool free_region_copy
= false;
6160 struct loop
*loop
= exit
->dest
->loop_father
;
6161 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6162 basic_block switch_bb
, entry_bb
, nentry_bb
;
6163 vec
<basic_block
> doms
;
6164 int total_freq
= 0, exit_freq
= 0;
6165 gcov_type total_count
= 0, exit_count
= 0;
6166 edge exits
[2], nexits
[2], e
;
6167 gimple_stmt_iterator gsi
;
6170 basic_block exit_bb
;
6174 struct loop
*target
, *aloop
, *cloop
;
6176 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6178 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6180 if (!can_copy_bbs_p (region
, n_region
))
6183 initialize_original_copy_tables ();
6184 set_loop_copy (orig_loop
, loop
);
6187 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6189 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6191 cloop
= duplicate_loop (aloop
, target
);
6192 duplicate_subloops (aloop
, cloop
);
6198 region_copy
= XNEWVEC (basic_block
, n_region
);
6199 free_region_copy
= true;
6202 gcc_assert (!need_ssa_update_p (cfun
));
6204 /* Record blocks outside the region that are dominated by something
6206 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6208 if (exit
->src
->count
)
6210 total_count
= exit
->src
->count
;
6211 exit_count
= exit
->count
;
6212 /* Fix up corner cases, to avoid division by zero or creation of negative
6214 if (exit_count
> total_count
)
6215 exit_count
= total_count
;
6219 total_freq
= exit
->src
->frequency
;
6220 exit_freq
= EDGE_FREQUENCY (exit
);
6221 /* Fix up corner cases, to avoid division by zero or creation of negative
6223 if (total_freq
== 0)
6225 if (exit_freq
> total_freq
)
6226 exit_freq
= total_freq
;
6229 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6230 split_edge_bb_loc (exit
), true);
6233 scale_bbs_frequencies_gcov_type (region
, n_region
,
6234 total_count
- exit_count
,
6236 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6241 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6243 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6246 /* Create the switch block, and put the exit condition to it. */
6247 entry_bb
= entry
->dest
;
6248 nentry_bb
= get_bb_copy (entry_bb
);
6249 if (!last_stmt (entry
->src
)
6250 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6251 switch_bb
= entry
->src
;
6253 switch_bb
= split_edge (entry
);
6254 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6256 gsi
= gsi_last_bb (switch_bb
);
6257 cond_stmt
= last_stmt (exit
->src
);
6258 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6259 cond_stmt
= gimple_copy (cond_stmt
);
6261 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6263 sorig
= single_succ_edge (switch_bb
);
6264 sorig
->flags
= exits
[1]->flags
;
6265 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6267 /* Register the new edge from SWITCH_BB in loop exit lists. */
6268 rescan_loop_exit (snew
, true, false);
6270 /* Add the PHI node arguments. */
6271 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6273 /* Get rid of now superfluous conditions and associated edges (and phi node
6275 exit_bb
= exit
->dest
;
6277 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6278 PENDING_STMT (e
) = NULL
;
6280 /* The latch of ORIG_LOOP was copied, and so was the backedge
6281 to the original header. We redirect this backedge to EXIT_BB. */
6282 for (i
= 0; i
< n_region
; i
++)
6283 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6285 gcc_assert (single_succ_edge (region_copy
[i
]));
6286 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6287 PENDING_STMT (e
) = NULL
;
6288 for (psi
= gsi_start_phis (exit_bb
);
6293 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6294 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6297 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6298 PENDING_STMT (e
) = NULL
;
6300 /* Anything that is outside of the region, but was dominated by something
6301 inside needs to update dominance info. */
6302 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6304 /* Update the SSA web. */
6305 update_ssa (TODO_update_ssa
);
6307 if (free_region_copy
)
6310 free_original_copy_tables ();
6314 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6315 adding blocks when the dominator traversal reaches EXIT. This
6316 function silently assumes that ENTRY strictly dominates EXIT. */
6319 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6320 vec
<basic_block
> *bbs_p
)
6324 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6326 son
= next_dom_son (CDI_DOMINATORS
, son
))
6328 bbs_p
->safe_push (son
);
6330 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6334 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6335 The duplicates are recorded in VARS_MAP. */
6338 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6341 tree t
= *tp
, new_t
;
6342 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6344 if (DECL_CONTEXT (t
) == to_context
)
6348 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6354 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6355 add_local_decl (f
, new_t
);
6359 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6360 new_t
= copy_node (t
);
6362 DECL_CONTEXT (new_t
) = to_context
;
6373 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6374 VARS_MAP maps old ssa names and var_decls to the new ones. */
6377 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6382 gcc_assert (!virtual_operand_p (name
));
6384 tree
*loc
= vars_map
->get (name
);
6388 tree decl
= SSA_NAME_VAR (name
);
6391 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6392 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6393 decl
, SSA_NAME_DEF_STMT (name
));
6394 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6395 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6399 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6400 name
, SSA_NAME_DEF_STMT (name
));
6402 vars_map
->put (name
, new_name
);
6416 hash_map
<tree
, tree
> *vars_map
;
6417 htab_t new_label_map
;
6418 hash_map
<void *, void *> *eh_map
;
6422 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6423 contained in *TP if it has been ORIG_BLOCK previously and change the
6424 DECL_CONTEXT of every local variable referenced in *TP. */
6427 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6429 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6430 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6435 tree block
= TREE_BLOCK (t
);
6436 if (block
== p
->orig_block
6437 || (p
->orig_block
== NULL_TREE
6438 && block
!= NULL_TREE
))
6439 TREE_SET_BLOCK (t
, p
->new_block
);
6440 #ifdef ENABLE_CHECKING
6441 else if (block
!= NULL_TREE
)
6443 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6444 block
= BLOCK_SUPERCONTEXT (block
);
6445 gcc_assert (block
== p
->orig_block
);
6449 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6451 if (TREE_CODE (t
) == SSA_NAME
)
6452 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6453 else if (TREE_CODE (t
) == LABEL_DECL
)
6455 if (p
->new_label_map
)
6457 struct tree_map in
, *out
;
6459 out
= (struct tree_map
*)
6460 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6465 DECL_CONTEXT (t
) = p
->to_context
;
6467 else if (p
->remap_decls_p
)
6469 /* Replace T with its duplicate. T should no longer appear in the
6470 parent function, so this looks wasteful; however, it may appear
6471 in referenced_vars, and more importantly, as virtual operands of
6472 statements, and in alias lists of other variables. It would be
6473 quite difficult to expunge it from all those places. ??? It might
6474 suffice to do this for addressable variables. */
6475 if ((TREE_CODE (t
) == VAR_DECL
6476 && !is_global_var (t
))
6477 || TREE_CODE (t
) == CONST_DECL
)
6478 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6482 else if (TYPE_P (t
))
6488 /* Helper for move_stmt_r. Given an EH region number for the source
6489 function, map that to the duplicate EH regio number in the dest. */
6492 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6494 eh_region old_r
, new_r
;
6496 old_r
= get_eh_region_from_number (old_nr
);
6497 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6499 return new_r
->index
;
6502 /* Similar, but operate on INTEGER_CSTs. */
6505 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6509 old_nr
= tree_to_shwi (old_t_nr
);
6510 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6512 return build_int_cst (integer_type_node
, new_nr
);
6515 /* Like move_stmt_op, but for gimple statements.
6517 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6518 contained in the current statement in *GSI_P and change the
6519 DECL_CONTEXT of every local variable referenced in the current
6523 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6524 struct walk_stmt_info
*wi
)
6526 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6527 gimple stmt
= gsi_stmt (*gsi_p
);
6528 tree block
= gimple_block (stmt
);
6530 if (block
== p
->orig_block
6531 || (p
->orig_block
== NULL_TREE
6532 && block
!= NULL_TREE
))
6533 gimple_set_block (stmt
, p
->new_block
);
6535 switch (gimple_code (stmt
))
6538 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6540 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6541 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6542 switch (DECL_FUNCTION_CODE (fndecl
))
6544 case BUILT_IN_EH_COPY_VALUES
:
6545 r
= gimple_call_arg (stmt
, 1);
6546 r
= move_stmt_eh_region_tree_nr (r
, p
);
6547 gimple_call_set_arg (stmt
, 1, r
);
6550 case BUILT_IN_EH_POINTER
:
6551 case BUILT_IN_EH_FILTER
:
6552 r
= gimple_call_arg (stmt
, 0);
6553 r
= move_stmt_eh_region_tree_nr (r
, p
);
6554 gimple_call_set_arg (stmt
, 0, r
);
6565 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6566 int r
= gimple_resx_region (resx_stmt
);
6567 r
= move_stmt_eh_region_nr (r
, p
);
6568 gimple_resx_set_region (resx_stmt
, r
);
6572 case GIMPLE_EH_DISPATCH
:
6574 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6575 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6576 r
= move_stmt_eh_region_nr (r
, p
);
6577 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6581 case GIMPLE_OMP_RETURN
:
6582 case GIMPLE_OMP_CONTINUE
:
6585 if (is_gimple_omp (stmt
))
6587 /* Do not remap variables inside OMP directives. Variables
6588 referenced in clauses and directive header belong to the
6589 parent function and should not be moved into the child
6591 bool save_remap_decls_p
= p
->remap_decls_p
;
6592 p
->remap_decls_p
= false;
6593 *handled_ops_p
= true;
6595 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6598 p
->remap_decls_p
= save_remap_decls_p
;
6606 /* Move basic block BB from function CFUN to function DEST_FN. The
6607 block is moved out of the original linked list and placed after
6608 block AFTER in the new list. Also, the block is removed from the
6609 original array of blocks and placed in DEST_FN's array of blocks.
6610 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6611 updated to reflect the moved edges.
6613 The local variables are remapped to new instances, VARS_MAP is used
6614 to record the mapping. */
6617 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6618 basic_block after
, bool update_edge_count_p
,
6619 struct move_stmt_d
*d
)
6621 struct control_flow_graph
*cfg
;
6624 gimple_stmt_iterator si
;
6625 unsigned old_len
, new_len
;
6627 /* Remove BB from dominance structures. */
6628 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6630 /* Move BB from its current loop to the copy in the new function. */
6633 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6635 bb
->loop_father
= new_loop
;
6638 /* Link BB to the new linked list. */
6639 move_block_after (bb
, after
);
6641 /* Update the edge count in the corresponding flowgraphs. */
6642 if (update_edge_count_p
)
6643 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6645 cfun
->cfg
->x_n_edges
--;
6646 dest_cfun
->cfg
->x_n_edges
++;
6649 /* Remove BB from the original basic block array. */
6650 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6651 cfun
->cfg
->x_n_basic_blocks
--;
6653 /* Grow DEST_CFUN's basic block array if needed. */
6654 cfg
= dest_cfun
->cfg
;
6655 cfg
->x_n_basic_blocks
++;
6656 if (bb
->index
>= cfg
->x_last_basic_block
)
6657 cfg
->x_last_basic_block
= bb
->index
+ 1;
6659 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6660 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6662 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6663 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6666 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6668 /* Remap the variables in phi nodes. */
6669 for (gphi_iterator psi
= gsi_start_phis (bb
);
6672 gphi
*phi
= psi
.phi ();
6674 tree op
= PHI_RESULT (phi
);
6678 if (virtual_operand_p (op
))
6680 /* Remove the phi nodes for virtual operands (alias analysis will be
6681 run for the new function, anyway). */
6682 remove_phi_node (&psi
, true);
6686 SET_PHI_RESULT (phi
,
6687 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6688 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6690 op
= USE_FROM_PTR (use
);
6691 if (TREE_CODE (op
) == SSA_NAME
)
6692 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6695 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6697 location_t locus
= gimple_phi_arg_location (phi
, i
);
6698 tree block
= LOCATION_BLOCK (locus
);
6700 if (locus
== UNKNOWN_LOCATION
)
6702 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6704 if (d
->new_block
== NULL_TREE
)
6705 locus
= LOCATION_LOCUS (locus
);
6707 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6708 gimple_phi_arg_set_location (phi
, i
, locus
);
6715 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6717 gimple stmt
= gsi_stmt (si
);
6718 struct walk_stmt_info wi
;
6720 memset (&wi
, 0, sizeof (wi
));
6722 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6724 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
6726 tree label
= gimple_label_label (label_stmt
);
6727 int uid
= LABEL_DECL_UID (label
);
6729 gcc_assert (uid
> -1);
6731 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6732 if (old_len
<= (unsigned) uid
)
6734 new_len
= 3 * uid
/ 2 + 1;
6735 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6738 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6739 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6741 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6743 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6744 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6747 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6748 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6750 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6751 gimple_remove_stmt_histograms (cfun
, stmt
);
6753 /* We cannot leave any operands allocated from the operand caches of
6754 the current function. */
6755 free_stmt_operands (cfun
, stmt
);
6756 push_cfun (dest_cfun
);
6761 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6762 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6764 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6765 if (d
->orig_block
== NULL_TREE
6766 || block
== d
->orig_block
)
6767 e
->goto_locus
= d
->new_block
?
6768 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6769 LOCATION_LOCUS (e
->goto_locus
);
6773 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6774 the outermost EH region. Use REGION as the incoming base EH region. */
6777 find_outermost_region_in_block (struct function
*src_cfun
,
6778 basic_block bb
, eh_region region
)
6780 gimple_stmt_iterator si
;
6782 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6784 gimple stmt
= gsi_stmt (si
);
6785 eh_region stmt_region
;
6788 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6789 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6793 region
= stmt_region
;
6794 else if (stmt_region
!= region
)
6796 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6797 gcc_assert (region
!= NULL
);
6806 new_label_mapper (tree decl
, void *data
)
6808 htab_t hash
= (htab_t
) data
;
6812 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6814 m
= XNEW (struct tree_map
);
6815 m
->hash
= DECL_UID (decl
);
6816 m
->base
.from
= decl
;
6817 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6818 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6819 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6820 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6822 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6823 gcc_assert (*slot
== NULL
);
6830 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6834 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
6839 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6842 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6844 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6847 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6849 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6850 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6852 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6857 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6858 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6861 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6865 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6868 /* Discard it from the old loop array. */
6869 (*get_loops (fn1
))[loop
->num
] = NULL
;
6871 /* Place it in the new loop array, assigning it a new number. */
6872 loop
->num
= number_of_loops (fn2
);
6873 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6875 /* Recurse to children. */
6876 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6877 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6880 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6881 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6884 verify_sese (basic_block entry
, basic_block exit
, vec
<basic_block
> *bbs_p
)
6889 bitmap bbs
= BITMAP_ALLOC (NULL
);
6892 gcc_assert (entry
!= NULL
);
6893 gcc_assert (entry
!= exit
);
6894 gcc_assert (bbs_p
!= NULL
);
6896 gcc_assert (bbs_p
->length () > 0);
6898 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6899 bitmap_set_bit (bbs
, bb
->index
);
6901 gcc_assert (bitmap_bit_p (bbs
, entry
->index
));
6902 gcc_assert (exit
== NULL
|| bitmap_bit_p (bbs
, exit
->index
));
6904 FOR_EACH_VEC_ELT (*bbs_p
, i
, bb
)
6908 gcc_assert (single_pred_p (entry
));
6909 gcc_assert (!bitmap_bit_p (bbs
, single_pred (entry
)->index
));
6912 for (ei
= ei_start (bb
->preds
); !ei_end_p (ei
); ei_next (&ei
))
6915 gcc_assert (bitmap_bit_p (bbs
, e
->src
->index
));
6920 gcc_assert (single_succ_p (exit
));
6921 gcc_assert (!bitmap_bit_p (bbs
, single_succ (exit
)->index
));
6924 for (ei
= ei_start (bb
->succs
); !ei_end_p (ei
); ei_next (&ei
))
6927 gcc_assert (bitmap_bit_p (bbs
, e
->dest
->index
));
6935 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6936 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6937 single basic block in the original CFG and the new basic block is
6938 returned. DEST_CFUN must not have a CFG yet.
6940 Note that the region need not be a pure SESE region. Blocks inside
6941 the region may contain calls to abort/exit. The only restriction
6942 is that ENTRY_BB should be the only entry point and it must
6945 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6946 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6947 to the new function.
6949 All local variables referenced in the region are assumed to be in
6950 the corresponding BLOCK_VARS and unexpanded variable lists
6951 associated with DEST_CFUN. */
6954 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6955 basic_block exit_bb
, tree orig_block
)
6957 vec
<basic_block
> bbs
, dom_bbs
;
6958 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6959 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6960 struct function
*saved_cfun
= cfun
;
6961 int *entry_flag
, *exit_flag
;
6962 unsigned *entry_prob
, *exit_prob
;
6963 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6966 htab_t new_label_map
;
6967 hash_map
<void *, void *> *eh_map
;
6968 struct loop
*loop
= entry_bb
->loop_father
;
6969 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6970 struct move_stmt_d d
;
6972 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6974 gcc_assert (entry_bb
!= exit_bb
6976 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6978 /* Collect all the blocks in the region. Manually add ENTRY_BB
6979 because it won't be added by dfs_enumerate_from. */
6981 bbs
.safe_push (entry_bb
);
6982 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6983 #ifdef ENABLE_CHECKING
6984 verify_sese (entry_bb
, exit_bb
, &bbs
);
6987 /* The blocks that used to be dominated by something in BBS will now be
6988 dominated by the new block. */
6989 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6993 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6994 the predecessor edges to ENTRY_BB and the successor edges to
6995 EXIT_BB so that we can re-attach them to the new basic block that
6996 will replace the region. */
6997 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6998 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6999 entry_flag
= XNEWVEC (int, num_entry_edges
);
7000 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
7002 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
7004 entry_prob
[i
] = e
->probability
;
7005 entry_flag
[i
] = e
->flags
;
7006 entry_pred
[i
++] = e
->src
;
7012 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
7013 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
7014 exit_flag
= XNEWVEC (int, num_exit_edges
);
7015 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
7017 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
7019 exit_prob
[i
] = e
->probability
;
7020 exit_flag
[i
] = e
->flags
;
7021 exit_succ
[i
++] = e
->dest
;
7033 /* Switch context to the child function to initialize DEST_FN's CFG. */
7034 gcc_assert (dest_cfun
->cfg
== NULL
);
7035 push_cfun (dest_cfun
);
7037 init_empty_tree_cfg ();
7039 /* Initialize EH information for the new function. */
7041 new_label_map
= NULL
;
7044 eh_region region
= NULL
;
7046 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7047 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
7049 init_eh_for_function ();
7052 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
7053 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
7054 new_label_mapper
, new_label_map
);
7058 /* Initialize an empty loop tree. */
7059 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
7060 init_loops_structure (dest_cfun
, loops
, 1);
7061 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
7062 set_loops_for_fn (dest_cfun
, loops
);
7064 /* Move the outlined loop tree part. */
7065 num_nodes
= bbs
.length ();
7066 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7068 if (bb
->loop_father
->header
== bb
)
7070 struct loop
*this_loop
= bb
->loop_father
;
7071 struct loop
*outer
= loop_outer (this_loop
);
7073 /* If the SESE region contains some bbs ending with
7074 a noreturn call, those are considered to belong
7075 to the outermost loop in saved_cfun, rather than
7076 the entry_bb's loop_father. */
7080 num_nodes
-= this_loop
->num_nodes
;
7081 flow_loop_tree_node_remove (bb
->loop_father
);
7082 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7083 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7086 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7089 /* Remove loop exits from the outlined region. */
7090 if (loops_for_fn (saved_cfun
)->exits
)
7091 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7093 struct loops
*l
= loops_for_fn (saved_cfun
);
7095 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7098 l
->exits
->clear_slot (slot
);
7103 /* Adjust the number of blocks in the tree root of the outlined part. */
7104 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7106 /* Setup a mapping to be used by move_block_to_fn. */
7107 loop
->aux
= current_loops
->tree_root
;
7108 loop0
->aux
= current_loops
->tree_root
;
7112 /* Move blocks from BBS into DEST_CFUN. */
7113 gcc_assert (bbs
.length () >= 2);
7114 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7115 hash_map
<tree
, tree
> vars_map
;
7117 memset (&d
, 0, sizeof (d
));
7118 d
.orig_block
= orig_block
;
7119 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7120 d
.from_context
= cfun
->decl
;
7121 d
.to_context
= dest_cfun
->decl
;
7122 d
.vars_map
= &vars_map
;
7123 d
.new_label_map
= new_label_map
;
7125 d
.remap_decls_p
= true;
7127 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7129 /* No need to update edge counts on the last block. It has
7130 already been updated earlier when we detached the region from
7131 the original CFG. */
7132 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7138 /* Loop sizes are no longer correct, fix them up. */
7139 loop
->num_nodes
-= num_nodes
;
7140 for (struct loop
*outer
= loop_outer (loop
);
7141 outer
; outer
= loop_outer (outer
))
7142 outer
->num_nodes
-= num_nodes
;
7143 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7145 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7148 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7153 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7155 dest_cfun
->has_simduid_loops
= true;
7157 if (aloop
->force_vectorize
)
7158 dest_cfun
->has_force_vectorize_loops
= true;
7162 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7166 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7168 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7169 = BLOCK_SUBBLOCKS (orig_block
);
7170 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7171 block
; block
= BLOCK_CHAIN (block
))
7172 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7173 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7176 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7177 &vars_map
, dest_cfun
->decl
);
7180 htab_delete (new_label_map
);
7184 /* Rewire the entry and exit blocks. The successor to the entry
7185 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7186 the child function. Similarly, the predecessor of DEST_FN's
7187 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7188 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7189 various CFG manipulation function get to the right CFG.
7191 FIXME, this is silly. The CFG ought to become a parameter to
7193 push_cfun (dest_cfun
);
7194 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7196 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7199 /* Back in the original function, the SESE region has disappeared,
7200 create a new basic block in its place. */
7201 bb
= create_empty_bb (entry_pred
[0]);
7203 add_bb_to_loop (bb
, loop
);
7204 for (i
= 0; i
< num_entry_edges
; i
++)
7206 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7207 e
->probability
= entry_prob
[i
];
7210 for (i
= 0; i
< num_exit_edges
; i
++)
7212 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7213 e
->probability
= exit_prob
[i
];
7216 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7217 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7218 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7236 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7240 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7242 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7243 struct function
*dsf
;
7244 bool ignore_topmost_bind
= false, any_var
= false;
7247 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7248 && decl_is_tm_clone (fndecl
));
7249 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7251 current_function_decl
= fndecl
;
7252 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7254 arg
= DECL_ARGUMENTS (fndecl
);
7257 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7258 fprintf (file
, " ");
7259 print_generic_expr (file
, arg
, dump_flags
);
7260 if (flags
& TDF_VERBOSE
)
7261 print_node (file
, "", arg
, 4);
7262 if (DECL_CHAIN (arg
))
7263 fprintf (file
, ", ");
7264 arg
= DECL_CHAIN (arg
);
7266 fprintf (file
, ")\n");
7268 if (flags
& TDF_VERBOSE
)
7269 print_node (file
, "", fndecl
, 2);
7271 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7272 if (dsf
&& (flags
& TDF_EH
))
7273 dump_eh_tree (file
, dsf
);
7275 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7277 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7278 current_function_decl
= old_current_fndecl
;
7282 /* When GIMPLE is lowered, the variables are no longer available in
7283 BIND_EXPRs, so display them separately. */
7284 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7287 ignore_topmost_bind
= true;
7289 fprintf (file
, "{\n");
7290 if (!vec_safe_is_empty (fun
->local_decls
))
7291 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7293 print_generic_decl (file
, var
, flags
);
7294 if (flags
& TDF_VERBOSE
)
7295 print_node (file
, "", var
, 4);
7296 fprintf (file
, "\n");
7300 if (gimple_in_ssa_p (cfun
))
7301 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7303 tree name
= ssa_name (ix
);
7304 if (name
&& !SSA_NAME_VAR (name
))
7306 fprintf (file
, " ");
7307 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7308 fprintf (file
, " ");
7309 print_generic_expr (file
, name
, flags
);
7310 fprintf (file
, ";\n");
7317 if (fun
&& fun
->decl
== fndecl
7319 && basic_block_info_for_fn (fun
))
7321 /* If the CFG has been built, emit a CFG-based dump. */
7322 if (!ignore_topmost_bind
)
7323 fprintf (file
, "{\n");
7325 if (any_var
&& n_basic_blocks_for_fn (fun
))
7326 fprintf (file
, "\n");
7328 FOR_EACH_BB_FN (bb
, fun
)
7329 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7331 fprintf (file
, "}\n");
7333 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7335 /* The function is now in GIMPLE form but the CFG has not been
7336 built yet. Emit the single sequence of GIMPLE statements
7337 that make up its body. */
7338 gimple_seq body
= gimple_body (fndecl
);
7340 if (gimple_seq_first_stmt (body
)
7341 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7342 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7343 print_gimple_seq (file
, body
, 0, flags
);
7346 if (!ignore_topmost_bind
)
7347 fprintf (file
, "{\n");
7350 fprintf (file
, "\n");
7352 print_gimple_seq (file
, body
, 2, flags
);
7353 fprintf (file
, "}\n");
7360 /* Make a tree based dump. */
7361 chain
= DECL_SAVED_TREE (fndecl
);
7362 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7364 if (ignore_topmost_bind
)
7366 chain
= BIND_EXPR_BODY (chain
);
7374 if (!ignore_topmost_bind
)
7375 fprintf (file
, "{\n");
7380 fprintf (file
, "\n");
7382 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7383 if (ignore_topmost_bind
)
7384 fprintf (file
, "}\n");
7387 if (flags
& TDF_ENUMERATE_LOCALS
)
7388 dump_enumerated_decls (file
, flags
);
7389 fprintf (file
, "\n\n");
7391 current_function_decl
= old_current_fndecl
;
7394 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7397 debug_function (tree fn
, int flags
)
7399 dump_function_to_file (fn
, stderr
, flags
);
7403 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7406 print_pred_bbs (FILE *file
, basic_block bb
)
7411 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7412 fprintf (file
, "bb_%d ", e
->src
->index
);
7416 /* Print on FILE the indexes for the successors of basic_block BB. */
7419 print_succ_bbs (FILE *file
, basic_block bb
)
7424 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7425 fprintf (file
, "bb_%d ", e
->dest
->index
);
7428 /* Print to FILE the basic block BB following the VERBOSITY level. */
7431 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7433 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7434 memset ((void *) s_indent
, ' ', (size_t) indent
);
7435 s_indent
[indent
] = '\0';
7437 /* Print basic_block's header. */
7440 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7441 print_pred_bbs (file
, bb
);
7442 fprintf (file
, "}, succs = {");
7443 print_succ_bbs (file
, bb
);
7444 fprintf (file
, "})\n");
7447 /* Print basic_block's body. */
7450 fprintf (file
, "%s {\n", s_indent
);
7451 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7452 fprintf (file
, "%s }\n", s_indent
);
7456 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7458 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7459 VERBOSITY level this outputs the contents of the loop, or just its
7463 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7471 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7472 memset ((void *) s_indent
, ' ', (size_t) indent
);
7473 s_indent
[indent
] = '\0';
7475 /* Print loop's header. */
7476 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7478 fprintf (file
, "header = %d", loop
->header
->index
);
7481 fprintf (file
, "deleted)\n");
7485 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7487 fprintf (file
, ", multiple latches");
7488 fprintf (file
, ", niter = ");
7489 print_generic_expr (file
, loop
->nb_iterations
, 0);
7491 if (loop
->any_upper_bound
)
7493 fprintf (file
, ", upper_bound = ");
7494 print_decu (loop
->nb_iterations_upper_bound
, file
);
7497 if (loop
->any_estimate
)
7499 fprintf (file
, ", estimate = ");
7500 print_decu (loop
->nb_iterations_estimate
, file
);
7502 fprintf (file
, ")\n");
7504 /* Print loop's body. */
7507 fprintf (file
, "%s{\n", s_indent
);
7508 FOR_EACH_BB_FN (bb
, cfun
)
7509 if (bb
->loop_father
== loop
)
7510 print_loops_bb (file
, bb
, indent
, verbosity
);
7512 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7513 fprintf (file
, "%s}\n", s_indent
);
7517 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7518 spaces. Following VERBOSITY level this outputs the contents of the
7519 loop, or just its structure. */
7522 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7528 print_loop (file
, loop
, indent
, verbosity
);
7529 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7532 /* Follow a CFG edge from the entry point of the program, and on entry
7533 of a loop, pretty print the loop structure on FILE. */
7536 print_loops (FILE *file
, int verbosity
)
7540 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7541 if (bb
&& bb
->loop_father
)
7542 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7548 debug (struct loop
&ref
)
7550 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
/* Pointer overload of debug () for a loop; prints "<nil>" when PTR is
   null.  NOTE(review): original lines 7555-7558 (the non-null branch,
   presumably delegating to the reference overload) are missing from
   this extraction -- confirm against the upstream source.  */
7554 debug (struct loop
*ptr
)
7559 fprintf (stderr
, "<nil>\n");
7562 /* Dump a loop verbosely. */
7565 debug_verbose (struct loop
&ref
)
7567 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
/* Pointer overload of debug_verbose () for a loop; prints "<nil>" when
   PTR is null.  NOTE(review): original lines 7572-7575 (the non-null
   branch) are missing from this extraction; in upstream GCC this
   overload delegates to debug (*ptr) rather than debug_verbose, which
   dumps at verbosity 0 -- confirm against the upstream source.  */
7571 debug_verbose (struct loop
*ptr
)
7576 fprintf (stderr
, "<nil>\n");
7580 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7583 debug_loops (int verbosity
)
7585 print_loops (stderr
, verbosity
);
7588 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7591 debug_loop (struct loop
*loop
, int verbosity
)
7593 print_loop (stderr
, loop
, 0, verbosity
);
7596 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7600 debug_loop_num (unsigned num
, int verbosity
)
7602 debug_loop (get_loop (cfun
, num
), verbosity
);
7605 /* Return true if BB ends with a call, possibly followed by some
7606 instructions that must stay with the call. Return false,
7610 gimple_block_ends_with_call_p (basic_block bb
)
7612 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7613 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7617 /* Return true if BB ends with a conditional branch. Return false,
7621 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7623 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7624 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7628 /* Return true if we need to add fake edge to exit at statement T.
7629 Helper function for gimple_flow_call_edges_add. */
7632 need_fake_edge_p (gimple t
)
7634 tree fndecl
= NULL_TREE
;
7637 /* NORETURN and LONGJMP calls already have an edge to exit.
7638 CONST and PURE calls do not need one.
7639 We don't currently check for CONST and PURE here, although
7640 it would be a good idea, because those attributes are
7641 figured out from the RTL in mark_constant_function, and
7642 the counter incrementation code from -fprofile-arcs
7643 leads to different results from -fbranch-probabilities. */
7644 if (is_gimple_call (t
))
7646 fndecl
= gimple_call_fndecl (t
);
7647 call_flags
= gimple_call_flags (t
);
7650 if (is_gimple_call (t
)
7652 && DECL_BUILT_IN (fndecl
)
7653 && (call_flags
& ECF_NOTHROW
)
7654 && !(call_flags
& ECF_RETURNS_TWICE
)
7655 /* fork() doesn't really return twice, but the effect of
7656 wrapping it in __gcov_fork() which calls __gcov_flush()
7657 and clears the counters before forking has the same
7658 effect as returning twice. Force a fake edge. */
7659 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7660 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7663 if (is_gimple_call (t
))
7669 if (!(call_flags
& ECF_NORETURN
))
7673 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7674 if ((e
->flags
& EDGE_FAKE
) == 0)
7678 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
7679 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
7686 /* Add fake edges to the function exit for any non constant and non
7687 noreturn calls (or noreturn calls with EH/abnormal edges),
7688 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7689 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7692 The goal is to expose cases in which entering a basic block does
7693 not imply that all subsequent instructions must be executed. */
7696 gimple_flow_call_edges_add (sbitmap blocks
)
7699 int blocks_split
= 0;
7700 int last_bb
= last_basic_block_for_fn (cfun
);
7701 bool check_last_block
= false;
7703 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7707 check_last_block
= true;
7709 check_last_block
= bitmap_bit_p (blocks
,
7710 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7712 /* In the last basic block, before epilogue generation, there will be
7713 a fallthru edge to EXIT. Special care is required if the last insn
7714 of the last basic block is a call because make_edge folds duplicate
7715 edges, which would result in the fallthru edge also being marked
7716 fake, which would result in the fallthru edge being removed by
7717 remove_fake_edges, which would result in an invalid CFG.
7719 Moreover, we can't elide the outgoing fake edge, since the block
7720 profiler needs to take this into account in order to solve the minimal
7721 spanning tree in the case that the call doesn't return.
7723 Handle this by adding a dummy instruction in a new last basic block. */
7724 if (check_last_block
)
7726 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7727 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7730 if (!gsi_end_p (gsi
))
7733 if (t
&& need_fake_edge_p (t
))
7737 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7740 gsi_insert_on_edge (e
, gimple_build_nop ());
7741 gsi_commit_edge_inserts ();
7746 /* Now add fake edges to the function exit for any non constant
7747 calls since there is no way that we can determine if they will
7749 for (i
= 0; i
< last_bb
; i
++)
7751 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7752 gimple_stmt_iterator gsi
;
7753 gimple stmt
, last_stmt
;
7758 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7761 gsi
= gsi_last_nondebug_bb (bb
);
7762 if (!gsi_end_p (gsi
))
7764 last_stmt
= gsi_stmt (gsi
);
7767 stmt
= gsi_stmt (gsi
);
7768 if (need_fake_edge_p (stmt
))
7772 /* The handling above of the final block before the
7773 epilogue should be enough to verify that there is
7774 no edge to the exit block in CFG already.
7775 Calling make_edge in such case would cause us to
7776 mark that edge as fake and remove it later. */
7777 #ifdef ENABLE_CHECKING
7778 if (stmt
== last_stmt
)
7780 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7781 gcc_assert (e
== NULL
);
7785 /* Note that the following may create a new basic block
7786 and renumber the existing basic blocks. */
7787 if (stmt
!= last_stmt
)
7789 e
= split_block (bb
, stmt
);
7793 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7797 while (!gsi_end_p (gsi
));
7802 verify_flow_info ();
7804 return blocks_split
;
7807 /* Removes edge E and all the blocks dominated by it, and updates dominance
7808 information. The IL in E->src needs to be updated separately.
7809 If dominance info is not available, only the edge E is removed.*/
7812 remove_edge_and_dominated_blocks (edge e
)
7814 vec
<basic_block
> bbs_to_remove
= vNULL
;
7815 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7819 bool none_removed
= false;
7821 basic_block bb
, dbb
;
7824 if (!dom_info_available_p (CDI_DOMINATORS
))
7830 /* No updating is needed for edges to exit. */
7831 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7833 if (cfgcleanup_altered_bbs
)
7834 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7839 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7840 that is not dominated by E->dest, then this set is empty. Otherwise,
7841 all the basic blocks dominated by E->dest are removed.
7843 Also, to DF_IDOM we store the immediate dominators of the blocks in
7844 the dominance frontier of E (i.e., of the successors of the
7845 removed blocks, if there are any, and of E->dest otherwise). */
7846 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7851 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7853 none_removed
= true;
7858 df
= BITMAP_ALLOC (NULL
);
7859 df_idom
= BITMAP_ALLOC (NULL
);
7862 bitmap_set_bit (df_idom
,
7863 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7866 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7867 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7869 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7871 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7872 bitmap_set_bit (df
, f
->dest
->index
);
7875 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7876 bitmap_clear_bit (df
, bb
->index
);
7878 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7880 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7881 bitmap_set_bit (df_idom
,
7882 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7886 if (cfgcleanup_altered_bbs
)
7888 /* Record the set of the altered basic blocks. */
7889 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7890 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7893 /* Remove E and the cancelled blocks. */
7898 /* Walk backwards so as to get a chance to substitute all
7899 released DEFs into debug stmts. See
7900 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7902 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7903 delete_basic_block (bbs_to_remove
[i
]);
7906 /* Update the dominance information. The immediate dominator may change only
7907 for blocks whose immediate dominator belongs to DF_IDOM:
7909 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7910 removal. Let Z the arbitrary block such that idom(Z) = Y and
7911 Z dominates X after the removal. Before removal, there exists a path P
7912 from Y to X that avoids Z. Let F be the last edge on P that is
7913 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7914 dominates W, and because of P, Z does not dominate W), and W belongs to
7915 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7916 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7918 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7919 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7921 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7922 bbs_to_fix_dom
.safe_push (dbb
);
7925 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7928 BITMAP_FREE (df_idom
);
7929 bbs_to_remove
.release ();
7930 bbs_to_fix_dom
.release ();
7933 /* Purge dead EH edges from basic block BB. */
7936 gimple_purge_dead_eh_edges (basic_block bb
)
7938 bool changed
= false;
7941 gimple stmt
= last_stmt (bb
);
7943 if (stmt
&& stmt_can_throw_internal (stmt
))
7946 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7948 if (e
->flags
& EDGE_EH
)
7950 remove_edge_and_dominated_blocks (e
);
7960 /* Purge dead EH edges from basic block listed in BLOCKS. */
7963 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7965 bool changed
= false;
7969 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7971 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7973 /* Earlier gimple_purge_dead_eh_edges could have removed
7974 this basic block already. */
7975 gcc_assert (bb
|| changed
);
7977 changed
|= gimple_purge_dead_eh_edges (bb
);
7983 /* Purge dead abnormal call edges from basic block BB. */
7986 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7988 bool changed
= false;
7991 gimple stmt
= last_stmt (bb
);
7993 if (!cfun
->has_nonlocal_label
7994 && !cfun
->calls_setjmp
)
7997 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
8000 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
8002 if (e
->flags
& EDGE_ABNORMAL
)
8004 if (e
->flags
& EDGE_FALLTHRU
)
8005 e
->flags
&= ~EDGE_ABNORMAL
;
8007 remove_edge_and_dominated_blocks (e
);
8017 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8020 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
8022 bool changed
= false;
8026 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
8028 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
8030 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8031 this basic block already. */
8032 gcc_assert (bb
|| changed
);
8034 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
8040 /* This function is called whenever a new edge is created or
8044 gimple_execute_on_growing_pred (edge e
)
8046 basic_block bb
= e
->dest
;
8048 if (!gimple_seq_empty_p (phi_nodes (bb
)))
8049 reserve_phi_args_for_new_edge (bb
);
8052 /* This function is called immediately before edge E is removed from
8053 the edge vector E->dest->preds. */
8056 gimple_execute_on_shrinking_pred (edge e
)
8058 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
8059 remove_phi_args (e
);
8062 /*---------------------------------------------------------------------------
8063 Helper functions for Loop versioning
8064 ---------------------------------------------------------------------------*/
8066 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8067 of 'first'. Both of them are dominated by 'new_head' basic block. When
8068 'new_head' was created by 'second's incoming edge it received phi arguments
8069 on the edge by split_edge(). Later, additional edge 'e' was created to
8070 connect 'new_head' and 'first'. Now this routine adds phi args on this
8071 additional edge 'e' that new_head to second edge received as part of edge
8075 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8076 basic_block new_head
, edge e
)
8079 gphi_iterator psi1
, psi2
;
8081 edge e2
= find_edge (new_head
, second
);
8083 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8084 edge, we should always have an edge from NEW_HEAD to SECOND. */
8085 gcc_assert (e2
!= NULL
);
8087 /* Browse all 'second' basic block phi nodes and add phi args to
8088 edge 'e' for 'first' head. PHI args are always in correct order. */
8090 for (psi2
= gsi_start_phis (second
),
8091 psi1
= gsi_start_phis (first
);
8092 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8093 gsi_next (&psi2
), gsi_next (&psi1
))
8097 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8098 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8103 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8104 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8105 the destination of the ELSE part. */
8108 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8109 basic_block second_head ATTRIBUTE_UNUSED
,
8110 basic_block cond_bb
, void *cond_e
)
8112 gimple_stmt_iterator gsi
;
8113 gimple new_cond_expr
;
8114 tree cond_expr
= (tree
) cond_e
;
8117 /* Build new conditional expr */
8118 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8119 NULL_TREE
, NULL_TREE
);
8121 /* Add new cond in cond_bb. */
8122 gsi
= gsi_last_bb (cond_bb
);
8123 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8125 /* Adjust edges appropriately to connect new head with first head
8126 as well as second head. */
8127 e0
= single_succ_edge (cond_bb
);
8128 e0
->flags
&= ~EDGE_FALLTHRU
;
8129 e0
->flags
|= EDGE_FALSE_VALUE
;
8133 /* Do book-keeping of basic block BB for the profile consistency checker.
8134 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8135 then do post-pass accounting. Store the counting in RECORD. */
8137 gimple_account_profile_record (basic_block bb
, int after_pass
,
8138 struct profile_record
*record
)
8140 gimple_stmt_iterator i
;
8141 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8143 record
->size
[after_pass
]
8144 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8145 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8146 record
->time
[after_pass
]
8147 += estimate_num_insns (gsi_stmt (i
),
8148 &eni_time_weights
) * bb
->count
;
8149 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8150 record
->time
[after_pass
]
8151 += estimate_num_insns (gsi_stmt (i
),
8152 &eni_time_weights
) * bb
->frequency
;
8156 struct cfg_hooks gimple_cfg_hooks
= {
8158 gimple_verify_flow_info
,
8159 gimple_dump_bb
, /* dump_bb */
8160 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8161 create_bb
, /* create_basic_block */
8162 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8163 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8164 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8165 remove_bb
, /* delete_basic_block */
8166 gimple_split_block
, /* split_block */
8167 gimple_move_block_after
, /* move_block_after */
8168 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8169 gimple_merge_blocks
, /* merge_blocks */
8170 gimple_predict_edge
, /* predict_edge */
8171 gimple_predicted_by_p
, /* predicted_by_p */
8172 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8173 gimple_duplicate_bb
, /* duplicate_block */
8174 gimple_split_edge
, /* split_edge */
8175 gimple_make_forwarder_block
, /* make_forward_block */
8176 NULL
, /* tidy_fallthru_edge */
8177 NULL
, /* force_nonfallthru */
8178 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8179 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8180 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8181 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8182 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8183 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8184 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8185 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8186 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8187 flush_pending_stmts
, /* flush_pending_stmts */
8188 gimple_empty_block_p
, /* block_empty_p */
8189 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8190 gimple_account_profile_record
,
8194 /* Split all critical edges. */
8197 split_critical_edges (void)
8203 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8204 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8205 mappings around the calls to split_edge. */
8206 start_recording_case_labels ();
8207 FOR_ALL_BB_FN (bb
, cfun
)
8209 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8211 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8213 /* PRE inserts statements to edges and expects that
8214 since split_critical_edges was done beforehand, committing edge
8215 insertions will not split more edges. In addition to critical
8216 edges we must split edges that have multiple successors and
8217 end by control flow statements, such as RESX.
8218 Go ahead and split them too. This matches the logic in
8219 gimple_find_edge_insert_loc. */
8220 else if ((!single_pred_p (e
->dest
)
8221 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8222 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8223 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8224 && !(e
->flags
& EDGE_ABNORMAL
))
8226 gimple_stmt_iterator gsi
;
8228 gsi
= gsi_last_bb (e
->src
);
8229 if (!gsi_end_p (gsi
)
8230 && stmt_ends_bb_p (gsi_stmt (gsi
))
8231 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8232 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8238 end_recording_case_labels ();
8244 const pass_data pass_data_split_crit_edges
=
8246 GIMPLE_PASS
, /* type */
8247 "crited", /* name */
8248 OPTGROUP_NONE
, /* optinfo_flags */
8249 TV_TREE_SPLIT_EDGES
, /* tv_id */
8250 PROP_cfg
, /* properties_required */
8251 PROP_no_crit_edges
, /* properties_provided */
8252 0, /* properties_destroyed */
8253 0, /* todo_flags_start */
8254 0, /* todo_flags_finish */
8257 class pass_split_crit_edges
: public gimple_opt_pass
8260 pass_split_crit_edges (gcc::context
*ctxt
)
8261 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8264 /* opt_pass methods: */
8265 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8267 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8268 }; // class pass_split_crit_edges
8273 make_pass_split_crit_edges (gcc::context
*ctxt
)
8275 return new pass_split_crit_edges (ctxt
);
8279 /* Insert COND expression which is GIMPLE_COND after STMT
8280 in basic block BB with appropriate basic block split
8281 and creation of a new conditionally executed basic block.
8282 Return created basic block. */
8284 insert_cond_bb (basic_block bb
, gimple stmt
, gimple cond
)
8286 edge fall
= split_block (bb
, stmt
);
8287 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8290 /* Insert cond statement. */
8291 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8292 if (gsi_end_p (iter
))
8293 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8295 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8297 /* Create conditionally executed block. */
8298 new_bb
= create_empty_bb (bb
);
8299 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8300 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8302 /* Fix edge for split bb. */
8303 fall
->flags
= EDGE_FALSE_VALUE
;
8305 /* Update dominance info. */
8306 if (dom_info_available_p (CDI_DOMINATORS
))
8308 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8309 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8312 /* Update loop info. */
8314 add_bb_to_loop (new_bb
, bb
->loop_father
);
8319 /* Build a ternary operation and gimplify it. Emit code before GSI.
8320 Return the gimple_val holding the result. */
8323 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8324 tree type
, tree a
, tree b
, tree c
)
8327 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8329 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8332 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8336 /* Build a binary operation and gimplify it. Emit code before GSI.
8337 Return the gimple_val holding the result. */
8340 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8341 tree type
, tree a
, tree b
)
8345 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8348 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8352 /* Build a unary operation and gimplify it. Emit code before GSI.
8353 Return the gimple_val holding the result. */
8356 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8361 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8364 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8370 /* Given a basic block B which ends with a conditional and has
8371 precisely two successors, determine which of the edges is taken if
8372 the conditional is true and which is taken if the conditional is
8373 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8376 extract_true_false_edges_from_block (basic_block b
,
8380 edge e
= EDGE_SUCC (b
, 0);
8382 if (e
->flags
& EDGE_TRUE_VALUE
)
8385 *false_edge
= EDGE_SUCC (b
, 1);
8390 *true_edge
= EDGE_SUCC (b
, 1);
8394 /* Emit return warnings. */
8398 const pass_data pass_data_warn_function_return
=
8400 GIMPLE_PASS
, /* type */
8401 "*warn_function_return", /* name */
8402 OPTGROUP_NONE
, /* optinfo_flags */
8403 TV_NONE
, /* tv_id */
8404 PROP_cfg
, /* properties_required */
8405 0, /* properties_provided */
8406 0, /* properties_destroyed */
8407 0, /* todo_flags_start */
8408 0, /* todo_flags_finish */
8411 class pass_warn_function_return
: public gimple_opt_pass
8414 pass_warn_function_return (gcc::context
*ctxt
)
8415 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8418 /* opt_pass methods: */
8419 virtual unsigned int execute (function
*);
8421 }; // class pass_warn_function_return
8424 pass_warn_function_return::execute (function
*fun
)
8426 source_location location
;
8431 if (!targetm
.warn_func_return (fun
->decl
))
8434 /* If we have a path to EXIT, then we do return. */
8435 if (TREE_THIS_VOLATILE (fun
->decl
)
8436 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8438 location
= UNKNOWN_LOCATION
;
8439 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8441 last
= last_stmt (e
->src
);
8442 if ((gimple_code (last
) == GIMPLE_RETURN
8443 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8444 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8447 if (location
== UNKNOWN_LOCATION
)
8448 location
= cfun
->function_end_locus
;
8449 warning_at (location
, 0, "%<noreturn%> function does return");
8452 /* If we see "return;" in some basic block, then we do reach the end
8453 without returning a value. */
8454 else if (warn_return_type
8455 && !TREE_NO_WARNING (fun
->decl
)
8456 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8457 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8459 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8461 gimple last
= last_stmt (e
->src
);
8462 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8464 && gimple_return_retval (return_stmt
) == NULL
8465 && !gimple_no_warning_p (last
))
8467 location
= gimple_location (last
);
8468 if (location
== UNKNOWN_LOCATION
)
8469 location
= fun
->function_end_locus
;
8470 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8471 TREE_NO_WARNING (fun
->decl
) = 1;
8482 make_pass_warn_function_return (gcc::context
*ctxt
)
8484 return new pass_warn_function_return (ctxt
);
8487 /* Walk a gimplified function and warn for functions whose return value is
8488 ignored and attribute((warn_unused_result)) is set. This is done before
8489 inlining, so we don't have to worry about that. */
8492 do_warn_unused_result (gimple_seq seq
)
8495 gimple_stmt_iterator i
;
8497 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8499 gimple g
= gsi_stmt (i
);
8501 switch (gimple_code (g
))
8504 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8507 do_warn_unused_result (gimple_try_eval (g
));
8508 do_warn_unused_result (gimple_try_cleanup (g
));
8511 do_warn_unused_result (gimple_catch_handler (
8512 as_a
<gcatch
*> (g
)));
8514 case GIMPLE_EH_FILTER
:
8515 do_warn_unused_result (gimple_eh_filter_failure (g
));
8519 if (gimple_call_lhs (g
))
8521 if (gimple_call_internal_p (g
))
8524 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8525 LHS. All calls whose value is ignored should be
8526 represented like this. Look for the attribute. */
8527 fdecl
= gimple_call_fndecl (g
);
8528 ftype
= gimple_call_fntype (g
);
8530 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8532 location_t loc
= gimple_location (g
);
8535 warning_at (loc
, OPT_Wunused_result
,
8536 "ignoring return value of %qD, "
8537 "declared with attribute warn_unused_result",
8540 warning_at (loc
, OPT_Wunused_result
,
8541 "ignoring return value of function "
8542 "declared with attribute warn_unused_result");
8547 /* Not a container, not a call, or a call whose value is used. */
8555 const pass_data pass_data_warn_unused_result
=
8557 GIMPLE_PASS
, /* type */
8558 "*warn_unused_result", /* name */
8559 OPTGROUP_NONE
, /* optinfo_flags */
8560 TV_NONE
, /* tv_id */
8561 PROP_gimple_any
, /* properties_required */
8562 0, /* properties_provided */
8563 0, /* properties_destroyed */
8564 0, /* todo_flags_start */
8565 0, /* todo_flags_finish */
8568 class pass_warn_unused_result
: public gimple_opt_pass
8571 pass_warn_unused_result (gcc::context
*ctxt
)
8572 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8575 /* opt_pass methods: */
8576 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8577 virtual unsigned int execute (function
*)
8579 do_warn_unused_result (gimple_body (current_function_decl
));
8583 }; // class pass_warn_unused_result
8588 make_pass_warn_unused_result (gcc::context
*ctxt
)
8590 return new pass_warn_unused_result (ctxt
);
8593 /* IPA passes, compilation of earlier functions or inlining
8594 might have changed some properties, such as marked functions nothrow,
8595 pure, const or noreturn.
8596 Remove redundant edges and basic blocks, and create new ones if necessary.
8598 This pass can't be executed as stand alone pass from pass manager, because
8599 in between inlining and this fixup the verify_flow_info would fail. */
8602 execute_fixup_cfg (void)
8605 gimple_stmt_iterator gsi
;
8607 gcov_type count_scale
;
8612 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl
)->count
,
8613 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8615 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8616 cgraph_node::get (current_function_decl
)->count
;
8617 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8618 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8621 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8622 e
->count
= apply_scale (e
->count
, count_scale
);
8624 FOR_EACH_BB_FN (bb
, cfun
)
8626 bb
->count
= apply_scale (bb
->count
, count_scale
);
8627 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8629 gimple stmt
= gsi_stmt (gsi
);
8630 tree decl
= is_gimple_call (stmt
)
8631 ? gimple_call_fndecl (stmt
)
8635 int flags
= gimple_call_flags (stmt
);
8636 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8638 if (gimple_purge_dead_abnormal_call_edges (bb
))
8639 todo
|= TODO_cleanup_cfg
;
8641 if (gimple_in_ssa_p (cfun
))
8643 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8648 if (flags
& ECF_NORETURN
8649 && fixup_noreturn_call (stmt
))
8650 todo
|= TODO_cleanup_cfg
;
8653 /* Remove stores to variables we marked write-only.
8654 Keep access when store has side effect, i.e. in case when source
8656 if (gimple_store_p (stmt
)
8657 && !gimple_has_side_effects (stmt
))
8659 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8661 if (TREE_CODE (lhs
) == VAR_DECL
8662 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8663 && varpool_node::get (lhs
)->writeonly
)
8665 unlink_stmt_vdef (stmt
);
8666 gsi_remove (&gsi
, true);
8667 release_defs (stmt
);
8668 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8672 /* For calls we can simply remove LHS when it is known
8673 to be write-only. */
8674 if (is_gimple_call (stmt
)
8675 && gimple_get_lhs (stmt
))
8677 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8679 if (TREE_CODE (lhs
) == VAR_DECL
8680 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8681 && varpool_node::get (lhs
)->writeonly
)
8683 gimple_call_set_lhs (stmt
, NULL
);
8685 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8689 if (maybe_clean_eh_stmt (stmt
)
8690 && gimple_purge_dead_eh_edges (bb
))
8691 todo
|= TODO_cleanup_cfg
;
8695 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8696 e
->count
= apply_scale (e
->count
, count_scale
);
8698 /* If we have a basic block with no successors that does not
8699 end with a control statement or a noreturn call end it with
8700 a call to __builtin_unreachable. This situation can occur
8701 when inlining a noreturn call that does in fact return. */
8702 if (EDGE_COUNT (bb
->succs
) == 0)
8704 gimple stmt
= last_stmt (bb
);
8706 || (!is_ctrl_stmt (stmt
)
8707 && (!is_gimple_call (stmt
)
8708 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8710 if (stmt
&& is_gimple_call (stmt
))
8711 gimple_call_set_ctrl_altering (stmt
, false);
8712 stmt
= gimple_build_call
8713 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8714 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8715 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8719 if (count_scale
!= REG_BR_PROB_BASE
)
8720 compute_function_frequency ();
8722 /* Dump a textual representation of the flowgraph. */
8724 gimple_dump_cfg (dump_file
, dump_flags
);
8727 && (todo
& TODO_cleanup_cfg
))
8728 loops_state_set (LOOPS_NEED_FIXUP
);
8735 const pass_data pass_data_fixup_cfg
=
8737 GIMPLE_PASS
, /* type */
8738 "*free_cfg_annotations", /* name */
8739 OPTGROUP_NONE
, /* optinfo_flags */
8740 TV_NONE
, /* tv_id */
8741 PROP_cfg
, /* properties_required */
8742 0, /* properties_provided */
8743 0, /* properties_destroyed */
8744 0, /* todo_flags_start */
8745 0, /* todo_flags_finish */
8748 class pass_fixup_cfg
: public gimple_opt_pass
8751 pass_fixup_cfg (gcc::context
*ctxt
)
8752 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8755 /* opt_pass methods: */
8756 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8757 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8759 }; // class pass_fixup_cfg
8764 make_pass_fixup_cfg (gcc::context
*ctxt
)
8766 return new pass_fixup_cfg (ctxt
);
8769 /* Garbage collection support for edge_def. */
8771 extern void gt_ggc_mx (tree
&);
8772 extern void gt_ggc_mx (gimple
&);
8773 extern void gt_ggc_mx (rtx
&);
8774 extern void gt_ggc_mx (basic_block
&);
8777 gt_ggc_mx (rtx_insn
*& x
)
8780 gt_ggc_mx_rtx_def ((void *) x
);
8784 gt_ggc_mx (edge_def
*e
)
8786 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8788 gt_ggc_mx (e
->dest
);
8789 if (current_ir_type () == IR_GIMPLE
)
8790 gt_ggc_mx (e
->insns
.g
);
8792 gt_ggc_mx (e
->insns
.r
);
8796 /* PCH support for edge_def. */
8798 extern void gt_pch_nx (tree
&);
8799 extern void gt_pch_nx (gimple
&);
8800 extern void gt_pch_nx (rtx
&);
8801 extern void gt_pch_nx (basic_block
&);
8804 gt_pch_nx (rtx_insn
*& x
)
8807 gt_pch_nx_rtx_def ((void *) x
);
8811 gt_pch_nx (edge_def
*e
)
8813 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8815 gt_pch_nx (e
->dest
);
8816 if (current_ir_type () == IR_GIMPLE
)
8817 gt_pch_nx (e
->insns
.g
);
8819 gt_pch_nx (e
->insns
.r
);
8824 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8826 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8827 op (&(e
->src
), cookie
);
8828 op (&(e
->dest
), cookie
);
8829 if (current_ir_type () == IR_GIMPLE
)
8830 op (&(e
->insns
.g
), cookie
);
8832 op (&(e
->insns
.r
), cookie
);
8833 op (&(block
), cookie
);