1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
37 #include "hard-reg-set.h"
40 #include "dominance.h"
43 #include "basic-block.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
50 #include "gimple-expr.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "plugin-api.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-ssa-loop-manip.h"
66 #include "tree-ssa-loop-niter.h"
67 #include "tree-into-ssa.h"
71 #include "tree-dump.h"
72 #include "tree-pass.h"
73 #include "diagnostic-core.h"
76 #include "tree-ssa-propagate.h"
77 #include "value-prof.h"
78 #include "tree-inline.h"
80 #include "tree-ssa-live.h"
82 #include "tree-cfgcleanup.h"
84 #include "wide-int-print.h"
86 /* This file contains functions for building the Control Flow Graph (CFG)
87 for a function tree. */
89 /* Local declarations. */
91 /* Initial capacity for the basic block array. */
92 static const int initial_cfg_capacity
= 20;
94 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
95 which use a particular edge. The CASE_LABEL_EXPRs are chained together
96 via their CASE_CHAIN field, which we clear after we're done with the
97 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
99 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
100 update the case vector in response to edge redirections.
102 Right now this table is set up and torn down at key points in the
103 compilation process. It would be nice if we could make the table
104 more persistent. The key is getting notification of changes to
105 the CFG (particularly edge removal, creation and redirection). */
107 static hash_map
<edge
, tree
> *edge_to_cases
;
109 /* If we record edge_to_cases, this bitmap will hold indexes
110 of basic blocks that end in a GIMPLE_SWITCH which we touched
111 due to edge manipulations. */
113 static bitmap touched_switch_bbs
;
115 /* CFG statistics. */
118 long num_merged_labels
;
121 static struct cfg_stats_d cfg_stats
;
123 /* Hash table to store last discriminator assigned for each locus. */
124 struct locus_discrim_map
130 /* Hashtable helpers. */
132 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
134 typedef locus_discrim_map value_type
;
135 typedef locus_discrim_map compare_type
;
136 static inline hashval_t
hash (const value_type
*);
137 static inline bool equal (const value_type
*, const compare_type
*);
140 /* Trivial hash function for a location_t. ITEM is a pointer to
141 a hash table entry that maps a location_t to a discriminator. */
144 locus_discrim_hasher::hash (const value_type
*item
)
146 return LOCATION_LINE (item
->locus
);
149 /* Equality function for the locus-to-discriminator map. A and B
150 point to the two hash table entries to compare. */
153 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
155 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
158 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
160 /* Basic blocks and flowgraphs. */
161 static void make_blocks (gimple_seq
);
164 static void make_edges (void);
165 static void assign_discriminators (void);
166 static void make_cond_expr_edges (basic_block
);
167 static void make_gimple_switch_edges (gswitch
*, basic_block
);
168 static bool make_goto_expr_edges (basic_block
);
169 static void make_gimple_asm_edges (basic_block
);
170 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
171 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
173 /* Various helpers. */
174 static inline bool stmt_starts_bb_p (gimple
, gimple
);
175 static int gimple_verify_flow_info (void);
176 static void gimple_make_forwarder_block (edge
);
177 static gimple
first_non_label_stmt (basic_block
);
178 static bool verify_gimple_transaction (gtransaction
*);
179 static bool call_can_make_abnormal_goto (gimple
);
181 /* Flowgraph optimization and cleanup. */
182 static void gimple_merge_blocks (basic_block
, basic_block
);
183 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
184 static void remove_bb (basic_block
);
185 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
186 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
187 static edge
find_taken_edge_switch_expr (gswitch
*, basic_block
, tree
);
188 static tree
find_case_label_for_value (gswitch
*, tree
);
191 init_empty_tree_cfg_for_function (struct function
*fn
)
193 /* Initialize the basic block array. */
195 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
196 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
197 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
198 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
199 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
200 initial_cfg_capacity
);
202 /* Build a mapping of labels to their associated blocks. */
203 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
204 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
205 initial_cfg_capacity
);
207 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
208 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
210 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
211 = EXIT_BLOCK_PTR_FOR_FN (fn
);
212 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
213 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
217 init_empty_tree_cfg (void)
219 init_empty_tree_cfg_for_function (cfun
);
222 /*---------------------------------------------------------------------------
224 ---------------------------------------------------------------------------*/
226 /* Entry point to the CFG builder for trees. SEQ is the sequence of
227 statements to be added to the flowgraph. */
230 build_gimple_cfg (gimple_seq seq
)
232 /* Register specific gimple functions. */
233 gimple_register_cfg_hooks ();
235 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
237 init_empty_tree_cfg ();
241 /* Make sure there is always at least one block, even if it's empty. */
242 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
243 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
245 /* Adjust the size of the array. */
246 if (basic_block_info_for_fn (cfun
)->length ()
247 < (size_t) n_basic_blocks_for_fn (cfun
))
248 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
249 n_basic_blocks_for_fn (cfun
));
251 /* To speed up statement iterator walks, we first purge dead labels. */
252 cleanup_dead_labels ();
254 /* Group case nodes to reduce the number of edges.
255 We do this after cleaning up dead labels because otherwise we miss
256 a lot of obvious case merging opportunities. */
257 group_case_labels ();
259 /* Create the edges of the flowgraph. */
260 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
262 assign_discriminators ();
263 cleanup_dead_labels ();
264 delete discriminator_per_locus
;
265 discriminator_per_locus
= NULL
;
268 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
269 them and propagate the information to LOOP. We assume that the annotations
270 come immediately before the condition in BB, if any. */
273 replace_loop_annotate_in_block (basic_block bb
, struct loop
*loop
)
275 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
276 gimple stmt
= gsi_stmt (gsi
);
278 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
281 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
283 stmt
= gsi_stmt (gsi
);
284 if (gimple_code (stmt
) != GIMPLE_CALL
)
286 if (!gimple_call_internal_p (stmt
)
287 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
290 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
292 case annot_expr_ivdep_kind
:
293 loop
->safelen
= INT_MAX
;
295 case annot_expr_no_vector_kind
:
296 loop
->dont_vectorize
= true;
298 case annot_expr_vector_kind
:
299 loop
->force_vectorize
= true;
300 cfun
->has_force_vectorize_loops
= true;
306 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
307 gimple_call_arg (stmt
, 0));
308 gsi_replace (&gsi
, stmt
, true);
312 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
313 them and propagate the information to the loop. We assume that the
314 annotations come immediately before the condition of the loop. */
317 replace_loop_annotate (void)
321 gimple_stmt_iterator gsi
;
324 FOR_EACH_LOOP (loop
, 0)
326 /* First look into the header. */
327 replace_loop_annotate_in_block (loop
->header
, loop
);
329 /* Then look into the latch, if any. */
331 replace_loop_annotate_in_block (loop
->latch
, loop
);
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb
, cfun
)
337 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
339 stmt
= gsi_stmt (gsi
);
340 if (gimple_code (stmt
) != GIMPLE_CALL
)
342 if (!gimple_call_internal_p (stmt
)
343 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
346 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
348 case annot_expr_ivdep_kind
:
349 case annot_expr_no_vector_kind
:
350 case annot_expr_vector_kind
:
356 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
357 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
358 gimple_call_arg (stmt
, 0));
359 gsi_replace (&gsi
, stmt
, true);
366 execute_build_cfg (void)
368 gimple_seq body
= gimple_body (current_function_decl
);
370 build_gimple_cfg (body
);
371 gimple_set_body (current_function_decl
, NULL
);
372 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
374 fprintf (dump_file
, "Scope blocks:\n");
375 dump_scope_blocks (dump_file
, dump_flags
);
378 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
379 replace_loop_annotate ();
385 const pass_data pass_data_build_cfg
=
387 GIMPLE_PASS
, /* type */
389 OPTGROUP_NONE
, /* optinfo_flags */
390 TV_TREE_CFG
, /* tv_id */
391 PROP_gimple_leh
, /* properties_required */
392 ( PROP_cfg
| PROP_loops
), /* properties_provided */
393 0, /* properties_destroyed */
394 0, /* todo_flags_start */
395 0, /* todo_flags_finish */
398 class pass_build_cfg
: public gimple_opt_pass
401 pass_build_cfg (gcc::context
*ctxt
)
402 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
405 /* opt_pass methods: */
406 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
408 }; // class pass_build_cfg
413 make_pass_build_cfg (gcc::context
*ctxt
)
415 return new pass_build_cfg (ctxt
);
419 /* Return true if T is a computed goto. */
422 computed_goto_p (gimple t
)
424 return (gimple_code (t
) == GIMPLE_GOTO
425 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
428 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
429 the other edge points to a bb with just __builtin_unreachable ().
430 I.e. return true for C->M edge in:
438 __builtin_unreachable ();
442 assert_unreachable_fallthru_edge_p (edge e
)
444 basic_block pred_bb
= e
->src
;
445 gimple last
= last_stmt (pred_bb
);
446 if (last
&& gimple_code (last
) == GIMPLE_COND
)
448 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
449 if (other_bb
== e
->dest
)
450 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
451 if (EDGE_COUNT (other_bb
->succs
) == 0)
453 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
458 stmt
= gsi_stmt (gsi
);
459 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
464 stmt
= gsi_stmt (gsi
);
466 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
473 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
474 could alter control flow except via eh. We initialize the flag at
475 CFG build time and only ever clear it later. */
478 gimple_call_initialize_ctrl_altering (gimple stmt
)
480 int flags
= gimple_call_flags (stmt
);
482 /* A call alters control flow if it can make an abnormal goto. */
483 if (call_can_make_abnormal_goto (stmt
)
484 /* A call also alters control flow if it does not return. */
485 || flags
& ECF_NORETURN
486 /* TM ending statements have backedges out of the transaction.
487 Return true so we split the basic block containing them.
488 Note that the TM_BUILTIN test is merely an optimization. */
489 || ((flags
& ECF_TM_BUILTIN
)
490 && is_tm_ending_fndecl (gimple_call_fndecl (stmt
)))
491 /* BUILT_IN_RETURN call is same as return statement. */
492 || gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
493 gimple_call_set_ctrl_altering (stmt
, true);
495 gimple_call_set_ctrl_altering (stmt
, false);
499 /* Build a flowgraph for the sequence of stmts SEQ. */
502 make_blocks (gimple_seq seq
)
504 gimple_stmt_iterator i
= gsi_start (seq
);
506 bool start_new_block
= true;
507 bool first_stmt_of_seq
= true;
508 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
510 while (!gsi_end_p (i
))
517 if (stmt
&& is_gimple_call (stmt
))
518 gimple_call_initialize_ctrl_altering (stmt
);
520 /* If the statement starts a new basic block or if we have determined
521 in a previous pass that we need to create a new block for STMT, do
523 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
525 if (!first_stmt_of_seq
)
526 gsi_split_seq_before (&i
, &seq
);
527 bb
= create_basic_block (seq
, NULL
, bb
);
528 start_new_block
= false;
531 /* Now add STMT to BB and create the subgraphs for special statement
533 gimple_set_bb (stmt
, bb
);
535 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
537 if (stmt_ends_bb_p (stmt
))
539 /* If the stmt can make abnormal goto use a new temporary
540 for the assignment to the LHS. This makes sure the old value
541 of the LHS is available on the abnormal edge. Otherwise
542 we will end up with overlapping life-ranges for abnormal
544 if (gimple_has_lhs (stmt
)
545 && stmt_can_make_abnormal_goto (stmt
)
546 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
548 tree lhs
= gimple_get_lhs (stmt
);
549 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
550 gimple s
= gimple_build_assign (lhs
, tmp
);
551 gimple_set_location (s
, gimple_location (stmt
));
552 gimple_set_block (s
, gimple_block (stmt
));
553 gimple_set_lhs (stmt
, tmp
);
554 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
555 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
556 DECL_GIMPLE_REG_P (tmp
) = 1;
557 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
559 start_new_block
= true;
563 first_stmt_of_seq
= false;
568 /* Create and return a new empty basic block after bb AFTER. */
571 create_bb (void *h
, void *e
, basic_block after
)
577 /* Create and initialize a new basic block. Since alloc_block uses
578 GC allocation that clears memory to allocate a basic block, we do
579 not have to clear the newly allocated basic block here. */
582 bb
->index
= last_basic_block_for_fn (cfun
);
584 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
586 /* Add the new block to the linked list of blocks. */
587 link_block (bb
, after
);
589 /* Grow the basic block array if needed. */
590 if ((size_t) last_basic_block_for_fn (cfun
)
591 == basic_block_info_for_fn (cfun
)->length ())
594 (last_basic_block_for_fn (cfun
)
595 + (last_basic_block_for_fn (cfun
) + 3) / 4);
596 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
599 /* Add the newly created block to the array. */
600 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
602 n_basic_blocks_for_fn (cfun
)++;
603 last_basic_block_for_fn (cfun
)++;
609 /*---------------------------------------------------------------------------
611 ---------------------------------------------------------------------------*/
613 /* Fold COND_EXPR_COND of each COND_EXPR. */
616 fold_cond_expr_cond (void)
620 FOR_EACH_BB_FN (bb
, cfun
)
622 gimple stmt
= last_stmt (bb
);
624 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
626 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
627 location_t loc
= gimple_location (stmt
);
631 fold_defer_overflow_warnings ();
632 cond
= fold_binary_loc (loc
, gimple_cond_code (cond_stmt
),
634 gimple_cond_lhs (cond_stmt
),
635 gimple_cond_rhs (cond_stmt
));
638 zerop
= integer_zerop (cond
);
639 onep
= integer_onep (cond
);
642 zerop
= onep
= false;
644 fold_undefer_overflow_warnings (zerop
|| onep
,
646 WARN_STRICT_OVERFLOW_CONDITIONAL
);
648 gimple_cond_make_false (cond_stmt
);
650 gimple_cond_make_true (cond_stmt
);
655 /* If basic block BB has an abnormal edge to a basic block
656 containing IFN_ABNORMAL_DISPATCHER internal call, return
657 that the dispatcher's basic block, otherwise return NULL. */
660 get_abnormal_succ_dispatcher (basic_block bb
)
665 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
666 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
668 gimple_stmt_iterator gsi
669 = gsi_start_nondebug_after_labels_bb (e
->dest
);
670 gimple g
= gsi_stmt (gsi
);
672 && is_gimple_call (g
)
673 && gimple_call_internal_p (g
)
674 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
680 /* Helper function for make_edges. Create a basic block with
681 with ABNORMAL_DISPATCHER internal call in it if needed, and
682 create abnormal edges from BBS to it and from it to FOR_BB
683 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
686 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
687 basic_block for_bb
, int *bb_to_omp_idx
,
688 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
690 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
691 unsigned int idx
= 0;
697 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
698 if (bb_to_omp_idx
[for_bb
->index
] != 0)
702 /* If the dispatcher has been created already, then there are basic
703 blocks with abnormal edges to it, so just make a new edge to
705 if (*dispatcher
== NULL
)
707 /* Check if there are any basic blocks that need to have
708 abnormal edges to this dispatcher. If there are none, return
710 if (bb_to_omp_idx
== NULL
)
712 if (bbs
->is_empty ())
717 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
718 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
724 /* Create the dispatcher bb. */
725 *dispatcher
= create_basic_block (NULL
, NULL
, for_bb
);
728 /* Factor computed gotos into a common computed goto site. Also
729 record the location of that site so that we can un-factor the
730 gotos after we have converted back to normal form. */
731 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
733 /* Create the destination of the factored goto. Each original
734 computed goto will put its desired destination into this
735 variable and jump to the label we create immediately below. */
736 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
738 /* Build a label for the new block which will contain the
739 factored computed goto. */
740 tree factored_label_decl
741 = create_artificial_label (UNKNOWN_LOCATION
);
742 gimple factored_computed_goto_label
743 = gimple_build_label (factored_label_decl
);
744 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
746 /* Build our new computed goto. */
747 gimple factored_computed_goto
= gimple_build_goto (var
);
748 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
750 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
753 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
756 gsi
= gsi_last_bb (bb
);
757 gimple last
= gsi_stmt (gsi
);
759 gcc_assert (computed_goto_p (last
));
761 /* Copy the original computed goto's destination into VAR. */
763 = gimple_build_assign (var
, gimple_goto_dest (last
));
764 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
766 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
767 e
->goto_locus
= gimple_location (last
);
768 gsi_remove (&gsi
, true);
773 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
774 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
776 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
777 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
779 /* Create predecessor edges of the dispatcher. */
780 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
783 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
785 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
790 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
793 /* Join all the blocks in the flowgraph. */
799 struct omp_region
*cur_region
= NULL
;
800 auto_vec
<basic_block
> ab_edge_goto
;
801 auto_vec
<basic_block
> ab_edge_call
;
802 int *bb_to_omp_idx
= NULL
;
803 int cur_omp_region_idx
= 0;
805 /* Create an edge from entry to the first block with executable
807 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
808 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
811 /* Traverse the basic block array placing edges. */
812 FOR_EACH_BB_FN (bb
, cfun
)
814 gimple last
= last_stmt (bb
);
818 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
822 enum gimple_code code
= gimple_code (last
);
826 if (make_goto_expr_edges (bb
))
827 ab_edge_goto
.safe_push (bb
);
832 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
833 e
->goto_locus
= gimple_location (last
);
838 make_cond_expr_edges (bb
);
842 make_gimple_switch_edges (as_a
<gswitch
*> (last
), bb
);
846 make_eh_edges (last
);
849 case GIMPLE_EH_DISPATCH
:
850 fallthru
= make_eh_dispatch_edges (as_a
<geh_dispatch
*> (last
));
854 /* If this function receives a nonlocal goto, then we need to
855 make edges from this call site to all the nonlocal goto
857 if (stmt_can_make_abnormal_goto (last
))
858 ab_edge_call
.safe_push (bb
);
860 /* If this statement has reachable exception handlers, then
861 create abnormal edges to them. */
862 make_eh_edges (last
);
864 /* BUILTIN_RETURN is really a return statement. */
865 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
867 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
870 /* Some calls are known not to return. */
872 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
876 /* A GIMPLE_ASSIGN may throw internally and thus be considered
878 if (is_ctrl_altering_stmt (last
))
879 make_eh_edges (last
);
884 make_gimple_asm_edges (bb
);
889 fallthru
= make_gimple_omp_edges (bb
, &cur_region
,
890 &cur_omp_region_idx
);
891 if (cur_region
&& bb_to_omp_idx
== NULL
)
892 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
895 case GIMPLE_TRANSACTION
:
898 = gimple_transaction_label (as_a
<gtransaction
*> (last
));
900 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
906 gcc_assert (!stmt_ends_bb_p (last
));
914 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
917 /* Computed gotos are hell to deal with, especially if there are
918 lots of them with a large number of destinations. So we factor
919 them to a common computed goto location before we build the
920 edge list. After we convert back to normal form, we will un-factor
921 the computed gotos since factoring introduces an unwanted jump.
922 For non-local gotos and abnormal edges from calls to calls that return
923 twice or forced labels, factor the abnormal edges too, by having all
924 abnormal edges from the calls go to a common artificial basic block
925 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
926 basic block to all forced labels and calls returning twice.
927 We do this per-OpenMP structured block, because those regions
928 are guaranteed to be single entry single exit by the standard,
929 so it is not allowed to enter or exit such regions abnormally this way,
930 thus all computed gotos, non-local gotos and setjmp/longjmp calls
931 must not transfer control across SESE region boundaries. */
932 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
934 gimple_stmt_iterator gsi
;
935 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
936 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
937 int count
= n_basic_blocks_for_fn (cfun
);
940 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
942 FOR_EACH_BB_FN (bb
, cfun
)
944 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
946 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
952 target
= gimple_label_label (label_stmt
);
954 /* Make an edge to every label block that has been marked as a
955 potential target for a computed goto or a non-local goto. */
956 if (FORCED_LABEL (target
))
957 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
958 &ab_edge_goto
, true);
959 if (DECL_NONLOCAL (target
))
961 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
962 &ab_edge_call
, false);
967 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
968 gsi_next_nondebug (&gsi
);
969 if (!gsi_end_p (gsi
))
971 /* Make an edge to every setjmp-like call. */
972 gimple call_stmt
= gsi_stmt (gsi
);
973 if (is_gimple_call (call_stmt
)
974 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
975 || gimple_call_builtin_p (call_stmt
,
976 BUILT_IN_SETJMP_RECEIVER
)))
977 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
978 &ab_edge_call
, false);
983 XDELETE (dispatcher_bbs
);
986 XDELETE (bb_to_omp_idx
);
990 /* Fold COND_EXPR_COND of each COND_EXPR. */
991 fold_cond_expr_cond ();
994 /* Find the next available discriminator value for LOCUS. The
995 discriminator distinguishes among several basic blocks that
996 share a common locus, allowing for more accurate sample-based
1000 next_discriminator_for_locus (location_t locus
)
1002 struct locus_discrim_map item
;
1003 struct locus_discrim_map
**slot
;
1006 item
.discriminator
= 0;
1007 slot
= discriminator_per_locus
->find_slot_with_hash (
1008 &item
, LOCATION_LINE (locus
), INSERT
);
1010 if (*slot
== HTAB_EMPTY_ENTRY
)
1012 *slot
= XNEW (struct locus_discrim_map
);
1014 (*slot
)->locus
= locus
;
1015 (*slot
)->discriminator
= 0;
1017 (*slot
)->discriminator
++;
1018 return (*slot
)->discriminator
;
1021 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1024 same_line_p (location_t locus1
, location_t locus2
)
1026 expanded_location from
, to
;
1028 if (locus1
== locus2
)
1031 from
= expand_location (locus1
);
1032 to
= expand_location (locus2
);
1034 if (from
.line
!= to
.line
)
1036 if (from
.file
== to
.file
)
1038 return (from
.file
!= NULL
1040 && filename_cmp (from
.file
, to
.file
) == 0);
1043 /* Assign discriminators to each basic block. */
1046 assign_discriminators (void)
1050 FOR_EACH_BB_FN (bb
, cfun
)
1054 gimple last
= last_stmt (bb
);
1055 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
1057 if (locus
== UNKNOWN_LOCATION
)
1060 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1062 gimple first
= first_non_label_stmt (e
->dest
);
1063 gimple last
= last_stmt (e
->dest
);
1064 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1065 || (last
&& same_line_p (locus
, gimple_location (last
))))
1067 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1068 bb
->discriminator
= next_discriminator_for_locus (locus
);
1070 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1076 /* Create the edges for a GIMPLE_COND starting at block BB. */
1079 make_cond_expr_edges (basic_block bb
)
1081 gcond
*entry
= as_a
<gcond
*> (last_stmt (bb
));
1082 gimple then_stmt
, else_stmt
;
1083 basic_block then_bb
, else_bb
;
1084 tree then_label
, else_label
;
1088 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1090 /* Entry basic blocks for each component. */
1091 then_label
= gimple_cond_true_label (entry
);
1092 else_label
= gimple_cond_false_label (entry
);
1093 then_bb
= label_to_block (then_label
);
1094 else_bb
= label_to_block (else_label
);
1095 then_stmt
= first_stmt (then_bb
);
1096 else_stmt
= first_stmt (else_bb
);
1098 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1099 e
->goto_locus
= gimple_location (then_stmt
);
1100 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1102 e
->goto_locus
= gimple_location (else_stmt
);
1104 /* We do not need the labels anymore. */
1105 gimple_cond_set_true_label (entry
, NULL_TREE
);
1106 gimple_cond_set_false_label (entry
, NULL_TREE
);
/* hash_map traversal callback: walks the CASE_CHAIN list hanging off
   VALUE and nulls each link so the case labels are no longer chained
   when the edge->cases map is destroyed.  */
1110 /* Called for each element in the hash table (P) as we delete the
1111 edge to cases hash table.
1113 Clear all the TREE_CHAINs to prevent problems with copying of
1114 SWITCH_EXPRs and structure sharing rules, then free the hash table
1118 edge_to_cases_cleanup (edge
const &, tree
const &value
, void *)
1122 for (t
= value
; t
; t
= next
)
/* Save the successor before severing the chain link.  */
1124 next
= CASE_CHAIN (t
);
1125 CASE_CHAIN (t
) = NULL
;
/* Allocate the edge->case-label map and the bitmap of switch-bearing
   blocks touched while recording; must not already be recording.  */
1131 /* Start recording information mapping edges to case labels. */
1134 start_recording_case_labels (void)
1136 gcc_assert (edge_to_cases
== NULL
);
1137 edge_to_cases
= new hash_map
<edge
, tree
>;
1138 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
/* Predicate: recording is in progress iff the map exists.  */
1141 /* Return nonzero if we are recording information for case labels. */
1144 recording_case_labels_p (void)
1146 return (edge_to_cases
!= NULL
);
/* Tear down the recording state: clear all CASE_CHAINs via the
   traversal callback, delete the map, then re-group case labels in
   every switch whose block was touched while recording.  */
1149 /* Stop recording information mapping edges to case labels and
1150 remove any information we have recorded. */
1152 end_recording_case_labels (void)
1156 edge_to_cases
->traverse
<void *, edge_to_cases_cleanup
> (NULL
);
1157 delete edge_to_cases
;
1158 edge_to_cases
= NULL
;
1159 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1161 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
/* The block may have lost its switch (or been removed) since it was
   recorded; only re-group when a GIMPLE_SWITCH is still the tail.  */
1164 gimple stmt
= last_stmt (bb
);
1165 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1166 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
1169 BITMAP_FREE (touched_switch_bbs
);
/* Look up (lazily building) the chain of CASE_LABEL_EXPRs of switch T
   that transfer control along edge E.  Populates the map for all of
   T's labels on first query, then re-queries.  */
1172 /* If we are inside a {start,end}_recording_cases block, then return
1173 a chain of CASE_LABEL_EXPRs from T which reference E.
1175 Otherwise return NULL. */
1178 get_cases_for_edge (edge e
, gswitch
*t
)
1183 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1184 chains available. Return NULL so the caller can detect this case. */
1185 if (!recording_case_labels_p ())
1188 slot
= edge_to_cases
->get (e
);
1192 /* If we did not find E in the hash table, then this must be the first
1193 time we have been queried for information about E & T. Add all the
1194 elements from T to the hash table then perform the query again. */
1196 n
= gimple_switch_num_labels (t
);
1197 for (i
= 0; i
< n
; i
++)
1199 tree elt
= gimple_switch_label (t
, i
);
1200 tree lab
= CASE_LABEL (elt
);
1201 basic_block label_bb
= label_to_block (lab
);
1202 edge this_edge
= find_edge (e
->src
, label_bb
);
1204 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
/* Push ELT onto the front of the chain stored for THIS_EDGE.  */
1206 tree
&s
= edge_to_cases
->get_or_insert (this_edge
);
1207 CASE_CHAIN (elt
) = s
;
1211 return *edge_to_cases
->get (e
);
/* Add one (unflagged) CFG edge from BB to the destination block of
   every case label of the switch ENTRY.  */
1214 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1217 make_gimple_switch_edges (gswitch
*entry
, basic_block bb
)
1221 n
= gimple_switch_num_labels (entry
);
1223 for (i
= 0; i
< n
; ++i
)
1225 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1226 basic_block label_bb
= label_to_block (lab
);
1227 make_edge (bb
, label_bb
, 0);
/* Map label DEST to its basic block within IFUN via the
   x_label_to_block_map.  If the label is undefined and we have
   already diagnosed errors, synthesize the label at the start of the
   first real block instead of crashing.  */
1232 /* Return the basic block holding label DEST. */
1235 label_to_block_fn (struct function
*ifun
, tree dest
)
1237 int uid
= LABEL_DECL_UID (dest
);
1239 /* We would die hard when faced by an undefined label. Emit a label to
1240 the very first basic block. This will hopefully make even the dataflow
1241 and undefined variable warnings quite right. */
1242 if (seen_error () && uid
< 0)
1244 gimple_stmt_iterator gsi
=
1245 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1248 stmt
= gimple_build_label (dest
);
1249 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
/* gimple_build_label assigned a fresh uid; re-read it.  */
1250 uid
= LABEL_DECL_UID (dest
);
/* Out-of-range uid: no block recorded for this label.  */
1252 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1254 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
/* Create out-edges for the goto ending BB.  A simple goto becomes a
   fallthru edge to the target label's block and the goto statement
   itself is removed; a computed goto is left for abnormal-edge
   creation (tail of function not visible here due to extraction).  */
1257 /* Create edges for a goto statement at block BB. Returns true
1258 if abnormal edges should be created. */
1261 make_goto_expr_edges (basic_block bb
)
1263 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1264 gimple goto_t
= gsi_stmt (last
);
1266 /* A simple GOTO creates normal edges. */
1267 if (simple_goto_p (goto_t
))
1269 tree dest
= gimple_goto_dest (goto_t
);
1270 basic_block label_bb
= label_to_block (dest
);
1271 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1272 e
->goto_locus
= gimple_location (goto_t
);
/* The goto is now fully represented by the edge; delete it.  */
1273 gsi_remove (&last
, true);
1277 /* A computed GOTO creates abnormal edges. */
/* Add one CFG edge from BB to the block of each label operand of the
   asm-goto statement ending BB.  */
1281 /* Create edges for an asm statement with labels at block BB. */
1284 make_gimple_asm_edges (basic_block bb
)
1286 gasm
*stmt
= as_a
<gasm
*> (last_stmt (bb
));
1287 int i
, n
= gimple_asm_nlabels (stmt
);
1289 for (i
= 0; i
< n
; ++i
)
1291 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1292 basic_block label_bb
= label_to_block (label
);
1293 make_edge (bb
, label_bb
, 0);
/* Section: dead-label cleanup.  NOTE(review): the label_record struct
   body is truncated by extraction — upstream it holds the block's
   leading `label` and a `used` flag; verify before relying on it.  */
1297 /*---------------------------------------------------------------------------
1299 ---------------------------------------------------------------------------*/
1301 /* Cleanup useless labels in basic blocks. This is something we wish
1302 to do early because it allows us to group case labels before creating
1303 the edges for the CFG, and it speeds up block statement iterators in
1304 all passes later on.
1305 We rerun this pass after CFG is created, to get rid of the labels that
1306 are no longer referenced. After then we do not run it any more, since
1307 (almost) no new labels should be created. */
1309 /* A map from basic block index to the leading label of that block. */
1310 static struct label_record
1315 /* True if the label is referenced from somewhere. */
/* Canonicalize LABEL to the leading label of its basic block, marking
   that leading label as used.  If the block had no recorded label
   (label_to_block may have synthesized one), record LABEL itself.  */
1319 /* Given LABEL return the first label in the same basic block. */
1322 main_block_label (tree label
)
1324 basic_block bb
= label_to_block (label
);
1325 tree main_label
= label_for_bb
[bb
->index
].label
;
1327 /* label_to_block possibly inserted undefined label into the chain. */
1330 label_for_bb
[bb
->index
].label
= label
;
1334 label_for_bb
[bb
->index
].used
= true;
/* Redirect every label referenced from the EH tree (landing pads,
   catch handlers, allowed-exceptions regions) to the canonical leading
   label of its block, keeping EH_LANDING_PAD_NR bookkeeping in sync.
   No-op when the function has no EH data.  */
1338 /* Clean up redundant labels within the exception tree. */
1341 cleanup_dead_labels_eh (void)
1348 if (cfun
->eh
== NULL
)
1351 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1352 if (lp
&& lp
->post_landing_pad
)
1354 lab
= main_block_label (lp
->post_landing_pad
);
/* Moved to a different label: transfer the landing-pad number.  */
1355 if (lab
!= lp
->post_landing_pad
)
1357 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1358 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1362 FOR_ALL_EH_REGION (r
)
1366 case ERT_MUST_NOT_THROW
:
/* Canonicalize the label of every catch handler of a try region.  */
1372 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1376 c
->label
= main_block_label (lab
);
1381 case ERT_ALLOWED_EXCEPTIONS
:
1382 lab
= r
->u
.allowed
.label
;
1384 r
->u
.allowed
.label
= main_block_label (lab
);
/* Top-level dead-label pass.  Step 1 picks a leading label per block
   (preferring user labels over artificial ones); step 2 redirects all
   references — conds, switches, asm gotos, gotos, transactions, and
   the EH tree — to those leading labels; step 3 deletes labels that
   are artificial, unused, non-forced and not non-local targets.  */
1390 /* Cleanup redundant labels. This is a three-step process:
1391 1) Find the leading label for each block.
1392 2) Redirect all references to labels to the leading labels.
1393 3) Cleanup all useless labels. */
1396 cleanup_dead_labels (void)
1399 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1401 /* Find a suitable label for each block. We use the first user-defined
1402 label if there is one, or otherwise just the first label we see. */
1403 FOR_EACH_BB_FN (bb
, cfun
)
1405 gimple_stmt_iterator i
;
1407 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1410 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1415 label
= gimple_label_label (label_stmt
);
1417 /* If we have not yet seen a label for the current block,
1418 remember this one and see if there are more labels. */
1419 if (!label_for_bb
[bb
->index
].label
)
1421 label_for_bb
[bb
->index
].label
= label
;
1425 /* If we did see a label for the current block already, but it
1426 is an artificially created label, replace it if the current
1427 label is a user defined label. */
1428 if (!DECL_ARTIFICIAL (label
)
1429 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1431 label_for_bb
[bb
->index
].label
= label
;
1437 /* Now redirect all jumps/branches to the selected label.
1438 First do so for each block ending in a control statement. */
1439 FOR_EACH_BB_FN (bb
, cfun
)
1441 gimple stmt
= last_stmt (bb
);
1442 tree label
, new_label
;
1447 switch (gimple_code (stmt
))
/* GIMPLE_COND: canonicalize both arm labels.  */
1451 gcond
*cond_stmt
= as_a
<gcond
*> (stmt
);
1452 label
= gimple_cond_true_label (cond_stmt
);
1455 new_label
= main_block_label (label
);
1456 if (new_label
!= label
)
1457 gimple_cond_set_true_label (cond_stmt
, new_label
);
1460 label
= gimple_cond_false_label (cond_stmt
);
1463 new_label
= main_block_label (label
);
1464 if (new_label
!= label
)
1465 gimple_cond_set_false_label (cond_stmt
, new_label
);
/* GIMPLE_SWITCH: canonicalize every case label.  */
1472 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
1473 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
1475 /* Replace all destination labels. */
1476 for (i
= 0; i
< n
; ++i
)
1478 tree case_label
= gimple_switch_label (switch_stmt
, i
);
1479 label
= CASE_LABEL (case_label
);
1480 new_label
= main_block_label (label
);
1481 if (new_label
!= label
)
1482 CASE_LABEL (case_label
) = new_label
;
/* GIMPLE_ASM with goto labels: canonicalize each label operand.  */
1489 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
1490 int i
, n
= gimple_asm_nlabels (asm_stmt
);
1492 for (i
= 0; i
< n
; ++i
)
1494 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
1495 tree label
= main_block_label (TREE_VALUE (cons
));
1496 TREE_VALUE (cons
) = label
;
1501 /* We have to handle gotos until they're removed, and we don't
1502 remove them until after we've created the CFG edges. */
1504 if (!computed_goto_p (stmt
))
1506 ggoto
*goto_stmt
= as_a
<ggoto
*> (stmt
);
1507 label
= gimple_goto_dest (goto_stmt
);
1508 new_label
= main_block_label (label
);
1509 if (new_label
!= label
)
1510 gimple_goto_set_dest (goto_stmt
, new_label
);
1514 case GIMPLE_TRANSACTION
:
1516 gtransaction
*trans_stmt
= as_a
<gtransaction
*> (stmt
);
1517 tree label
= gimple_transaction_label (trans_stmt
);
1520 tree new_label
= main_block_label (label
);
1521 if (new_label
!= label
)
1522 gimple_transaction_set_label (trans_stmt
, new_label
);
1532 /* Do the same for the exception region tree labels. */
1533 cleanup_dead_labels_eh ();
1535 /* Finally, purge dead labels. All user-defined labels and labels that
1536 can be the target of non-local gotos and labels which have their
1537 address taken are preserved. */
1538 FOR_EACH_BB_FN (bb
, cfun
)
1540 gimple_stmt_iterator i
;
1541 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1543 if (!label_for_this_bb
)
1546 /* If the main label of the block is unused, we may still remove it. */
1547 if (!label_for_bb
[bb
->index
].used
)
1548 label_for_this_bb
= NULL
;
1550 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1553 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
1558 label
= gimple_label_label (label_stmt
);
/* Keep the leading label, user labels, non-local targets, and
   labels whose address is taken; delete the rest.  */
1560 if (label
== label_for_this_bb
1561 || !DECL_ARTIFICIAL (label
)
1562 || DECL_NONLOCAL (label
)
1563 || FORCED_LABEL (label
))
1566 gsi_remove (&i
, true);
1570 free (label_for_bb
);
/* Merge adjacent case labels of STMT that jump to the same block and
   cover consecutive value ranges; drop cases that duplicate the
   default destination.  Cleared slots are then compacted and the
   label count shrunk.  */
1573 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1574 the ones jumping to the same label.
1575 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1578 group_case_labels_stmt (gswitch
*stmt
)
1580 int old_size
= gimple_switch_num_labels (stmt
);
1581 int i
, j
, new_size
= old_size
;
1582 basic_block default_bb
= NULL
;
1584 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1586 /* Look for possible opportunities to merge cases. */
1588 while (i
< old_size
)
1590 tree base_case
, base_high
;
1591 basic_block base_bb
;
1593 base_case
= gimple_switch_label (stmt
, i
);
1595 gcc_assert (base_case
);
1596 base_bb
= label_to_block (CASE_LABEL (base_case
));
1598 /* Discard cases that have the same destination as the
/* ... default case: clear the slot; compaction happens below.  */
1600 if (base_bb
== default_bb
)
1602 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
/* Treat a single-value case as the degenerate range [low, low].  */
1608 base_high
= CASE_HIGH (base_case
)
1609 ? CASE_HIGH (base_case
)
1610 : CASE_LOW (base_case
);
1613 /* Try to merge case labels. Break out when we reach the end
1614 of the label vector or when we cannot merge the next case
1615 label with the current one. */
1616 while (i
< old_size
)
1618 tree merge_case
= gimple_switch_label (stmt
, i
);
1619 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1620 wide_int bhp1
= wi::add (base_high
, 1);
1622 /* Merge the cases if they jump to the same place,
1623 and their ranges are consecutive. */
1624 if (merge_bb
== base_bb
1625 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1627 base_high
= CASE_HIGH (merge_case
) ?
1628 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1629 CASE_HIGH (base_case
) = base_high
;
1630 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1639 /* Compress the case labels in the label vector, and adjust the
1640 length of the vector. */
1641 for (i
= 0, j
= 0; i
< new_size
; i
++)
/* Skip the slots nulled out above.  */
1643 while (! gimple_switch_label (stmt
, j
))
1645 gimple_switch_set_label (stmt
, i
,
1646 gimple_switch_label (stmt
, j
++));
1649 gcc_assert (new_size
<= old_size
);
1650 gimple_switch_set_num_labels (stmt
, new_size
);
/* Run group_case_labels_stmt over every block that ends in a
   GIMPLE_SWITCH.  */
1653 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1654 and scan the sorted vector of cases. Combine the ones jumping to the
1658 group_case_labels (void)
1662 FOR_EACH_BB_FN (bb
, cfun
)
1664 gimple stmt
= last_stmt (bb
);
1665 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1666 group_case_labels_stmt (as_a
<gswitch
*> (stmt
));
/* Decide whether block B may be merged into its sole predecessor A:
   requires a single simple fallthru A->B, no control-ending statement
   in A, no blocking labels or PHIs in B, latch/header protection for
   simple-latch loops, and (at -O0) no loss of goto_locus.  */
1670 /* Checks whether we can merge block B into block A. */
1673 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1677 if (!single_succ_p (a
))
1680 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1683 if (single_succ (a
) != b
)
1686 if (!single_pred_p (b
))
1689 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1692 /* If A ends by a statement causing exceptions or something similar, we
1693 cannot merge the blocks. */
1694 stmt
= last_stmt (a
);
1695 if (stmt
&& stmt_ends_bb_p (stmt
))
1698 /* Do not allow a block with only a non-local label to be merged. */
1700 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1701 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
1704 /* Examine the labels at the beginning of B. */
1705 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);
1709 glabel
*label_stmt
= dyn_cast
<glabel
*> (gsi_stmt (gsi
));
1712 lab
= gimple_label_label (label_stmt
);
1714 /* Do not remove user forced labels or for -O0 any user labels. */
1715 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1719 /* Protect simple loop latches. We only want to avoid merging
1720 the latch with the loop header in this case. */
1722 && b
->loop_father
->latch
== b
1723 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES
)
1724 && b
->loop_father
->header
== a
)
1727 /* It must be possible to eliminate all phi nodes in B. If ssa form
1728 is not up-to-date and a name-mapping is registered, we cannot eliminate
1729 any phis. Symbols marked for renaming are never a problem though. */
1730 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);
1733 gphi
*phi
= gsi
.phi ();
1734 /* Technically only new names matter. */
1735 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1739 /* When not optimizing, don't merge if we'd lose goto_locus. */
1741 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1743 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1744 gimple_stmt_iterator prev
, next
;
1745 prev
= gsi_last_nondebug_bb (a
);
1746 next
= gsi_after_labels (b
);
1747 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1748 gsi_next_nondebug (&next
);
/* Merging is refused only when neither neighbor carries the locus.  */
1749 if ((gsi_end_p (prev
)
1750 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1751 && (gsi_end_p (next
)
1752 || gimple_location (gsi_stmt (next
)) != goto_locus
))
/* Replace every use of SSA name NAME with VAL: patch each use
   operand, propagate abnormal-PHI flags across abnormal edges, fold
   and EH-cleanup affected non-PHI statements, and update loop
   structures that mention NAME.  */
1759 /* Replaces all uses of NAME by VAL. */
1762 replace_uses_by (tree name
, tree val
)
1764 imm_use_iterator imm_iter
;
1769 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1771 /* Mark the block if we change the last stmt in it. */
1772 if (cfgcleanup_altered_bbs
1773 && stmt_ends_bb_p (stmt
))
1774 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1776 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1778 replace_exp (use
, val
);
1780 if (gimple_code (stmt
) == GIMPLE_PHI
)
1782 e
= gimple_phi_arg_edge (as_a
<gphi
*> (stmt
),
1783 PHI_ARG_INDEX_FROM_USE (use
));
1784 if (e
->flags
& EDGE_ABNORMAL
)
1786 /* This can only occur for virtual operands, since
1787 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1788 would prevent replacement. */
1789 gcc_checking_assert (virtual_operand_p (name
));
1790 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1795 if (gimple_code (stmt
) != GIMPLE_PHI
)
1797 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1798 gimple orig_stmt
= stmt
;
1801 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1802 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1803 only change sth from non-invariant to invariant, and only
1804 when propagating constants. */
1805 if (is_gimple_min_invariant (val
))
1806 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1808 tree op
= gimple_op (stmt
, i
);
1809 /* Operands may be empty here. For example, the labels
1810 of a GIMPLE_COND are nulled out following the creation
1811 of the corresponding CFG edges. */
1812 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1813 recompute_tree_invariant_for_addr_expr (op
);
/* Folding may replace the statement; re-fetch it before EH cleanup.  */
1816 if (fold_stmt (&gsi
))
1817 stmt
= gsi_stmt (gsi
);
1819 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1820 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1826 gcc_checking_assert (has_zero_uses (name
));
1828 /* Also update the trees stored in loop structures. */
1833 FOR_EACH_LOOP (loop
, 0)
1835 substitute_in_loop_info (loop
, name
, val
);
/* Physically merge block B into A: eliminate B's single-argument
   PHIs (by propagation or explicit copies), move B after A, migrate
   or drop B's labels (preserving forced labels and landing-pad info),
   re-parent B's statements to A, reconcile counts/frequencies, and
   splice B's statement sequence onto A.  */
1840 /* Merge block B into block A. */
1843 gimple_merge_blocks (basic_block a
, basic_block b
)
1845 gimple_stmt_iterator last
, gsi
;
1849 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1851 /* Remove all single-valued PHI nodes from block B of the form
1852 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1853 gsi
= gsi_last_bb (a
);
1854 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1856 gimple phi
= gsi_stmt (psi
);
1857 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1859 bool may_replace_uses
= (virtual_operand_p (def
)
1860 || may_propagate_copy (def
, use
));
1862 /* In case we maintain loop closed ssa form, do not propagate arguments
1863 of loop exit phi nodes. */
1865 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1866 && !virtual_operand_p (def
)
1867 && TREE_CODE (use
) == SSA_NAME
1868 && a
->loop_father
!= b
->loop_father
)
1869 may_replace_uses
= false;
1871 if (!may_replace_uses
)
1873 gcc_assert (!virtual_operand_p (def
));
1875 /* Note that just emitting the copies is fine -- there is no problem
1876 with ordering of phi nodes. This is because A is the single
1877 predecessor of B, therefore results of the phi nodes cannot
1878 appear as arguments of the phi nodes. */
1879 copy
= gimple_build_assign (def
, use
);
1880 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1881 remove_phi_node (&psi
, false);
1885 /* If we deal with a PHI for virtual operands, we can simply
1886 propagate these without fussing with folding or updating
1888 if (virtual_operand_p (def
))
1890 imm_use_iterator iter
;
1891 use_operand_p use_p
;
1894 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1895 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1896 SET_USE (use_p
, use
);
1898 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1899 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1902 replace_uses_by (def
, use
);
1904 remove_phi_node (&psi
, true);
1908 /* Ensure that B follows A. */
1909 move_block_after (b
, a
);
1911 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1912 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1914 /* Remove labels from B and set gimple_bb to A for other statements. */
1915 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1917 gimple stmt
= gsi_stmt (gsi
);
1918 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
1920 tree label
= gimple_label_label (label_stmt
);
1923 gsi_remove (&gsi
, false);
1925 /* Now that we can thread computed gotos, we might have
1926 a situation where we have a forced label in block B
1927 However, the label at the start of block B might still be
1928 used in other ways (think about the runtime checking for
1929 Fortran assigned gotos). So we can not just delete the
1930 label. Instead we move the label to the start of block A. */
1931 if (FORCED_LABEL (label
))
1933 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1934 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1936 /* Other user labels keep around in a form of a debug stmt. */
1937 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1939 gimple dbg
= gimple_build_debug_bind (label
,
1942 gimple_debug_bind_reset_value (dbg
);
1943 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
/* Detach any EH landing pad that pointed at the removed label.  */
1946 lp_nr
= EH_LANDING_PAD_NR (label
);
1949 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1950 lp
->post_landing_pad
= NULL
;
1955 gimple_set_bb (stmt
, a
);
1960 /* When merging two BBs, if their counts are different, the larger count
1961 is selected as the new bb count. This is to handle inconsistent
1963 if (a
->loop_father
== b
->loop_father
)
1965 a
->count
= MAX (a
->count
, b
->count
);
1966 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
1969 /* Merge the sequences. */
1970 last
= gsi_last_bb (a
);
1971 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1972 set_bb_seq (b
, NULL
);
1974 if (cfgcleanup_altered_bbs
)
1975 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
/* For a two-successor block, return the successor not reached via a
   complex edge; otherwise (not exactly two successors, or neither /
   both complex — tail not visible) fall back to BB itself.  */
1979 /* Return the one of two successors of BB that is not reachable by a
1980 complex edge, if there is one. Else, return BB. We use
1981 this in optimizations that use post-dominators for their heuristics,
1982 to catch the cases in C++ where function calls are involved. */
1985 single_noncomplex_succ (basic_block bb
)
1988 if (EDGE_COUNT (bb
->succs
) != 2)
1991 e0
= EDGE_SUCC (bb
, 0);
1992 e1
= EDGE_SUCC (bb
, 1);
1993 if (e0
->flags
& EDGE_COMPLEX
)
1995 if (e1
->flags
& EDGE_COMPLEX
)
/* Record per-function flags implied by CALL: alloca-like calls set
   calls_alloca; returns-twice calls (setjmp-like) set calls_setjmp.  */
2001 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2004 notice_special_calls (gcall
*call
)
2006 int flags
= gimple_call_flags (call
);
2008 if (flags
& ECF_MAY_BE_ALLOCA
)
2009 cfun
->calls_alloca
= true;
2010 if (flags
& ECF_RETURNS_TWICE
)
2011 cfun
->calls_setjmp
= true;
/* Reset the flags set by notice_special_calls so dead-code removal
   can recompute them.  */
2015 /* Clear flags set by notice_special_calls. Used by dead code removal
2016 to update the flags. */
2019 clear_special_calls (void)
2021 cfun
->calls_alloca
= false;
2022 cfun
->calls_setjmp
= false;
/* Strip all PHIs from an unreachable BB and detach every outgoing
   edge.  */
2025 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2028 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
2030 /* Since this block is no longer reachable, we can just delete all
2031 of its PHI nodes. */
2032 remove_phi_nodes (bb
);
2034 /* Remove edges to BB's successors. */
2035 while (EDGE_COUNT (bb
->succs
) > 0)
2036 remove_edge (EDGE_SUCC (bb
, 0));
/* Delete block BB: dump it if requested, invalidate niter estimates
   when a loop loses its header or latch, delete statements back to
   front (rescuing forced/non-local labels into the previous block,
   releasing SSA defs), then drop PHIs, edges, and the sequence.  */
2040 /* Remove statements of basic block BB. */
2043 remove_bb (basic_block bb
)
2045 gimple_stmt_iterator i
;
2049 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
2050 if (dump_flags
& TDF_DETAILS
)
2052 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
2053 fprintf (dump_file
, "\n");
2059 struct loop
*loop
= bb
->loop_father
;
2061 /* If a loop gets removed, clean up the information associated
2063 if (loop
->latch
== bb
2064 || loop
->header
== bb
)
2065 free_numbers_of_iterations_estimates_loop (loop
);
2068 /* Remove all the instructions in the block. */
2069 if (bb_seq (bb
) != NULL
)
2071 /* Walk backwards so as to get a chance to substitute all
2072 released DEFs into debug stmts. See
2073 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2075 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2077 gimple stmt
= gsi_stmt (i
);
2078 glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
);
2080 && (FORCED_LABEL (gimple_label_label (label_stmt
))
2081 || DECL_NONLOCAL (gimple_label_label (label_stmt
))))
2084 gimple_stmt_iterator new_gsi
;
2086 /* A non-reachable non-local label may still be referenced.
2087 But it no longer needs to carry the extra semantics of
2089 if (DECL_NONLOCAL (gimple_label_label (label_stmt
)))
2091 DECL_NONLOCAL (gimple_label_label (label_stmt
)) = 0;
2092 FORCED_LABEL (gimple_label_label (label_stmt
)) = 1;
/* Move the preserved label into the previous block.  */
2095 new_bb
= bb
->prev_bb
;
2096 new_gsi
= gsi_start_bb (new_bb
);
2097 gsi_remove (&i
, false);
2098 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2102 /* Release SSA definitions if we are in SSA. Note that we
2103 may be called when not in SSA. For example,
2104 final_cleanup calls this function via
2105 cleanup_tree_cfg. */
2106 if (gimple_in_ssa_p (cfun
))
2107 release_defs (stmt
);
2109 gsi_remove (&i
, true);
/* Restart from the (new) last statement after each removal.  */
2113 i
= gsi_last_bb (bb
);
2119 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2120 bb
->il
.gimple
.seq
= NULL
;
2121 bb
->il
.gimple
.phi_nodes
= NULL
;
/* Dispatch on the control statement ending BB (cond, switch, or
   computed goto) and return the unique out-edge taken for constant
   predicate VAL, or NULL when it cannot be determined.  */
2125 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2126 predicate VAL, return the edge that will be taken out of the block.
2127 If VAL does not match a unique edge, NULL is returned. */
2130 find_taken_edge (basic_block bb
, tree val
)
2134 stmt
= last_stmt (bb
);
2137 gcc_assert (is_ctrl_stmt (stmt
));
/* Only constants let us resolve the branch statically.  */
2142 if (!is_gimple_min_invariant (val
))
2145 if (gimple_code (stmt
) == GIMPLE_COND
)
2146 return find_taken_edge_cond_expr (bb
, val
);
2148 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2149 return find_taken_edge_switch_expr (as_a
<gswitch
*> (stmt
), bb
, val
);
2151 if (computed_goto_p (stmt
))
2153 /* Only optimize if the argument is a label, if the argument is
2154 not a label then we can not construct a proper CFG.
2156 It may be the case that we only need to allow the LABEL_REF to
2157 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2158 appear inside a LABEL_EXPR just to be safe. */
2159 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2160 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2161 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
/* Resolve a computed goto with known label VAL: find the edge from BB
   to the label's block (which must exist).  */
2168 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2169 statement, determine which of the outgoing edges will be taken out of the
2170 block. Return NULL if either edge may be taken. */
2173 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2178 dest
= label_to_block (val
);
2181 e
= find_edge (bb
, dest
);
2182 gcc_assert (e
!= NULL
);
/* For a GIMPLE_COND with integer-constant predicate VAL, pick the
   false edge when VAL is zero, else the true edge.  */
2188 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2189 statement, determine which of the two edges will be taken out of the
2190 block. Return NULL if either edge may be taken. */
2193 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2195 edge true_edge
, false_edge
;
2197 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2199 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2200 return (integer_zerop (val
) ? false_edge
: true_edge
);
/* For a GIMPLE_SWITCH with constant selector VAL, look up the case
   label that matches and return the edge to its block.  */
2203 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2204 statement, determine which edge will be taken out of the block. Return
2205 NULL if any edge may be taken. */
2208 find_taken_edge_switch_expr (gswitch
*switch_stmt
, basic_block bb
,
2211 basic_block dest_bb
;
2215 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2216 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2218 e
= find_edge (bb
, dest_bb
);
/* Binary-search the sorted case labels of SWITCH_STMT (index 0 is the
   default) for one whose value or [low, high] range covers VAL;
   return the default case when none matches.  */
2224 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2225 We can make optimal use here of the fact that the case labels are
2226 sorted: We can do a binary search for a case matching VAL. */
2229 find_case_label_for_value (gswitch
*switch_stmt
, tree val
)
2231 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2232 tree default_case
= gimple_switch_default_label (switch_stmt
);
2234 for (low
= 0, high
= n
; high
- low
> 1; )
2236 size_t i
= (high
+ low
) / 2;
2237 tree t
= gimple_switch_label (switch_stmt
, i
);
2240 /* Cache the result of comparing CASE_LOW and val. */
2241 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2248 if (CASE_HIGH (t
) == NULL
)
2250 /* A singe-valued case label. */
2256 /* A case range. We can only handle integer ranges. */
2257 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2262 return default_case
;
/* Debugger convenience: dump BB to stderr with vops, memory symbols,
   and block boundaries.  */
2266 /* Dump a basic block on stderr. */
2269 gimple_debug_bb (basic_block bb
)
2271 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
/* Debugger convenience: dump block number N of the current function
   and return it.  */
2275 /* Dump basic block with index N on stderr. */
2278 gimple_debug_bb_n (int n
)
2280 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2281 return BASIC_BLOCK_FOR_FN (cfun
, n
);
/* Debugger convenience: dump the whole CFG to stderr.  */
2285 /* Dump the CFG on stderr.
2287 FLAGS are the same used by the tree dumping functions
2288 (see TDF_* in dumpfile.h). */
2291 gimple_debug_cfg (int flags
)
2293 gimple_dump_cfg (stderr
, flags
);
/* Dump the current function's CFG to FILE: with TDF_DETAILS a header,
   block/edge counts, and a brief per-block dump; with TDF_STATS the
   memory statistics; always the function body with block markers.  */
2297 /* Dump the program showing basic block boundaries on the given FILE.
2299 FLAGS are the same used by the tree dumping functions (see TDF_* in
2303 gimple_dump_cfg (FILE *file
, int flags
)
2305 if (flags
& TDF_DETAILS
)
2307 dump_function_header (file
, current_function_decl
, flags
);
2308 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2309 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2310 last_basic_block_for_fn (cfun
));
2312 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2313 fprintf (file
, "\n");
2316 if (flags
& TDF_STATS
)
2317 dump_cfg_stats (file
);
2319 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
/* Print a table of CFG memory usage (blocks, edges, totals) plus the
   running maximum of merged-label counts to FILE.  */
2323 /* Dump CFG statistics on FILE. */
2326 dump_cfg_stats (FILE *file
)
2328 static long max_num_merged_labels
= 0;
2329 unsigned long size
, total
= 0;
2332 const char * const fmt_str
= "%-30s%-13s%12s\n";
2333 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2334 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2335 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2336 const char *funcname
= current_function_name ();
2338 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2340 fprintf (file
, "---------------------------------------------------------\n");
2341 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2342 fprintf (file
, fmt_str
, "", " instances ", "used ");
2343 fprintf (file
, "---------------------------------------------------------\n");
2345 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2347 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2348 SCALE (size
), LABEL (size
));
/* Count edges by summing successor counts over all blocks.  */
2351 FOR_EACH_BB_FN (bb
, cfun
)
2352 num_edges
+= EDGE_COUNT (bb
->succs
);
2353 size
= num_edges
* sizeof (struct edge_def
);
2355 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2357 fprintf (file
, "---------------------------------------------------------\n");
2358 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2360 fprintf (file
, "---------------------------------------------------------\n");
2361 fprintf (file
, "\n");
2363 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2364 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2366 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2367 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2369 fprintf (file
, "\n");
/* Debugger entry point: print CFG statistics to stderr.  */
2373 /* Dump CFG statistics on stderr. Keep extern so that it's always
2374 linked in the final executable. */
2377 debug_cfg_stats (void)
2379 dump_cfg_stats (stderr
);
/* A call can make an abnormal goto only if the function has non-local
   labels or calls setjmp, the call has side effects, and the callee
   is not leaf (the affirmative return is outside the visible text).  */
2382 /*---------------------------------------------------------------------------
2383 Miscellaneous helpers
2384 ---------------------------------------------------------------------------*/
2386 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2387 flow. Transfers of control flow associated with EH are excluded. */
2390 call_can_make_abnormal_goto (gimple t
)
2392 /* If the function has no non-local labels, then a call cannot make an
2393 abnormal transfer of control. */
2394 if (!cfun
->has_nonlocal_label
2395 && !cfun
->calls_setjmp
)
2398 /* Likewise if the call has no side effects. */
2399 if (!gimple_has_side_effects (t
))
2402 /* Likewise if the called function is leaf. */
2403 if (gimple_call_flags (t
) & ECF_LEAF
)
/* True for computed gotos, and for calls per
   call_can_make_abnormal_goto.  */
2410 /* Return true if T can make an abnormal transfer of control flow.
2411 Transfers of control flow associated with EH are excluded. */
2414 stmt_can_make_abnormal_goto (gimple t
)
2416 if (computed_goto_p (t
))
2418 if (is_gimple_call (t
))
2419 return call_can_make_abnormal_goto (t
);
2424 /* Return true if T represents a stmt that always transfers control. */
2427 is_ctrl_stmt (gimple t
)
2429 switch (gimple_code (t
))
2443 /* Return true if T is a statement that may alter the flow of control
2444 (e.g., a call to a non-returning function). */
2447 is_ctrl_altering_stmt (gimple t
)
2451 switch (gimple_code (t
))
2454 /* Per stmt call flag indicates whether the call could alter
2456 if (gimple_call_ctrl_altering_p (t
))
2460 case GIMPLE_EH_DISPATCH
:
2461 /* EH_DISPATCH branches to the individual catch handlers at
2462 this level of a try or allowed-exceptions region. It can
2463 fallthru to the next statement as well. */
2467 if (gimple_asm_nlabels (as_a
<gasm
*> (t
)) > 0)
2472 /* OpenMP directives alter control flow. */
2475 case GIMPLE_TRANSACTION
:
2476 /* A transaction start alters control flow. */
2483 /* If a statement can throw, it alters control flow. */
2484 return stmt_can_throw_internal (t
);
2488 /* Return true if T is a simple local goto. */
2491 simple_goto_p (gimple t
)
2493 return (gimple_code (t
) == GIMPLE_GOTO
2494 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2498 /* Return true if STMT should start a new basic block. PREV_STMT is
2499 the statement preceding STMT. It is used when STMT is a label or a
2500 case label. Labels should only start a new basic block if their
2501 previous statement wasn't a label. Otherwise, sequence of labels
2502 would generate unnecessary basic blocks that only contain a single
2506 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2511 /* Labels start a new basic block only if the preceding statement
2512 wasn't a label of the same type. This prevents the creation of
2513 consecutive blocks that have nothing but a single label. */
2514 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
2516 /* Nonlocal and computed GOTO targets always start a new block. */
2517 if (DECL_NONLOCAL (gimple_label_label (label_stmt
))
2518 || FORCED_LABEL (gimple_label_label (label_stmt
)))
2521 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2523 if (DECL_NONLOCAL (gimple_label_label (
2524 as_a
<glabel
*> (prev_stmt
))))
2527 cfg_stats
.num_merged_labels
++;
2533 else if (gimple_code (stmt
) == GIMPLE_CALL
2534 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2535 /* setjmp acts similar to a nonlocal GOTO target and thus should
2536 start a new block. */
2543 /* Return true if T should end a basic block. */
2546 stmt_ends_bb_p (gimple t
)
2548 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2551 /* Remove block annotations and other data structures. */
2554 delete_tree_cfg_annotations (void)
2556 vec_free (label_to_block_map_for_fn (cfun
));
2560 /* Return the first statement in basic block BB. */
2563 first_stmt (basic_block bb
)
2565 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2568 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2576 /* Return the first non-label statement in basic block BB. */
2579 first_non_label_stmt (basic_block bb
)
2581 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2582 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2584 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2587 /* Return the last statement in basic block BB. */
2590 last_stmt (basic_block bb
)
2592 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2595 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2603 /* Return the last statement of an otherwise empty block. Return NULL
2604 if the block is totally empty, or if it contains more than one
2608 last_and_only_stmt (basic_block bb
)
2610 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2616 last
= gsi_stmt (i
);
2617 gsi_prev_nondebug (&i
);
2621 /* Empty statements should no longer appear in the instruction stream.
2622 Everything that might have appeared before should be deleted by
2623 remove_useless_stmts, and the optimizers should just gsi_remove
2624 instead of smashing with build_empty_stmt.
2626 Thus the only thing that should appear here in a block containing
2627 one executable statement is a label. */
2628 prev
= gsi_stmt (i
);
2629 if (gimple_code (prev
) == GIMPLE_LABEL
)
2635 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2638 reinstall_phi_args (edge new_edge
, edge old_edge
)
2644 vec
<edge_var_map
> *v
= redirect_edge_var_map_vector (old_edge
);
2648 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2649 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2650 i
++, gsi_next (&phis
))
2652 gphi
*phi
= phis
.phi ();
2653 tree result
= redirect_edge_var_map_result (vm
);
2654 tree arg
= redirect_edge_var_map_def (vm
);
2656 gcc_assert (result
== gimple_phi_result (phi
));
2658 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2661 redirect_edge_var_map_clear (old_edge
);
2664 /* Returns the basic block after which the new basic block created
2665 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2666 near its "logical" location. This is of most help to humans looking
2667 at debugging dumps. */
2670 split_edge_bb_loc (edge edge_in
)
2672 basic_block dest
= edge_in
->dest
;
2673 basic_block dest_prev
= dest
->prev_bb
;
2677 edge e
= find_edge (dest_prev
, dest
);
2678 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2679 return edge_in
->src
;
2684 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2685 Abort on abnormal edges. */
2688 gimple_split_edge (edge edge_in
)
2690 basic_block new_bb
, after_bb
, dest
;
2693 /* Abnormal edges cannot be split. */
2694 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2696 dest
= edge_in
->dest
;
2698 after_bb
= split_edge_bb_loc (edge_in
);
2700 new_bb
= create_empty_bb (after_bb
);
2701 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2702 new_bb
->count
= edge_in
->count
;
2703 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2704 new_edge
->probability
= REG_BR_PROB_BASE
;
2705 new_edge
->count
= edge_in
->count
;
2707 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2708 gcc_assert (e
== edge_in
);
2709 reinstall_phi_args (new_edge
, e
);
2715 /* Verify properties of the address expression T with base object BASE. */
2718 verify_address (tree t
, tree base
)
2721 bool old_side_effects
;
2723 bool new_side_effects
;
2725 old_constant
= TREE_CONSTANT (t
);
2726 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2728 recompute_tree_invariant_for_addr_expr (t
);
2729 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2730 new_constant
= TREE_CONSTANT (t
);
2732 if (old_constant
!= new_constant
)
2734 error ("constant not recomputed when ADDR_EXPR changed");
2737 if (old_side_effects
!= new_side_effects
)
2739 error ("side effects not recomputed when ADDR_EXPR changed");
2743 if (!(TREE_CODE (base
) == VAR_DECL
2744 || TREE_CODE (base
) == PARM_DECL
2745 || TREE_CODE (base
) == RESULT_DECL
))
2748 if (DECL_GIMPLE_REG_P (base
))
2750 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2757 /* Callback for walk_tree, check that all elements with address taken are
2758 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2759 inside a PHI node. */
2762 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2769 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2770 #define CHECK_OP(N, MSG) \
2771 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2772 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2774 switch (TREE_CODE (t
))
2777 if (SSA_NAME_IN_FREE_LIST (t
))
2779 error ("SSA name in freelist but still referenced");
2785 error ("INDIRECT_REF in gimple IL");
2789 x
= TREE_OPERAND (t
, 0);
2790 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2791 || !is_gimple_mem_ref_addr (x
))
2793 error ("invalid first operand of MEM_REF");
2796 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2797 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2799 error ("invalid offset operand of MEM_REF");
2800 return TREE_OPERAND (t
, 1);
2802 if (TREE_CODE (x
) == ADDR_EXPR
2803 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2809 x
= fold (ASSERT_EXPR_COND (t
));
2810 if (x
== boolean_false_node
)
2812 error ("ASSERT_EXPR with an always-false condition");
2818 error ("MODIFY_EXPR not expected while having tuples");
2825 gcc_assert (is_gimple_address (t
));
2827 /* Skip any references (they will be checked when we recurse down the
2828 tree) and ensure that any variable used as a prefix is marked
2830 for (x
= TREE_OPERAND (t
, 0);
2831 handled_component_p (x
);
2832 x
= TREE_OPERAND (x
, 0))
2835 if ((tem
= verify_address (t
, x
)))
2838 if (!(TREE_CODE (x
) == VAR_DECL
2839 || TREE_CODE (x
) == PARM_DECL
2840 || TREE_CODE (x
) == RESULT_DECL
))
2843 if (!TREE_ADDRESSABLE (x
))
2845 error ("address taken, but ADDRESSABLE bit not set");
2853 x
= COND_EXPR_COND (t
);
2854 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2856 error ("non-integral used in condition");
2859 if (!is_gimple_condexpr (x
))
2861 error ("invalid conditional operand");
2866 case NON_LVALUE_EXPR
:
2867 case TRUTH_NOT_EXPR
:
2871 case FIX_TRUNC_EXPR
:
2876 CHECK_OP (0, "invalid operand to unary operator");
2882 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2884 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2888 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2890 tree t0
= TREE_OPERAND (t
, 0);
2891 tree t1
= TREE_OPERAND (t
, 1);
2892 tree t2
= TREE_OPERAND (t
, 2);
2893 if (!tree_fits_uhwi_p (t1
)
2894 || !tree_fits_uhwi_p (t2
))
2896 error ("invalid position or size operand to BIT_FIELD_REF");
2899 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2900 && (TYPE_PRECISION (TREE_TYPE (t
))
2901 != tree_to_uhwi (t1
)))
2903 error ("integral result type precision does not match "
2904 "field size of BIT_FIELD_REF");
2907 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2908 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2909 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2910 != tree_to_uhwi (t1
)))
2912 error ("mode precision of non-integral result does not "
2913 "match field size of BIT_FIELD_REF");
2916 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2917 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2918 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2920 error ("position plus size exceeds size of referenced object in "
2925 t
= TREE_OPERAND (t
, 0);
2930 case ARRAY_RANGE_REF
:
2931 case VIEW_CONVERT_EXPR
:
2932 /* We have a nest of references. Verify that each of the operands
2933 that determine where to reference is either a constant or a variable,
2934 verify that the base is valid, and then show we've already checked
2936 while (handled_component_p (t
))
2938 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2939 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2940 else if (TREE_CODE (t
) == ARRAY_REF
2941 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2943 CHECK_OP (1, "invalid array index");
2944 if (TREE_OPERAND (t
, 2))
2945 CHECK_OP (2, "invalid array lower bound");
2946 if (TREE_OPERAND (t
, 3))
2947 CHECK_OP (3, "invalid array stride");
2949 else if (TREE_CODE (t
) == BIT_FIELD_REF
2950 || TREE_CODE (t
) == REALPART_EXPR
2951 || TREE_CODE (t
) == IMAGPART_EXPR
)
2953 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2958 t
= TREE_OPERAND (t
, 0);
2961 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2963 error ("invalid reference prefix");
2970 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2971 POINTER_PLUS_EXPR. */
2972 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2974 error ("invalid operand to plus/minus, type is a pointer");
2977 CHECK_OP (0, "invalid operand to binary operator");
2978 CHECK_OP (1, "invalid operand to binary operator");
2981 case POINTER_PLUS_EXPR
:
2982 /* Check to make sure the first operand is a pointer or reference type. */
2983 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2985 error ("invalid operand to pointer plus, first operand is not a pointer");
2988 /* Check to make sure the second operand is a ptrofftype. */
2989 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2991 error ("invalid operand to pointer plus, second operand is not an "
2992 "integer type of appropriate width");
3002 case UNORDERED_EXPR
:
3011 case TRUNC_DIV_EXPR
:
3013 case FLOOR_DIV_EXPR
:
3014 case ROUND_DIV_EXPR
:
3015 case TRUNC_MOD_EXPR
:
3017 case FLOOR_MOD_EXPR
:
3018 case ROUND_MOD_EXPR
:
3020 case EXACT_DIV_EXPR
:
3030 CHECK_OP (0, "invalid operand to binary operator");
3031 CHECK_OP (1, "invalid operand to binary operator");
3035 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
3039 case CASE_LABEL_EXPR
:
3042 error ("invalid CASE_CHAIN");
3056 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3057 Returns true if there is an error, otherwise false. */
3060 verify_types_in_gimple_min_lval (tree expr
)
3064 if (is_gimple_id (expr
))
3067 if (TREE_CODE (expr
) != TARGET_MEM_REF
3068 && TREE_CODE (expr
) != MEM_REF
)
3070 error ("invalid expression for min lvalue");
3074 /* TARGET_MEM_REFs are strange beasts. */
3075 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3078 op
= TREE_OPERAND (expr
, 0);
3079 if (!is_gimple_val (op
))
3081 error ("invalid operand in indirect reference");
3082 debug_generic_stmt (op
);
3085 /* Memory references now generally can involve a value conversion. */
3090 /* Verify if EXPR is a valid GIMPLE reference expression. If
3091 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3092 if there is an error, otherwise false. */
3095 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
3097 while (handled_component_p (expr
))
3099 tree op
= TREE_OPERAND (expr
, 0);
3101 if (TREE_CODE (expr
) == ARRAY_REF
3102 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3104 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3105 || (TREE_OPERAND (expr
, 2)
3106 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3107 || (TREE_OPERAND (expr
, 3)
3108 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3110 error ("invalid operands to array reference");
3111 debug_generic_stmt (expr
);
3116 /* Verify if the reference array element types are compatible. */
3117 if (TREE_CODE (expr
) == ARRAY_REF
3118 && !useless_type_conversion_p (TREE_TYPE (expr
),
3119 TREE_TYPE (TREE_TYPE (op
))))
3121 error ("type mismatch in array reference");
3122 debug_generic_stmt (TREE_TYPE (expr
));
3123 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3126 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3127 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3128 TREE_TYPE (TREE_TYPE (op
))))
3130 error ("type mismatch in array range reference");
3131 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3132 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3136 if ((TREE_CODE (expr
) == REALPART_EXPR
3137 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3138 && !useless_type_conversion_p (TREE_TYPE (expr
),
3139 TREE_TYPE (TREE_TYPE (op
))))
3141 error ("type mismatch in real/imagpart reference");
3142 debug_generic_stmt (TREE_TYPE (expr
));
3143 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3147 if (TREE_CODE (expr
) == COMPONENT_REF
3148 && !useless_type_conversion_p (TREE_TYPE (expr
),
3149 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3151 error ("type mismatch in component reference");
3152 debug_generic_stmt (TREE_TYPE (expr
));
3153 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3157 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3159 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3160 that their operand is not an SSA name or an invariant when
3161 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3162 bug). Otherwise there is nothing to verify, gross mismatches at
3163 most invoke undefined behavior. */
3165 && (TREE_CODE (op
) == SSA_NAME
3166 || is_gimple_min_invariant (op
)))
3168 error ("conversion of an SSA_NAME on the left hand side");
3169 debug_generic_stmt (expr
);
3172 else if (TREE_CODE (op
) == SSA_NAME
3173 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3175 error ("conversion of register to a different size");
3176 debug_generic_stmt (expr
);
3179 else if (!handled_component_p (op
))
3186 if (TREE_CODE (expr
) == MEM_REF
)
3188 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3190 error ("invalid address operand in MEM_REF");
3191 debug_generic_stmt (expr
);
3194 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3195 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3197 error ("invalid offset operand in MEM_REF");
3198 debug_generic_stmt (expr
);
3202 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3204 if (!TMR_BASE (expr
)
3205 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3207 error ("invalid address operand in TARGET_MEM_REF");
3210 if (!TMR_OFFSET (expr
)
3211 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3212 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3214 error ("invalid offset operand in TARGET_MEM_REF");
3215 debug_generic_stmt (expr
);
3220 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3221 && verify_types_in_gimple_min_lval (expr
));
3224 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3225 list of pointer-to types that is trivially convertible to DEST. */
3228 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3232 if (!TYPE_POINTER_TO (src_obj
))
3235 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3236 if (useless_type_conversion_p (dest
, src
))
3242 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3243 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3246 valid_fixed_convert_types_p (tree type1
, tree type2
)
3248 return (FIXED_POINT_TYPE_P (type1
)
3249 && (INTEGRAL_TYPE_P (type2
)
3250 || SCALAR_FLOAT_TYPE_P (type2
)
3251 || FIXED_POINT_TYPE_P (type2
)));
3254 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3255 is a problem, otherwise false. */
3258 verify_gimple_call (gcall
*stmt
)
3260 tree fn
= gimple_call_fn (stmt
);
3261 tree fntype
, fndecl
;
3264 if (gimple_call_internal_p (stmt
))
3268 error ("gimple call has two targets");
3269 debug_generic_stmt (fn
);
3277 error ("gimple call has no target");
3282 if (fn
&& !is_gimple_call_addr (fn
))
3284 error ("invalid function in gimple call");
3285 debug_generic_stmt (fn
);
3290 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3291 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3292 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3294 error ("non-function in gimple call");
3298 fndecl
= gimple_call_fndecl (stmt
);
3300 && TREE_CODE (fndecl
) == FUNCTION_DECL
3301 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3302 && !DECL_PURE_P (fndecl
)
3303 && !TREE_READONLY (fndecl
))
3305 error ("invalid pure const state for function");
3309 if (gimple_call_lhs (stmt
)
3310 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3311 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3313 error ("invalid LHS in gimple call");
3317 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3319 error ("LHS in noreturn call");
3323 fntype
= gimple_call_fntype (stmt
);
3325 && gimple_call_lhs (stmt
)
3326 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3328 /* ??? At least C++ misses conversions at assignments from
3329 void * call results.
3330 ??? Java is completely off. Especially with functions
3331 returning java.lang.Object.
3332 For now simply allow arbitrary pointer type conversions. */
3333 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3334 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3336 error ("invalid conversion in gimple call");
3337 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3338 debug_generic_stmt (TREE_TYPE (fntype
));
3342 if (gimple_call_chain (stmt
)
3343 && !is_gimple_val (gimple_call_chain (stmt
)))
3345 error ("invalid static chain in gimple call");
3346 debug_generic_stmt (gimple_call_chain (stmt
));
3350 /* If there is a static chain argument, the call should either be
3351 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3352 if (gimple_call_chain (stmt
)
3354 && !DECL_STATIC_CHAIN (fndecl
))
3356 error ("static chain with function that doesn%'t use one");
3360 /* ??? The C frontend passes unpromoted arguments in case it
3361 didn't see a function declaration before the call. So for now
3362 leave the call arguments mostly unverified. Once we gimplify
3363 unit-at-a-time we have a chance to fix this. */
3365 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3367 tree arg
= gimple_call_arg (stmt
, i
);
3368 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3369 && !is_gimple_val (arg
))
3370 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3371 && !is_gimple_lvalue (arg
)))
3373 error ("invalid argument to gimple call");
3374 debug_generic_expr (arg
);
3382 /* Verifies the gimple comparison with the result type TYPE and
3383 the operands OP0 and OP1. */
3386 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3388 tree op0_type
= TREE_TYPE (op0
);
3389 tree op1_type
= TREE_TYPE (op1
);
3391 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3393 error ("invalid operands in gimple comparison");
3397 /* For comparisons we do not have the operations type as the
3398 effective type the comparison is carried out in. Instead
3399 we require that either the first operand is trivially
3400 convertible into the second, or the other way around.
3401 Because we special-case pointers to void we allow
3402 comparisons of pointers with the same mode as well. */
3403 if (!useless_type_conversion_p (op0_type
, op1_type
)
3404 && !useless_type_conversion_p (op1_type
, op0_type
)
3405 && (!POINTER_TYPE_P (op0_type
)
3406 || !POINTER_TYPE_P (op1_type
)
3407 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3409 error ("mismatching comparison operand types");
3410 debug_generic_expr (op0_type
);
3411 debug_generic_expr (op1_type
);
3415 /* The resulting type of a comparison may be an effective boolean type. */
3416 if (INTEGRAL_TYPE_P (type
)
3417 && (TREE_CODE (type
) == BOOLEAN_TYPE
3418 || TYPE_PRECISION (type
) == 1))
3420 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3421 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3423 error ("vector comparison returning a boolean");
3424 debug_generic_expr (op0_type
);
3425 debug_generic_expr (op1_type
);
3429 /* Or an integer vector type with the same size and element count
3430 as the comparison operand types. */
3431 else if (TREE_CODE (type
) == VECTOR_TYPE
3432 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3434 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3435 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3437 error ("non-vector operands in vector comparison");
3438 debug_generic_expr (op0_type
);
3439 debug_generic_expr (op1_type
);
3443 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3444 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3445 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3446 /* The result of a vector comparison is of signed
3448 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3450 error ("invalid vector comparison resulting type");
3451 debug_generic_expr (type
);
3457 error ("bogus comparison result type");
3458 debug_generic_expr (type
);
3465 /* Verify a gimple assignment statement STMT with an unary rhs.
3466 Returns true if anything is wrong. */
3469 verify_gimple_assign_unary (gassign
*stmt
)
3471 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3472 tree lhs
= gimple_assign_lhs (stmt
);
3473 tree lhs_type
= TREE_TYPE (lhs
);
3474 tree rhs1
= gimple_assign_rhs1 (stmt
);
3475 tree rhs1_type
= TREE_TYPE (rhs1
);
3477 if (!is_gimple_reg (lhs
))
3479 error ("non-register as LHS of unary operation");
3483 if (!is_gimple_val (rhs1
))
3485 error ("invalid operand in unary operation");
3489 /* First handle conversions. */
3494 /* Allow conversions from pointer type to integral type only if
3495 there is no sign or zero extension involved.
3496 For targets were the precision of ptrofftype doesn't match that
3497 of pointers we need to allow arbitrary conversions to ptrofftype. */
3498 if ((POINTER_TYPE_P (lhs_type
)
3499 && INTEGRAL_TYPE_P (rhs1_type
))
3500 || (POINTER_TYPE_P (rhs1_type
)
3501 && INTEGRAL_TYPE_P (lhs_type
)
3502 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3503 || ptrofftype_p (sizetype
))))
3506 /* Allow conversion from integral to offset type and vice versa. */
3507 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3508 && INTEGRAL_TYPE_P (rhs1_type
))
3509 || (INTEGRAL_TYPE_P (lhs_type
)
3510 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3513 /* Otherwise assert we are converting between types of the
3515 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3517 error ("invalid types in nop conversion");
3518 debug_generic_expr (lhs_type
);
3519 debug_generic_expr (rhs1_type
);
3526 case ADDR_SPACE_CONVERT_EXPR
:
3528 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3529 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3530 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3532 error ("invalid types in address space conversion");
3533 debug_generic_expr (lhs_type
);
3534 debug_generic_expr (rhs1_type
);
3541 case FIXED_CONVERT_EXPR
:
3543 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3544 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3546 error ("invalid types in fixed-point conversion");
3547 debug_generic_expr (lhs_type
);
3548 debug_generic_expr (rhs1_type
);
3557 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3558 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3559 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3561 error ("invalid types in conversion to floating point");
3562 debug_generic_expr (lhs_type
);
3563 debug_generic_expr (rhs1_type
);
3570 case FIX_TRUNC_EXPR
:
3572 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3573 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3574 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3576 error ("invalid types in conversion to integer");
3577 debug_generic_expr (lhs_type
);
3578 debug_generic_expr (rhs1_type
);
3584 case REDUC_MAX_EXPR
:
3585 case REDUC_MIN_EXPR
:
3586 case REDUC_PLUS_EXPR
:
3587 if (!VECTOR_TYPE_P (rhs1_type
)
3588 || !useless_type_conversion_p (lhs_type
, TREE_TYPE (rhs1_type
)))
3590 error ("reduction should convert from vector to element type");
3591 debug_generic_expr (lhs_type
);
3592 debug_generic_expr (rhs1_type
);
3597 case VEC_UNPACK_HI_EXPR
:
3598 case VEC_UNPACK_LO_EXPR
:
3599 case VEC_UNPACK_FLOAT_HI_EXPR
:
3600 case VEC_UNPACK_FLOAT_LO_EXPR
:
3615 /* For the remaining codes assert there is no conversion involved. */
3616 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3618 error ("non-trivial conversion in unary operation");
3619 debug_generic_expr (lhs_type
);
3620 debug_generic_expr (rhs1_type
);
3627 /* Verify a gimple assignment statement STMT with a binary rhs.
3628 Returns true if anything is wrong. */
3631 verify_gimple_assign_binary (gassign
*stmt
)
3633 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3634 tree lhs
= gimple_assign_lhs (stmt
);
3635 tree lhs_type
= TREE_TYPE (lhs
);
3636 tree rhs1
= gimple_assign_rhs1 (stmt
);
3637 tree rhs1_type
= TREE_TYPE (rhs1
);
3638 tree rhs2
= gimple_assign_rhs2 (stmt
);
3639 tree rhs2_type
= TREE_TYPE (rhs2
);
3641 if (!is_gimple_reg (lhs
))
3643 error ("non-register as LHS of binary operation");
3647 if (!is_gimple_val (rhs1
)
3648 || !is_gimple_val (rhs2
))
3650 error ("invalid operands in binary operation");
3654 /* First handle operations that involve different types. */
3659 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3660 || !(INTEGRAL_TYPE_P (rhs1_type
)
3661 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3662 || !(INTEGRAL_TYPE_P (rhs2_type
)
3663 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3665 error ("type mismatch in complex expression");
3666 debug_generic_expr (lhs_type
);
3667 debug_generic_expr (rhs1_type
);
3668 debug_generic_expr (rhs2_type
);
3680 /* Shifts and rotates are ok on integral types, fixed point
3681 types and integer vector types. */
3682 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3683 && !FIXED_POINT_TYPE_P (rhs1_type
)
3684 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3685 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3686 || (!INTEGRAL_TYPE_P (rhs2_type
)
3687 /* Vector shifts of vectors are also ok. */
3688 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3689 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3690 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3691 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3692 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3694 error ("type mismatch in shift expression");
3695 debug_generic_expr (lhs_type
);
3696 debug_generic_expr (rhs1_type
);
3697 debug_generic_expr (rhs2_type
);
3704 case WIDEN_LSHIFT_EXPR
:
3706 if (!INTEGRAL_TYPE_P (lhs_type
)
3707 || !INTEGRAL_TYPE_P (rhs1_type
)
3708 || TREE_CODE (rhs2
) != INTEGER_CST
3709 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3711 error ("type mismatch in widening vector shift expression");
3712 debug_generic_expr (lhs_type
);
3713 debug_generic_expr (rhs1_type
);
3714 debug_generic_expr (rhs2_type
);
3721 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3722 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3724 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3725 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3726 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3727 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3728 || TREE_CODE (rhs2
) != INTEGER_CST
3729 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3730 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3732 error ("type mismatch in widening vector shift expression");
3733 debug_generic_expr (lhs_type
);
3734 debug_generic_expr (rhs1_type
);
3735 debug_generic_expr (rhs2_type
);
3745 tree lhs_etype
= lhs_type
;
3746 tree rhs1_etype
= rhs1_type
;
3747 tree rhs2_etype
= rhs2_type
;
3748 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3750 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3751 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3753 error ("invalid non-vector operands to vector valued plus");
3756 lhs_etype
= TREE_TYPE (lhs_type
);
3757 rhs1_etype
= TREE_TYPE (rhs1_type
);
3758 rhs2_etype
= TREE_TYPE (rhs2_type
);
3760 if (POINTER_TYPE_P (lhs_etype
)
3761 || POINTER_TYPE_P (rhs1_etype
)
3762 || POINTER_TYPE_P (rhs2_etype
))
3764 error ("invalid (pointer) operands to plus/minus");
3768 /* Continue with generic binary expression handling. */
3772 case POINTER_PLUS_EXPR
:
3774 if (!POINTER_TYPE_P (rhs1_type
)
3775 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3776 || !ptrofftype_p (rhs2_type
))
3778 error ("type mismatch in pointer plus expression");
3779 debug_generic_stmt (lhs_type
);
3780 debug_generic_stmt (rhs1_type
);
3781 debug_generic_stmt (rhs2_type
);
3788 case TRUTH_ANDIF_EXPR
:
3789 case TRUTH_ORIF_EXPR
:
3790 case TRUTH_AND_EXPR
:
3792 case TRUTH_XOR_EXPR
:
3802 case UNORDERED_EXPR
:
3810 /* Comparisons are also binary, but the result type is not
3811 connected to the operand types. */
3812 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3814 case WIDEN_MULT_EXPR
:
3815 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3817 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3818 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3820 case WIDEN_SUM_EXPR
:
3821 case VEC_WIDEN_MULT_HI_EXPR
:
3822 case VEC_WIDEN_MULT_LO_EXPR
:
3823 case VEC_WIDEN_MULT_EVEN_EXPR
:
3824 case VEC_WIDEN_MULT_ODD_EXPR
:
3825 case VEC_PACK_TRUNC_EXPR
:
3826 case VEC_PACK_SAT_EXPR
:
3827 case VEC_PACK_FIX_TRUNC_EXPR
:
3832 case MULT_HIGHPART_EXPR
:
3833 case TRUNC_DIV_EXPR
:
3835 case FLOOR_DIV_EXPR
:
3836 case ROUND_DIV_EXPR
:
3837 case TRUNC_MOD_EXPR
:
3839 case FLOOR_MOD_EXPR
:
3840 case ROUND_MOD_EXPR
:
3842 case EXACT_DIV_EXPR
:
3848 /* Continue with generic binary expression handling. */
3855 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3856 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3858 error ("type mismatch in binary expression");
3859 debug_generic_stmt (lhs_type
);
3860 debug_generic_stmt (rhs1_type
);
3861 debug_generic_stmt (rhs2_type
);
/* NOTE(review): extraction-damaged text -- statements are hard-wrapped
   mid-expression, original file line numbers are fused into the text, and
   structural lines (return type, braces, switch header, case bodies' returns)
   are missing.  Code left byte-identical; comments only.  Restore from the
   upstream GCC tree-cfg.c before attempting any code change.  */
3868 /* Verify a gimple assignment statement STMT with a ternary rhs.
3869 Returns true if anything is wrong. */
3872 verify_gimple_assign_ternary (gassign
*stmt
)
/* Pull out the LHS and the three RHS operands plus their types once,
   so each case below can check type agreement.  */
3874 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3875 tree lhs
= gimple_assign_lhs (stmt
);
3876 tree lhs_type
= TREE_TYPE (lhs
);
3877 tree rhs1
= gimple_assign_rhs1 (stmt
);
3878 tree rhs1_type
= TREE_TYPE (rhs1
);
3879 tree rhs2
= gimple_assign_rhs2 (stmt
);
3880 tree rhs2_type
= TREE_TYPE (rhs2
);
3881 tree rhs3
= gimple_assign_rhs3 (stmt
);
3882 tree rhs3_type
= TREE_TYPE (rhs3
);
/* A ternary operation must target a register-like LHS.  */
3884 if (!is_gimple_reg (lhs
))
3886 error ("non-register as LHS of ternary operation");
/* rhs1 of a (VEC_)COND_EXPR may be a condition expression; all other
   operands must be plain GIMPLE values.  */
3890 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3891 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3892 || !is_gimple_val (rhs2
)
3893 || !is_gimple_val (rhs3
))
3895 error ("invalid operands in ternary operation");
3899 /* First handle operations that involve different types. */
/* Widening multiply-accumulate: operands are narrower integral or
   fixed-point values; result type must be at least twice as wide.  */
3902 case WIDEN_MULT_PLUS_EXPR
:
3903 case WIDEN_MULT_MINUS_EXPR
:
3904 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3905 && !FIXED_POINT_TYPE_P (rhs1_type
))
3906 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3907 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3908 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3909 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3911 error ("type mismatch in widening multiply-accumulate expression");
3912 debug_generic_expr (lhs_type
);
3913 debug_generic_expr (rhs1_type
);
3914 debug_generic_expr (rhs2_type
);
3915 debug_generic_expr (rhs3_type
);
/* FMA-style case (case label lost in extraction): all three operand
   types must trivially convert to the LHS type.  */
3921 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3922 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3923 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3925 error ("type mismatch in fused multiply-add expression");
3926 debug_generic_expr (lhs_type
);
3927 debug_generic_expr (rhs1_type
);
3928 debug_generic_expr (rhs2_type
);
3929 debug_generic_expr (rhs3_type
);
/* Conditional expression: the two selected values (rhs2/rhs3) must
   match the LHS type; rhs1 is the condition.  */
3936 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3937 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3939 error ("type mismatch in conditional expression");
3940 debug_generic_expr (lhs_type
);
3941 debug_generic_expr (rhs2_type
);
3942 debug_generic_expr (rhs3_type
);
/* Vector permute (VEC_PERM_EXPR; case label lost): data operands must
   match the LHS type ...  */
3948 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3949 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3951 error ("type mismatch in vector permute expression");
3952 debug_generic_expr (lhs_type
);
3953 debug_generic_expr (rhs1_type
);
3954 debug_generic_expr (rhs2_type
);
3955 debug_generic_expr (rhs3_type
);
/* ... all three operands must be vectors ...  */
3959 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3960 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3961 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3963 error ("vector types expected in vector permute expression");
3964 debug_generic_expr (lhs_type
);
3965 debug_generic_expr (rhs1_type
);
3966 debug_generic_expr (rhs2_type
);
3967 debug_generic_expr (rhs3_type
);
/* ... with matching element counts across operands and result ...  */
3971 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3972 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3973 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3974 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3975 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3977 error ("vectors with different element number found "
3978 "in vector permute expression")
3979 debug_generic_expr (lhs_type
);
3980 debug_generic_expr (rhs1_type
);
3981 debug_generic_expr (rhs2_type
);
3982 debug_generic_expr (rhs3_type
);
/* ... and the mask (rhs3) must be an integer vector whose element mode
   has the same bitsize as the data elements.  */
3986 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3987 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3988 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3990 error ("invalid mask type in vector permute expression");
3991 debug_generic_expr (lhs_type
);
3992 debug_generic_expr (rhs1_type
);
3993 debug_generic_expr (rhs2_type
);
3994 debug_generic_expr (rhs3_type
);
/* SAD_EXPR (case label lost): the two compared vectors agree, the
   accumulator matches the LHS, and element widening fits in the LHS.  */
4001 if (!useless_type_conversion_p (rhs1_type
, rhs2_type
)
4002 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
4003 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4004 (TYPE_MODE (TREE_TYPE (rhs1_type
))))
4005 > GET_MODE_BITSIZE (GET_MODE_INNER
4006 (TYPE_MODE (TREE_TYPE (lhs_type
)))))
4008 error ("type mismatch in sad expression");
4009 debug_generic_expr (lhs_type
);
4010 debug_generic_expr (rhs1_type
);
4011 debug_generic_expr (rhs2_type
);
4012 debug_generic_expr (rhs3_type
);
4016 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
4017 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
4018 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
4020 error ("vector types expected in sad expression");
4021 debug_generic_expr (lhs_type
);
4022 debug_generic_expr (rhs1_type
);
4023 debug_generic_expr (rhs2_type
);
4024 debug_generic_expr (rhs3_type
);
/* REALIGN_LOAD_EXPR is accepted without further checks here (its body
   is not visible in this extraction).  */
4031 case REALIGN_LOAD_EXPR
:
/* NOTE(review): extraction-damaged text -- lines are hard-wrapped, original
   line numbers fused in, and structural lines (braces, switch header, case
   labels, returns) are missing.  Code left byte-identical; comments only.  */
4041 /* Verify a gimple assignment statement STMT with a single rhs.
4042 Returns true if anything is wrong. */
4045 verify_gimple_assign_single (gassign
*stmt
)
4047 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
4048 tree lhs
= gimple_assign_lhs (stmt
);
4049 tree lhs_type
= TREE_TYPE (lhs
);
4050 tree rhs1
= gimple_assign_rhs1 (stmt
);
4051 tree rhs1_type
= TREE_TYPE (rhs1
);
/* A plain copy requires trivially convertible types on both sides.  */
4054 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4056 error ("non-trivial conversion at assignment");
4057 debug_generic_expr (lhs_type
);
4058 debug_generic_expr (rhs1_type
);
/* Clobbers may only target declarations or MEM_REFs.  */
4062 if (gimple_clobber_p (stmt
)
4063 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4065 error ("non-decl/MEM_REF LHS in clobber statement");
4066 debug_generic_expr (lhs
);
/* Memory-reference LHSs get their reference trees checked separately.  */
4070 if (handled_component_p (lhs
)
4071 || TREE_CODE (lhs
) == MEM_REF
4072 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4073 res
|= verify_types_in_gimple_reference (lhs
, true);
4075 /* Special codes we cannot handle via their class. */
/* ADDR_EXPR case (case label lost in extraction): the operand must be
   addressable and its type compatible with the pointed-to type.  */
4080 tree op
= TREE_OPERAND (rhs1
, 0);
4081 if (!is_gimple_addressable (op
))
4083 error ("invalid operand in unary expression");
4087 /* Technically there is no longer a need for matching types, but
4088 gimple hygiene asks for this check. In LTO we can end up
4089 combining incompatible units and thus end up with addresses
4090 of globals that change their type to a common one. */
4092 && !types_compatible_p (TREE_TYPE (op
),
4093 TREE_TYPE (TREE_TYPE (rhs1
)))
4094 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4097 error ("type mismatch in address expression");
4098 debug_generic_stmt (TREE_TYPE (rhs1
));
4099 debug_generic_stmt (TREE_TYPE (op
));
4103 return verify_types_in_gimple_reference (op
, true);
/* INDIRECT_REF must never survive gimplification.  */
4108 error ("INDIRECT_REF in gimple IL");
/* Memory-reference RHS codes: a register-typed load needs a register
   LHS; then validate the reference tree itself.  */
4114 case ARRAY_RANGE_REF
:
4115 case VIEW_CONVERT_EXPR
:
4118 case TARGET_MEM_REF
:
4120 if (!is_gimple_reg (lhs
)
4121 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4123 error ("invalid rhs for gimple memory store");
4124 debug_generic_stmt (lhs
);
4125 debug_generic_stmt (rhs1
);
4128 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4140 /* tcc_declaration */
/* Aggregate copies (decl on the RHS): disallow a register-type store
   where neither side is a register.  */
4145 if (!is_gimple_reg (lhs
)
4146 && !is_gimple_reg (rhs1
)
4147 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4149 error ("invalid rhs for gimple memory store");
4150 debug_generic_stmt (lhs
);
4151 debug_generic_stmt (rhs1
);
/* CONSTRUCTOR case (case label lost): vector constructors get detailed
   element type/count/index checks.  */
4157 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4160 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4162 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4164 /* For vector CONSTRUCTORs we require that either it is empty
4165 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4166 (then the element count must be correct to cover the whole
4167 outer vector and index must be NULL on all elements, or it is
4168 a CONSTRUCTOR of scalar elements, where we as an exception allow
4169 smaller number of elements (assuming zero filling) and
4170 consecutive indexes as compared to NULL indexes (such
4171 CONSTRUCTORs can appear in the IL from FEs). */
4172 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4174 if (elt_t
== NULL_TREE
)
4176 elt_t
= TREE_TYPE (elt_v
);
4177 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4179 tree elt_t
= TREE_TYPE (elt_v
);
4180 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4183 error ("incorrect type of vector CONSTRUCTOR"
4185 debug_generic_stmt (rhs1
);
4188 else if (CONSTRUCTOR_NELTS (rhs1
)
4189 * TYPE_VECTOR_SUBPARTS (elt_t
)
4190 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4192 error ("incorrect number of vector CONSTRUCTOR"
4194 debug_generic_stmt (rhs1
);
4198 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4201 error ("incorrect type of vector CONSTRUCTOR elements");
4202 debug_generic_stmt (rhs1
);
4205 else if (CONSTRUCTOR_NELTS (rhs1
)
4206 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4208 error ("incorrect number of vector CONSTRUCTOR elements");
4209 debug_generic_stmt (rhs1
);
4213 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4215 error ("incorrect type of vector CONSTRUCTOR elements");
4216 debug_generic_stmt (rhs1
);
/* Element indexes must be NULL (or consecutive integers -- see the
   comment above); non-conforming indexes are rejected here.  */
4219 if (elt_i
!= NULL_TREE
4220 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4221 || TREE_CODE (elt_i
) != INTEGER_CST
4222 || compare_tree_int (elt_i
, i
) != 0))
4224 error ("vector CONSTRUCTOR with non-NULL element index");
4225 debug_generic_stmt (rhs1
);
4228 if (!is_gimple_val (elt_v
))
4230 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4231 debug_generic_stmt (rhs1
);
/* Non-vector constructors must be empty in GIMPLE.  */
4236 else if (CONSTRUCTOR_NELTS (rhs1
) != 0)
4238 error ("non-vector CONSTRUCTOR with elements");
4239 debug_generic_stmt (rhs1
);
4245 case WITH_SIZE_EXPR
:
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Dispatcher over the RHS class of a GIMPLE_ASSIGN -- forwards to the
   single/unary/binary/ternary verifier.  The default case and braces
   were lost in extraction.  */
4255 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4256 is a problem, otherwise false. */
4259 verify_gimple_assign (gassign
*stmt
)
4261 switch (gimple_assign_rhs_class (stmt
))
4263 case GIMPLE_SINGLE_RHS
:
4264 return verify_gimple_assign_single (stmt
);
4266 case GIMPLE_UNARY_RHS
:
4267 return verify_gimple_assign_unary (stmt
);
4269 case GIMPLE_BINARY_RHS
:
4270 return verify_gimple_assign_binary (stmt
);
4272 case GIMPLE_TERNARY_RHS
:
4273 return verify_gimple_assign_ternary (stmt
);
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4280 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4281 is a problem, otherwise false. */
4284 verify_gimple_return (greturn
*stmt
)
4286 tree op
= gimple_return_retval (stmt
);
4287 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4289 /* We cannot test for present return values as we do not fix up missing
4290 return values from the original source. */
/* The returned value must be a GIMPLE value or the RESULT_DECL itself.  */
4294 if (!is_gimple_val (op
)
4295 && TREE_CODE (op
) != RESULT_DECL
)
4297 error ("invalid operand in return statement");
4298 debug_generic_stmt (op
);
/* For DECL_BY_REFERENCE results, compare against the pointed-to type --
   op is replaced by its type here for the conversion check below.  */
4302 if ((TREE_CODE (op
) == RESULT_DECL
4303 && DECL_BY_REFERENCE (op
))
4304 || (TREE_CODE (op
) == SSA_NAME
4305 && SSA_NAME_VAR (op
)
4306 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4307 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4308 op
= TREE_TYPE (op
);
4310 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4312 error ("invalid conversion in return statement");
4313 debug_generic_stmt (restype
);
4314 debug_generic_stmt (TREE_TYPE (op
));
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4322 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4323 is a problem, otherwise false. */
4326 verify_gimple_goto (ggoto
*stmt
)
4328 tree dest
= gimple_goto_dest (stmt
);
4330 /* ??? We have two canonical forms of direct goto destinations, a
4331 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* A computed goto destination must at least be a pointer-typed value.  */
4332 if (TREE_CODE (dest
) != LABEL_DECL
4333 && (!is_gimple_val (dest
)
4334 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4336 error ("goto destination is neither a label nor a pointer");
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4343 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4344 is a problem, otherwise false. */
4347 verify_gimple_switch (gswitch
*stmt
)
4350 tree elt
, prev_upper_bound
= NULL_TREE
;
4351 tree index_type
, elt_type
= NULL_TREE
;
/* The switch index must be a GIMPLE value of integral type.  */
4353 if (!is_gimple_val (gimple_switch_index (stmt
)))
4355 error ("invalid operand to switch statement");
4356 debug_generic_stmt (gimple_switch_index (stmt
));
4360 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4361 if (! INTEGRAL_TYPE_P (index_type
))
4363 error ("non-integral type switch statement");
4364 debug_generic_expr (index_type
);
/* Label 0 is the default case: it must carry no low/high bounds.  */
4368 elt
= gimple_switch_label (stmt
, 0);
4369 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4371 error ("invalid default case label in switch statement");
4372 debug_generic_expr (elt
);
/* Walk the remaining case labels checking bounds, types, precision
   and sortedness.  */
4376 n
= gimple_switch_num_labels (stmt
);
4377 for (i
= 1; i
< n
; i
++)
4379 elt
= gimple_switch_label (stmt
, i
);
4381 if (! CASE_LOW (elt
))
4383 error ("invalid case label in switch statement");
4384 debug_generic_expr (elt
);
/* A range case must have CASE_LOW < CASE_HIGH (guard line lost).  */
4388 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4390 error ("invalid case range in switch statement");
4391 debug_generic_expr (elt
);
/* All case labels must share one type (elt_type, set from the first
   label below).  */
4397 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4398 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4400 error ("type mismatch for case label in switch statement");
4401 debug_generic_expr (elt
);
4407 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4408 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4410 error ("type precision mismatch in switch statement");
/* Labels must be sorted strictly ascending; prev_upper_bound tracks
   the previous case's upper limit.  */
4415 if (prev_upper_bound
)
4417 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4419 error ("case labels not sorted in switch statement");
4424 prev_upper_bound
= CASE_HIGH (elt
);
4425 if (! prev_upper_bound
)
4426 prev_upper_bound
= CASE_LOW (elt
);
/* NOTE(review): extraction-damaged text; code left byte-identical.
   The (trivial) body returning false was lost in extraction.  */
4432 /* Verify a gimple debug statement STMT.
4433 Returns true if anything is wrong. */
4436 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4438 /* There isn't much that could be wrong in a gimple debug stmt. A
4439 gimple debug bind stmt, for example, maps a tree, that's usually
4440 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4441 component or member of an aggregate type, to another tree, that
4442 can be an arbitrary expression. These stmts expand into debug
4443 insns, and are converted to debug notes by var-tracking.c. */
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4447 /* Verify a gimple label statement STMT.
4448 Returns true if anything is wrong. */
4451 verify_gimple_label (glabel
*stmt
)
4453 tree decl
= gimple_label_label (stmt
);
4457 if (TREE_CODE (decl
) != LABEL_DECL
)
/* Ordinary (non-nonlocal, non-forced) labels must belong to the
   current function.  */
4459 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4460 && DECL_CONTEXT (decl
) != current_function_decl
)
4462 error ("label's context is not the current function decl");
/* The label-to-block map must point this label's UID at the block
   containing the label statement.  */
4466 uid
= LABEL_DECL_UID (decl
);
4469 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4471 error ("incorrect entry in label_to_block_map");
/* If the label carries an EH landing-pad number, the landing pad must
   point back at this label.  */
4475 uid
= EH_LANDING_PAD_NR (decl
);
4478 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4479 if (decl
!= lp
->post_landing_pad
)
4481 error ("incorrect setting of landing pad number");
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4489 /* Verify a gimple cond statement STMT.
4490 Returns true if anything is wrong. */
4493 verify_gimple_cond (gcond
*stmt
)
/* The condition code must be a comparison ...  */
4495 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4497 error ("invalid comparison code in gimple cond");
/* ... and any true/false labels, when present, must be LABEL_DECLs.  */
4500 if (!(!gimple_cond_true_label (stmt
)
4501 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4502 || !(!gimple_cond_false_label (stmt
)
4503 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4505 error ("invalid labels in gimple cond");
/* Finally check the comparison itself against a boolean result type.  */
4509 return verify_gimple_comparison (boolean_type_node
,
4510 gimple_cond_lhs (stmt
),
4511 gimple_cond_rhs (stmt
));
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Dispatcher over gimple_code: most case labels (GIMPLE_ASSIGN,
   GIMPLE_LABEL, GIMPLE_CALL, ...) were lost in extraction; only the
   as_a<> downcasts and a few labels remain.  */
4514 /* Verify the GIMPLE statement STMT. Returns true if there is an
4515 error, otherwise false. */
4518 verify_gimple_stmt (gimple stmt
)
4520 switch (gimple_code (stmt
))
4523 return verify_gimple_assign (as_a
<gassign
*> (stmt
));
4526 return verify_gimple_label (as_a
<glabel
*> (stmt
));
4529 return verify_gimple_call (as_a
<gcall
*> (stmt
));
4532 return verify_gimple_cond (as_a
<gcond
*> (stmt
));
4535 return verify_gimple_goto (as_a
<ggoto
*> (stmt
));
4538 return verify_gimple_switch (as_a
<gswitch
*> (stmt
));
4541 return verify_gimple_return (as_a
<greturn
*> (stmt
));
4546 case GIMPLE_TRANSACTION
:
4547 return verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
4549 /* Tuples that do not have tree operands. */
4551 case GIMPLE_PREDICT
:
4553 case GIMPLE_EH_DISPATCH
:
4554 case GIMPLE_EH_MUST_NOT_THROW
:
4558 /* OpenMP directives are validated by the FE and never operated
4559 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4560 non-gimple expressions when the main index variable has had
4561 its address taken. This does not affect the loop itself
4562 because the header of an GIMPLE_OMP_FOR is merely used to determine
4563 how to setup the parallel iteration. */
4567 return verify_gimple_debug (stmt
);
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4574 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4575 and false otherwise. */
4578 verify_gimple_phi (gimple phi
)
4582 tree phi_result
= gimple_phi_result (phi
);
4587 error ("invalid PHI result");
/* The result must be an SSA_NAME; a virtual result must be based on
   the function's virtual operand.  */
4591 virtual_p
= virtual_operand_p (phi_result
);
4592 if (TREE_CODE (phi_result
) != SSA_NAME
4594 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4596 error ("invalid PHI result");
/* Each argument must exist, agree with the result's virtualness, and
   be a GIMPLE value (or the virtual operand).  */
4600 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4602 tree t
= gimple_phi_arg_def (phi
, i
);
4606 error ("missing PHI def");
4610 /* Addressable variables do have SSA_NAMEs but they
4611 are not considered gimple values. */
4612 else if ((TREE_CODE (t
) == SSA_NAME
4613 && virtual_p
!= virtual_operand_p (t
))
4615 && (TREE_CODE (t
) != SSA_NAME
4616 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4618 && !is_gimple_val (t
)))
4620 error ("invalid PHI argument");
4621 debug_generic_expr (t
);
/* Optional stricter type checking between result and argument types.  */
4624 #ifdef ENABLE_TYPES_CHECKING
4625 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4627 error ("incompatible types in PHI argument %u", i
);
4628 debug_generic_stmt (TREE_TYPE (phi_result
));
4629 debug_generic_stmt (TREE_TYPE (t
));
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Recursive walk over a gimple_seq: container statements (bind/try/
   eh-filter/eh-else/catch/transaction) recurse into their sub-sequences;
   everything else goes through verify_gimple_stmt.  Several case labels
   (GIMPLE_BIND, GIMPLE_TRY, GIMPLE_CATCH, default) were lost.  */
4638 /* Verify the GIMPLE statements inside the sequence STMTS. */
4641 verify_gimple_in_seq_2 (gimple_seq stmts
)
4643 gimple_stmt_iterator ittr
;
4646 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4648 gimple stmt
= gsi_stmt (ittr
);
4650 switch (gimple_code (stmt
))
4653 err
|= verify_gimple_in_seq_2 (
4654 gimple_bind_body (as_a
<gbind
*> (stmt
)));
4658 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4659 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4662 case GIMPLE_EH_FILTER
:
4663 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4666 case GIMPLE_EH_ELSE
:
4668 geh_else
*eh_else
= as_a
<geh_else
*> (stmt
);
4669 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else
));
4670 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else
));
4675 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (
4676 as_a
<gcatch
*> (stmt
)));
4679 case GIMPLE_TRANSACTION
:
4680 err
|= verify_gimple_transaction (as_a
<gtransaction
*> (stmt
));
/* Default case: verify the statement itself and dump it on failure.  */
4685 bool err2
= verify_gimple_stmt (stmt
);
4687 debug_gimple_stmt (stmt
);
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4696 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4697 is a problem, otherwise false. */
4700 verify_gimple_transaction (gtransaction
*stmt
)
/* The transaction label, when present, must be a LABEL_DECL; then the
   transaction body is verified recursively.  */
4702 tree lab
= gimple_transaction_label (stmt
);
4703 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4705 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Entry point: times the verification and aborts compilation via
   internal_error on any failure.  */
4709 /* Verify the GIMPLE statements inside the statement list STMTS. */
4712 verify_gimple_in_seq (gimple_seq stmts
)
4714 timevar_push (TV_TREE_STMT_VERIFY
);
4715 if (verify_gimple_in_seq_2 (stmts
))
4716 internal_error ("verify_gimple failed");
4717 timevar_pop (TV_TREE_STMT_VERIFY
);
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Predicate used by the sharing verifier: types, decls, invariants,
   SSA names, error_mark_node and identifiers may legitimately be
   shared between trees.  The return statements were lost in
   extraction.  */
4720 /* Return true when the T can be shared. */
4723 tree_node_can_be_shared (tree t
)
4725 if (IS_TYPE_OR_DECL_P (t
)
4726 || is_gimple_min_invariant (t
)
4727 || TREE_CODE (t
) == SSA_NAME
4728 || t
== error_mark_node
4729 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4732 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
/* NOTE(review): extraction-damaged text; code left byte-identical.
   walk_tree callback: skips shareable nodes, otherwise records the
   node in the visited set -- a repeated add indicates illegal sharing
   (the error return was lost in extraction).  */
4741 /* Called via walk_tree. Verify tree sharing. */
4744 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4746 hash_set
<void *> *visited
= (hash_set
<void *> *) data
;
4748 if (tree_node_can_be_shared (*tp
))
4750 *walk_subtrees
= false;
4754 if (visited
->add (*tp
))
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Thin adapter: unwraps the walk_stmt_info to forward the visited set
   to verify_node_sharing_1.  */
4760 /* Called via walk_gimple_stmt. Verify tree sharing. */
4763 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4765 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4766 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
/* Set by verify_eh_throw_stmt_node when a dead EH-table entry is found;
   checked at the end of verify_gimple_in_cfg.  */
4769 static bool eh_error_found
;
/* NOTE(review): extraction-damaged text; code left byte-identical.
   hash_map traversal callback: any statement present in the EH throw
   table but not in the visited set is dead and flagged.  */
4771 verify_eh_throw_stmt_node (const gimple
&stmt
, const int &,
4772 hash_set
<gimple
> *visited
)
4774 if (!visited
->contains (stmt
))
4776 error ("dead STMT in EH table");
4777 debug_gimple_stmt (stmt
);
4778 eh_error_found
= true;
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Recurses up the block chain via BLOCK_SOURCE_LOCATION, checking each
   referenced block is in the collected block-tree set.  */
4783 /* Verify if the location LOCs block is in BLOCKS. */
4786 verify_location (hash_set
<tree
> *blocks
, location_t loc
)
4788 tree block
= LOCATION_BLOCK (loc
);
4789 if (block
!= NULL_TREE
4790 && !blocks
->contains (block
))
4792 error ("location references block not in block tree");
4795 if (block
!= NULL_TREE
)
4796 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
/* NOTE(review): extraction-damaged text; code left byte-identical.
   walk_tree callback used on DEBUG/VALUE exprs: flags any expression
   whose location still carries a block (the guard and return lines
   were lost in extraction).  */
4800 /* Called via walk_tree. Verify that expressions have no blocks. */
4803 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4807 *walk_subtrees
= false;
4811 location_t loc
= EXPR_LOCATION (*tp
);
4812 if (LOCATION_BLOCK (loc
) != NULL
)
/* NOTE(review): extraction-damaged text; code left byte-identical,
   comments only.  */
4818 /* Called via walk_tree. Verify locations of expressions. */
4821 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4823 hash_set
<tree
> *blocks
= (hash_set
<tree
> *) data
;
/* DEBUG_EXPRs attached to VAR_DECLs must not reference blocks.  */
4825 if (TREE_CODE (*tp
) == VAR_DECL
4826 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4828 tree t
= DECL_DEBUG_EXPR (*tp
);
4829 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
/* Likewise for VALUE_EXPRs on vars/params/results.  */
4833 if ((TREE_CODE (*tp
) == VAR_DECL
4834 || TREE_CODE (*tp
) == PARM_DECL
4835 || TREE_CODE (*tp
) == RESULT_DECL
)
4836 && DECL_HAS_VALUE_EXPR_P (*tp
))
4838 tree t
= DECL_VALUE_EXPR (*tp
);
4839 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4846 *walk_subtrees
= false;
/* Finally check this expression's own location against the block set.  */
4850 location_t loc
= EXPR_LOCATION (*tp
);
4851 if (verify_location (blocks
, loc
))
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Thin adapter: unwraps the walk_stmt_info to forward the block set to
   verify_expr_location_1.  */
4857 /* Called via walk_gimple_op. Verify locations of expressions. */
4860 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4862 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4863 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
/* NOTE(review): extraction-damaged text; code left byte-identical.
   Depth-first collection of the BLOCK tree into the set (the
   blocks->add call line was lost in extraction).  */
4866 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4869 collect_subblocks (hash_set
<tree
> *blocks
, tree block
)
4872 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4875 collect_subblocks (blocks
, t
);
/* NOTE(review): extraction-damaged text -- statements hard-wrapped,
   original line numbers fused in, many structural lines (braces, error
   accumulation, loop conditions) missing.  Code left byte-identical;
   comments only.  */
4879 /* Verify the GIMPLE statements in the CFG of FN. */
4882 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4887 timevar_push (TV_TREE_STMT_VERIFY
);
4888 hash_set
<void *> visited
;
4889 hash_set
<gimple
> visited_stmts
;
4891 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4892 hash_set
<tree
> blocks
;
4893 if (DECL_INITIAL (fn
->decl
))
4895 blocks
.add (DECL_INITIAL (fn
->decl
));
4896 collect_subblocks (&blocks
, DECL_INITIAL (fn
->decl
));
/* Pass 1 per basic block: verify PHI nodes.  */
4899 FOR_EACH_BB_FN (bb
, fn
)
4901 gimple_stmt_iterator gsi
;
4903 for (gphi_iterator gpi
= gsi_start_phis (bb
);
4907 gphi
*phi
= gpi
.phi ();
4911 visited_stmts
.add (phi
);
4913 if (gimple_bb (phi
) != bb
)
4915 error ("gimple_bb (phi) is set to a wrong basic block");
4919 err2
|= verify_gimple_phi (phi
);
4921 /* Only PHI arguments have locations. */
4922 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4924 error ("PHI node with location");
/* Check each PHI argument for tree sharing and location sanity.  */
4928 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4930 tree arg
= gimple_phi_arg_def (phi
, i
);
4931 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4935 error ("incorrect sharing of tree nodes");
4936 debug_generic_expr (addr
);
4939 location_t loc
= gimple_phi_arg_location (phi
, i
);
4940 if (virtual_operand_p (gimple_phi_result (phi
))
4941 && loc
!= UNKNOWN_LOCATION
)
4943 error ("virtual PHI with argument locations");
4946 addr
= walk_tree (&arg
, verify_expr_location_1
, &blocks
, NULL
);
4949 debug_generic_expr (addr
);
4952 err2
|= verify_location (&blocks
, loc
);
4956 debug_gimple_stmt (phi
);
/* Pass 2 per basic block: verify ordinary statements.  */
4960 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4962 gimple stmt
= gsi_stmt (gsi
);
4964 struct walk_stmt_info wi
;
4968 visited_stmts
.add (stmt
);
4970 if (gimple_bb (stmt
) != bb
)
4972 error ("gimple_bb (stmt) is set to a wrong basic block");
4976 err2
|= verify_gimple_stmt (stmt
);
4977 err2
|= verify_location (&blocks
, gimple_location (stmt
));
/* Walk operands once for sharing ...  */
4979 memset (&wi
, 0, sizeof (wi
));
4980 wi
.info
= (void *) &visited
;
4981 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4984 error ("incorrect sharing of tree nodes");
4985 debug_generic_expr (addr
);
/* ... and once for location/block consistency.  */
4989 memset (&wi
, 0, sizeof (wi
));
4990 wi
.info
= (void *) &blocks
;
4991 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4994 debug_generic_expr (addr
);
4998 /* ??? Instead of not checking these stmts at all the walker
4999 should know its context via wi. */
5000 if (!is_gimple_debug (stmt
)
5001 && !is_gimple_omp (stmt
))
5003 memset (&wi
, 0, sizeof (wi
));
5004 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
5007 debug_generic_expr (addr
);
5008 inform (gimple_location (stmt
), "in statement");
5013 /* If the statement is marked as part of an EH region, then it is
5014 expected that the statement could throw. Verify that when we
5015 have optimizations that simplify statements such that we prove
5016 that they cannot throw, that we update other data structures
5018 lp_nr
= lookup_stmt_eh_lp (stmt
);
5021 if (!stmt_could_throw_p (stmt
))
5025 error ("statement marked for throw, but doesn%'t");
5029 else if (!gsi_one_before_end_p (gsi
))
5031 error ("statement marked for throw in middle of block");
5037 debug_gimple_stmt (stmt
);
/* Finally cross-check the EH throw table against the visited set.  */
5042 eh_error_found
= false;
5043 hash_map
<gimple
, int> *eh_table
= get_eh_throw_stmt_table (cfun
);
5045 eh_table
->traverse
<hash_set
<gimple
> *, verify_eh_throw_stmt_node
>
5048 if (err
|| eh_error_found
)
5049 internal_error ("verify_gimple failed");
5051 verify_histograms ();
5052 timevar_pop (TV_TREE_STMT_VERIFY
);
/* NOTE(review): extraction-damaged text -- statements hard-wrapped,
   original line numbers fused in, many structural lines (braces, case
   labels such as GIMPLE_COND/GIMPLE_GOTO/GIMPLE_RETURN/GIMPLE_SWITCH,
   error accumulation and returns) missing.  Code left byte-identical;
   comments only.  */
5056 /* Verifies that the flow information is OK. */
5059 gimple_verify_flow_info (void)
5063 gimple_stmt_iterator gsi
;
/* ENTRY and EXIT are artificial blocks and must carry no IL.  */
5068 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5069 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5071 error ("ENTRY_BLOCK has IL associated with it");
5075 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
5076 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5078 error ("EXIT_BLOCK has IL associated with it");
/* No fallthru edge may enter the exit block.  */
5082 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5083 if (e
->flags
& EDGE_FALLTHRU
)
5085 error ("fallthru to exit from bb %d", e
->src
->index
);
5089 FOR_EACH_BB_FN (bb
, cfun
)
5091 bool found_ctrl_stmt
= false;
5095 /* Skip labels on the start of basic block. */
5096 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5099 gimple prev_stmt
= stmt
;
5101 stmt
= gsi_stmt (gsi
);
5103 if (gimple_code (stmt
) != GIMPLE_LABEL
)
/* Nonlocal and EH landing-pad labels must be first in the block, the
   label-to-block map must agree, and the label must belong to the
   current function.  */
5106 label
= gimple_label_label (as_a
<glabel
*> (stmt
));
5107 if (prev_stmt
&& DECL_NONLOCAL (label
))
5109 error ("nonlocal label ");
5110 print_generic_expr (stderr
, label
, 0);
5111 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5116 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5118 error ("EH landing pad label ");
5119 print_generic_expr (stderr
, label
, 0);
5120 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5125 if (label_to_block (label
) != bb
)
5128 print_generic_expr (stderr
, label
, 0);
5129 fprintf (stderr
, " to block does not match in bb %d",
5134 if (decl_function_context (label
) != current_function_decl
)
5137 print_generic_expr (stderr
, label
, 0);
5138 fprintf (stderr
, " has incorrect context in bb %d",
5144 /* Verify that body of basic block BB is free of control flow. */
5145 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5147 gimple stmt
= gsi_stmt (gsi
);
5149 if (found_ctrl_stmt
)
5151 error ("control flow in the middle of basic block %d",
5156 if (stmt_ends_bb_p (stmt
))
5157 found_ctrl_stmt
= true;
/* Labels must not appear after the leading label sequence.  */
5159 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
5162 print_generic_expr (stderr
, gimple_label_label (label_stmt
), 0);
5163 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
/* Now inspect the block's last statement and its outgoing edges.  */
5168 gsi
= gsi_last_bb (bb
);
5169 if (gsi_end_p (gsi
))
5172 stmt
= gsi_stmt (gsi
);
5174 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5177 err
|= verify_eh_edges (stmt
);
/* A control statement must not have a fallthru successor.  */
5179 if (is_ctrl_stmt (stmt
))
5181 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5182 if (e
->flags
& EDGE_FALLTHRU
)
5184 error ("fallthru edge after a control statement in bb %d",
/* True/false edge flags are reserved for GIMPLE_COND endings.  */
5190 if (gimple_code (stmt
) != GIMPLE_COND
)
5192 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5193 after anything else but if statement. */
5194 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5195 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5197 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5203 switch (gimple_code (stmt
))
/* GIMPLE_COND case (label lost): exactly one true and one false edge
   with no fallthru/abnormal flags, at most two successors.  */
5210 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5214 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5215 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5216 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5217 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5218 || EDGE_COUNT (bb
->succs
) >= 3)
5220 error ("wrong outgoing edge flags at end of bb %d",
/* GIMPLE_GOTO case (label lost): simple gotos should have been
   turned into edges; computed gotos leave abnormal edges.  */
5228 if (simple_goto_p (stmt
))
5230 error ("explicit goto at end of bb %d", bb
->index
);
5235 /* FIXME. We should double check that the labels in the
5236 destination blocks have their address taken. */
5237 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5238 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5239 | EDGE_FALSE_VALUE
))
5240 || !(e
->flags
& EDGE_ABNORMAL
))
5242 error ("wrong outgoing edge flags at end of bb %d",
/* GIMPLE_CALL/GIMPLE_RETURN handling (labels lost): a return block
   has a single, flag-free edge to EXIT.  */
5250 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5252 /* ... fallthru ... */
5254 if (!single_succ_p (bb
)
5255 || (single_succ_edge (bb
)->flags
5256 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5257 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5259 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5262 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5264 error ("return edge does not point to exit in bb %d",
/* GIMPLE_SWITCH case (label lost): use bb->aux as a mark to match
   case-label destinations against the actual successor edges.  */
5272 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5277 n
= gimple_switch_num_labels (switch_stmt
);
5279 /* Mark all the destination basic blocks. */
5280 for (i
= 0; i
< n
; ++i
)
5282 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5283 basic_block label_bb
= label_to_block (lab
);
5284 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5285 label_bb
->aux
= (void *)1;
5288 /* Verify that the case labels are sorted. */
5289 prev
= gimple_switch_label (switch_stmt
, 0);
5290 for (i
= 1; i
< n
; ++i
)
5292 tree c
= gimple_switch_label (switch_stmt
, i
);
5295 error ("found default case not at the start of "
5301 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5303 error ("case labels not sorted: ");
5304 print_generic_expr (stderr
, prev
, 0);
5305 fprintf (stderr
," is greater than ");
5306 print_generic_expr (stderr
, c
, 0);
5307 fprintf (stderr
," but comes before it.\n");
5312 /* VRP will remove the default case if it can prove it will
5313 never be executed. So do not verify there always exists
5314 a default case here. */
/* Every successor edge must correspond to a marked destination, and
   carry no fallthru/true/false/abnormal flags.  */
5316 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5320 error ("extra outgoing edge %d->%d",
5321 bb
->index
, e
->dest
->index
);
5325 e
->dest
->aux
= (void *)2;
5326 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5327 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5329 error ("wrong outgoing edge flags at end of bb %d",
5335 /* Check that we have all of them. */
5336 for (i
= 0; i
< n
; ++i
)
5338 tree lab
= CASE_LABEL (gimple_switch_label (switch_stmt
, i
));
5339 basic_block label_bb
= label_to_block (lab
);
5341 if (label_bb
->aux
!= (void *)2)
5343 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
/* Clear the aux marks used above.  */
5348 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5349 e
->dest
->aux
= (void *)0;
5353 case GIMPLE_EH_DISPATCH
:
5354 err
|= verify_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
));
/* Finally re-verify dominators when fast queries are available.  */
5362 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5363 verify_dominators (CDI_DOMINATORS
);
5369 /* Updates phi nodes after creating a forwarder block joined
5370 by edge FALLTHRU. */
5373 gimple_make_forwarder_block (edge fallthru
)
5377 basic_block dummy
, bb
;
5381 dummy
= fallthru
->src
;
5382 bb
= fallthru
->dest
;
5384 if (single_pred_p (bb
))
5387 /* If we redirected a branch we must create new PHI nodes at the
5389 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5391 gphi
*phi
, *new_phi
;
5394 var
= gimple_phi_result (phi
);
5395 new_phi
= create_phi_node (var
, bb
);
5396 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5397 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5401 /* Add the arguments we have stored on edges. */
5402 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5407 flush_pending_stmts (e
);
5412 /* Return a non-special label in the head of basic block BLOCK.
5413 Create one if it doesn't exist. */
5416 gimple_block_label (basic_block bb
)
5418 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5423 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5425 stmt
= dyn_cast
<glabel
*> (gsi_stmt (i
));
5428 label
= gimple_label_label (stmt
);
5429 if (!DECL_NONLOCAL (label
))
5432 gsi_move_before (&i
, &s
);
5437 label
= create_artificial_label (UNKNOWN_LOCATION
);
5438 stmt
= gimple_build_label (label
);
5439 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5444 /* Attempt to perform edge redirection by replacing a possibly complex
5445 jump instruction by a goto or by removing the jump completely.
5446 This can apply only if all edges now point to the same block. The
5447 parameters and return values are equivalent to
5448 redirect_edge_and_branch. */
5451 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5453 basic_block src
= e
->src
;
5454 gimple_stmt_iterator i
;
5457 /* We can replace or remove a complex jump only when we have exactly
5459 if (EDGE_COUNT (src
->succs
) != 2
5460 /* Verify that all targets will be TARGET. Specifically, the
5461 edge that is not E must also go to TARGET. */
5462 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5465 i
= gsi_last_bb (src
);
5469 stmt
= gsi_stmt (i
);
5471 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5473 gsi_remove (&i
, true);
5474 e
= ssa_redirect_edge (e
, target
);
5475 e
->flags
= EDGE_FALLTHRU
;
5483 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5484 edge representing the redirected branch. */
5487 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5489 basic_block bb
= e
->src
;
5490 gimple_stmt_iterator gsi
;
5494 if (e
->flags
& EDGE_ABNORMAL
)
5497 if (e
->dest
== dest
)
5500 if (e
->flags
& EDGE_EH
)
5501 return redirect_eh_edge (e
, dest
);
5503 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5505 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5510 gsi
= gsi_last_bb (bb
);
5511 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5513 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5516 /* For COND_EXPR, we only need to redirect the edge. */
5520 /* No non-abnormal edges should lead from a non-simple goto, and
5521 simple ones should be represented implicitly. */
5526 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
5527 tree label
= gimple_block_label (dest
);
5528 tree cases
= get_cases_for_edge (e
, switch_stmt
);
5530 /* If we have a list of cases associated with E, then use it
5531 as it's a lot faster than walking the entire case vector. */
5534 edge e2
= find_edge (e
->src
, dest
);
5541 CASE_LABEL (cases
) = label
;
5542 cases
= CASE_CHAIN (cases
);
5545 /* If there was already an edge in the CFG, then we need
5546 to move all the cases associated with E to E2. */
5549 tree cases2
= get_cases_for_edge (e2
, switch_stmt
);
5551 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5552 CASE_CHAIN (cases2
) = first
;
5554 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5558 size_t i
, n
= gimple_switch_num_labels (switch_stmt
);
5560 for (i
= 0; i
< n
; i
++)
5562 tree elt
= gimple_switch_label (switch_stmt
, i
);
5563 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5564 CASE_LABEL (elt
) = label
;
5572 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5573 int i
, n
= gimple_asm_nlabels (asm_stmt
);
5576 for (i
= 0; i
< n
; ++i
)
5578 tree cons
= gimple_asm_label_op (asm_stmt
, i
);
5579 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5582 label
= gimple_block_label (dest
);
5583 TREE_VALUE (cons
) = label
;
5587 /* If we didn't find any label matching the former edge in the
5588 asm labels, we must be redirecting the fallthrough
5590 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5595 gsi_remove (&gsi
, true);
5596 e
->flags
|= EDGE_FALLTHRU
;
5599 case GIMPLE_OMP_RETURN
:
5600 case GIMPLE_OMP_CONTINUE
:
5601 case GIMPLE_OMP_SECTIONS_SWITCH
:
5602 case GIMPLE_OMP_FOR
:
5603 /* The edges from OMP constructs can be simply redirected. */
5606 case GIMPLE_EH_DISPATCH
:
5607 if (!(e
->flags
& EDGE_FALLTHRU
))
5608 redirect_eh_dispatch_edge (as_a
<geh_dispatch
*> (stmt
), e
, dest
);
5611 case GIMPLE_TRANSACTION
:
5612 /* The ABORT edge has a stored label associated with it, otherwise
5613 the edges are simply redirectable. */
5615 gimple_transaction_set_label (as_a
<gtransaction
*> (stmt
),
5616 gimple_block_label (dest
));
5620 /* Otherwise it must be a fallthru edge, and we don't need to
5621 do anything besides redirecting it. */
5622 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5626 /* Update/insert PHI nodes as necessary. */
5628 /* Now update the edges in the CFG. */
5629 e
= ssa_redirect_edge (e
, dest
);
5634 /* Returns true if it is possible to remove edge E by redirecting
5635 it to the destination of the other edge from E->src. */
5638 gimple_can_remove_branch_p (const_edge e
)
5640 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5646 /* Simple wrapper, as we can always redirect fallthru edges. */
5649 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5651 e
= gimple_redirect_edge_and_branch (e
, dest
);
5658 /* Splits basic block BB after statement STMT (but at least after the
5659 labels). If STMT is NULL, BB is split just after the labels. */
5662 gimple_split_block (basic_block bb
, void *stmt
)
5664 gimple_stmt_iterator gsi
;
5665 gimple_stmt_iterator gsi_tgt
;
5672 new_bb
= create_empty_bb (bb
);
5674 /* Redirect the outgoing edges. */
5675 new_bb
->succs
= bb
->succs
;
5677 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5680 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5683 /* Move everything from GSI to the new basic block. */
5684 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5686 act
= gsi_stmt (gsi
);
5687 if (gimple_code (act
) == GIMPLE_LABEL
)
5700 if (gsi_end_p (gsi
))
5703 /* Split the statement list - avoid re-creating new containers as this
5704 brings ugly quadratic memory consumption in the inliner.
5705 (We are still quadratic since we need to update stmt BB pointers,
5707 gsi_split_seq_before (&gsi
, &list
);
5708 set_bb_seq (new_bb
, list
);
5709 for (gsi_tgt
= gsi_start (list
);
5710 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5711 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5717 /* Moves basic block BB after block AFTER. */
5720 gimple_move_block_after (basic_block bb
, basic_block after
)
5722 if (bb
->prev_bb
== after
)
5726 link_block (bb
, after
);
5732 /* Return TRUE if block BB has no executable statements, otherwise return
5736 gimple_empty_block_p (basic_block bb
)
5738 /* BB must have no executable statements. */
5739 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5742 if (gsi_end_p (gsi
))
5744 if (is_gimple_debug (gsi_stmt (gsi
)))
5745 gsi_next_nondebug (&gsi
);
5746 return gsi_end_p (gsi
);
5750 /* Split a basic block if it ends with a conditional branch and if the
5751 other part of the block is not empty. */
5754 gimple_split_block_before_cond_jump (basic_block bb
)
5756 gimple last
, split_point
;
5757 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5758 if (gsi_end_p (gsi
))
5760 last
= gsi_stmt (gsi
);
5761 if (gimple_code (last
) != GIMPLE_COND
5762 && gimple_code (last
) != GIMPLE_SWITCH
)
5764 gsi_prev_nondebug (&gsi
);
5765 split_point
= gsi_stmt (gsi
);
5766 return split_block (bb
, split_point
)->dest
;
5770 /* Return true if basic_block can be duplicated. */
5773 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5778 /* Create a duplicate of the basic block BB. NOTE: This does not
5779 preserve SSA form. */
5782 gimple_duplicate_bb (basic_block bb
)
5785 gimple_stmt_iterator gsi_tgt
;
5787 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5789 /* Copy the PHI nodes. We ignore PHI node arguments here because
5790 the incoming edges have not been setup yet. */
5791 for (gphi_iterator gpi
= gsi_start_phis (bb
);
5797 copy
= create_phi_node (NULL_TREE
, new_bb
);
5798 create_new_def_for (gimple_phi_result (phi
), copy
,
5799 gimple_phi_result_ptr (copy
));
5800 gimple_set_uid (copy
, gimple_uid (phi
));
5803 gsi_tgt
= gsi_start_bb (new_bb
);
5804 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5808 def_operand_p def_p
;
5809 ssa_op_iter op_iter
;
5813 stmt
= gsi_stmt (gsi
);
5814 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5817 /* Don't duplicate label debug stmts. */
5818 if (gimple_debug_bind_p (stmt
)
5819 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5823 /* Create a new copy of STMT and duplicate STMT's virtual
5825 copy
= gimple_copy (stmt
);
5826 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5828 maybe_duplicate_eh_stmt (copy
, stmt
);
5829 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5831 /* When copying around a stmt writing into a local non-user
5832 aggregate, make sure it won't share stack slot with other
5834 lhs
= gimple_get_lhs (stmt
);
5835 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5837 tree base
= get_base_address (lhs
);
5839 && (TREE_CODE (base
) == VAR_DECL
5840 || TREE_CODE (base
) == RESULT_DECL
)
5841 && DECL_IGNORED_P (base
)
5842 && !TREE_STATIC (base
)
5843 && !DECL_EXTERNAL (base
)
5844 && (TREE_CODE (base
) != VAR_DECL
5845 || !DECL_HAS_VALUE_EXPR_P (base
)))
5846 DECL_NONSHAREABLE (base
) = 1;
5849 /* Create new names for all the definitions created by COPY and
5850 add replacement mappings for each new name. */
5851 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5852 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5858 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5861 add_phi_args_after_copy_edge (edge e_copy
)
5863 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5866 gphi
*phi
, *phi_copy
;
5868 gphi_iterator psi
, psi_copy
;
5870 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5873 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5875 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5876 dest
= get_bb_original (e_copy
->dest
);
5878 dest
= e_copy
->dest
;
5880 e
= find_edge (bb
, dest
);
5883 /* During loop unrolling the target of the latch edge is copied.
5884 In this case we are not looking for edge to dest, but to
5885 duplicated block whose original was dest. */
5886 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5888 if ((e
->dest
->flags
& BB_DUPLICATED
)
5889 && get_bb_original (e
->dest
) == dest
)
5893 gcc_assert (e
!= NULL
);
5896 for (psi
= gsi_start_phis (e
->dest
),
5897 psi_copy
= gsi_start_phis (e_copy
->dest
);
5899 gsi_next (&psi
), gsi_next (&psi_copy
))
5902 phi_copy
= psi_copy
.phi ();
5903 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5904 add_phi_arg (phi_copy
, def
, e_copy
,
5905 gimple_phi_arg_location_from_edge (phi
, e
));
5910 /* Basic block BB_COPY was created by code duplication. Add phi node
5911 arguments for edges going out of BB_COPY. The blocks that were
5912 duplicated have BB_DUPLICATED set. */
5915 add_phi_args_after_copy_bb (basic_block bb_copy
)
5920 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5922 add_phi_args_after_copy_edge (e_copy
);
5926 /* Blocks in REGION_COPY array of length N_REGION were created by
5927 duplication of basic blocks. Add phi node arguments for edges
5928 going from these blocks. If E_COPY is not NULL, also add
5929 phi node arguments for its destination.*/
5932 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5937 for (i
= 0; i
< n_region
; i
++)
5938 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5940 for (i
= 0; i
< n_region
; i
++)
5941 add_phi_args_after_copy_bb (region_copy
[i
]);
5943 add_phi_args_after_copy_edge (e_copy
);
5945 for (i
= 0; i
< n_region
; i
++)
5946 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5949 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5950 important exit edge EXIT. By important we mean that no SSA name defined
5951 inside region is live over the other exit edges of the region. All entry
5952 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5953 to the duplicate of the region. Dominance and loop information is
5954 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5955 UPDATE_DOMINANCE is false then we assume that the caller will update the
5956 dominance information after calling this function. The new basic
5957 blocks are stored to REGION_COPY in the same order as they had in REGION,
5958 provided that REGION_COPY is not NULL.
5959 The function returns false if it is unable to copy the region,
5963 gimple_duplicate_sese_region (edge entry
, edge exit
,
5964 basic_block
*region
, unsigned n_region
,
5965 basic_block
*region_copy
,
5966 bool update_dominance
)
5969 bool free_region_copy
= false, copying_header
= false;
5970 struct loop
*loop
= entry
->dest
->loop_father
;
5972 vec
<basic_block
> doms
;
5974 int total_freq
= 0, entry_freq
= 0;
5975 gcov_type total_count
= 0, entry_count
= 0;
5977 if (!can_copy_bbs_p (region
, n_region
))
5980 /* Some sanity checking. Note that we do not check for all possible
5981 missuses of the functions. I.e. if you ask to copy something weird,
5982 it will work, but the state of structures probably will not be
5984 for (i
= 0; i
< n_region
; i
++)
5986 /* We do not handle subloops, i.e. all the blocks must belong to the
5988 if (region
[i
]->loop_father
!= loop
)
5991 if (region
[i
] != entry
->dest
5992 && region
[i
] == loop
->header
)
5996 /* In case the function is used for loop header copying (which is the primary
5997 use), ensure that EXIT and its copy will be new latch and entry edges. */
5998 if (loop
->header
== entry
->dest
)
6000 copying_header
= true;
6002 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
6005 for (i
= 0; i
< n_region
; i
++)
6006 if (region
[i
] != exit
->src
6007 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
6011 initialize_original_copy_tables ();
6014 set_loop_copy (loop
, loop_outer (loop
));
6016 set_loop_copy (loop
, loop
);
6020 region_copy
= XNEWVEC (basic_block
, n_region
);
6021 free_region_copy
= true;
6024 /* Record blocks outside the region that are dominated by something
6026 if (update_dominance
)
6029 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6032 if (entry
->dest
->count
)
6034 total_count
= entry
->dest
->count
;
6035 entry_count
= entry
->count
;
6036 /* Fix up corner cases, to avoid division by zero or creation of negative
6038 if (entry_count
> total_count
)
6039 entry_count
= total_count
;
6043 total_freq
= entry
->dest
->frequency
;
6044 entry_freq
= EDGE_FREQUENCY (entry
);
6045 /* Fix up corner cases, to avoid division by zero or creation of negative
6047 if (total_freq
== 0)
6049 else if (entry_freq
> total_freq
)
6050 entry_freq
= total_freq
;
6053 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
6054 split_edge_bb_loc (entry
), update_dominance
);
6057 scale_bbs_frequencies_gcov_type (region
, n_region
,
6058 total_count
- entry_count
,
6060 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
6065 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
6067 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
6072 loop
->header
= exit
->dest
;
6073 loop
->latch
= exit
->src
;
6076 /* Redirect the entry and add the phi node arguments. */
6077 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
6078 gcc_assert (redirected
!= NULL
);
6079 flush_pending_stmts (entry
);
6081 /* Concerning updating of dominators: We must recount dominators
6082 for entry block and its copy. Anything that is outside of the
6083 region, but was dominated by something inside needs recounting as
6085 if (update_dominance
)
6087 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6088 doms
.safe_push (get_bb_original (entry
->dest
));
6089 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6093 /* Add the other PHI node arguments. */
6094 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6096 if (free_region_copy
)
6099 free_original_copy_tables ();
6103 /* Checks if BB is part of the region defined by N_REGION BBS. */
6105 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6109 for (n
= 0; n
< n_region
; n
++)
6117 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6118 are stored to REGION_COPY in the same order in that they appear
6119 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6120 the region, EXIT an exit from it. The condition guarding EXIT
6121 is moved to ENTRY. Returns true if duplication succeeds, false
6147 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6148 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6149 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6152 bool free_region_copy
= false;
6153 struct loop
*loop
= exit
->dest
->loop_father
;
6154 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6155 basic_block switch_bb
, entry_bb
, nentry_bb
;
6156 vec
<basic_block
> doms
;
6157 int total_freq
= 0, exit_freq
= 0;
6158 gcov_type total_count
= 0, exit_count
= 0;
6159 edge exits
[2], nexits
[2], e
;
6160 gimple_stmt_iterator gsi
;
6163 basic_block exit_bb
;
6167 struct loop
*target
, *aloop
, *cloop
;
6169 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6171 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6173 if (!can_copy_bbs_p (region
, n_region
))
6176 initialize_original_copy_tables ();
6177 set_loop_copy (orig_loop
, loop
);
6180 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6182 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6184 cloop
= duplicate_loop (aloop
, target
);
6185 duplicate_subloops (aloop
, cloop
);
6191 region_copy
= XNEWVEC (basic_block
, n_region
);
6192 free_region_copy
= true;
6195 gcc_assert (!need_ssa_update_p (cfun
));
6197 /* Record blocks outside the region that are dominated by something
6199 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6201 if (exit
->src
->count
)
6203 total_count
= exit
->src
->count
;
6204 exit_count
= exit
->count
;
6205 /* Fix up corner cases, to avoid division by zero or creation of negative
6207 if (exit_count
> total_count
)
6208 exit_count
= total_count
;
6212 total_freq
= exit
->src
->frequency
;
6213 exit_freq
= EDGE_FREQUENCY (exit
);
6214 /* Fix up corner cases, to avoid division by zero or creation of negative
6216 if (total_freq
== 0)
6218 if (exit_freq
> total_freq
)
6219 exit_freq
= total_freq
;
6222 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6223 split_edge_bb_loc (exit
), true);
6226 scale_bbs_frequencies_gcov_type (region
, n_region
,
6227 total_count
- exit_count
,
6229 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6234 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6236 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6239 /* Create the switch block, and put the exit condition to it. */
6240 entry_bb
= entry
->dest
;
6241 nentry_bb
= get_bb_copy (entry_bb
);
6242 if (!last_stmt (entry
->src
)
6243 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6244 switch_bb
= entry
->src
;
6246 switch_bb
= split_edge (entry
);
6247 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6249 gsi
= gsi_last_bb (switch_bb
);
6250 cond_stmt
= last_stmt (exit
->src
);
6251 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6252 cond_stmt
= gimple_copy (cond_stmt
);
6254 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6256 sorig
= single_succ_edge (switch_bb
);
6257 sorig
->flags
= exits
[1]->flags
;
6258 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6260 /* Register the new edge from SWITCH_BB in loop exit lists. */
6261 rescan_loop_exit (snew
, true, false);
6263 /* Add the PHI node arguments. */
6264 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6266 /* Get rid of now superfluous conditions and associated edges (and phi node
6268 exit_bb
= exit
->dest
;
6270 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6271 PENDING_STMT (e
) = NULL
;
6273 /* The latch of ORIG_LOOP was copied, and so was the backedge
6274 to the original header. We redirect this backedge to EXIT_BB. */
6275 for (i
= 0; i
< n_region
; i
++)
6276 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6278 gcc_assert (single_succ_edge (region_copy
[i
]));
6279 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6280 PENDING_STMT (e
) = NULL
;
6281 for (psi
= gsi_start_phis (exit_bb
);
6286 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6287 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6290 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6291 PENDING_STMT (e
) = NULL
;
6293 /* Anything that is outside of the region, but was dominated by something
6294 inside needs to update dominance info. */
6295 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6297 /* Update the SSA web. */
6298 update_ssa (TODO_update_ssa
);
6300 if (free_region_copy
)
6303 free_original_copy_tables ();
6307 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6308 adding blocks when the dominator traversal reaches EXIT. This
6309 function silently assumes that ENTRY strictly dominates EXIT. */
6312 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6313 vec
<basic_block
> *bbs_p
)
6317 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6319 son
= next_dom_son (CDI_DOMINATORS
, son
))
6321 bbs_p
->safe_push (son
);
6323 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6327 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6328 The duplicates are recorded in VARS_MAP. */
6331 replace_by_duplicate_decl (tree
*tp
, hash_map
<tree
, tree
> *vars_map
,
6334 tree t
= *tp
, new_t
;
6335 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6337 if (DECL_CONTEXT (t
) == to_context
)
6341 tree
&loc
= vars_map
->get_or_insert (t
, &existed
);
6347 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6348 add_local_decl (f
, new_t
);
6352 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6353 new_t
= copy_node (t
);
6355 DECL_CONTEXT (new_t
) = to_context
;
6366 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6367 VARS_MAP maps old ssa names and var_decls to the new ones. */
6370 replace_ssa_name (tree name
, hash_map
<tree
, tree
> *vars_map
,
6375 gcc_assert (!virtual_operand_p (name
));
6377 tree
*loc
= vars_map
->get (name
);
6381 tree decl
= SSA_NAME_VAR (name
);
6384 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6385 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6386 decl
, SSA_NAME_DEF_STMT (name
));
6387 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6388 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6392 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6393 name
, SSA_NAME_DEF_STMT (name
));
6395 vars_map
->put (name
, new_name
);
6409 hash_map
<tree
, tree
> *vars_map
;
6410 htab_t new_label_map
;
6411 hash_map
<void *, void *> *eh_map
;
6415 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6416 contained in *TP if it has been ORIG_BLOCK previously and change the
6417 DECL_CONTEXT of every local variable referenced in *TP. */
6420 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6422 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6423 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6428 tree block
= TREE_BLOCK (t
);
6429 if (block
== p
->orig_block
6430 || (p
->orig_block
== NULL_TREE
6431 && block
!= NULL_TREE
))
6432 TREE_SET_BLOCK (t
, p
->new_block
);
6433 #ifdef ENABLE_CHECKING
6434 else if (block
!= NULL_TREE
)
6436 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6437 block
= BLOCK_SUPERCONTEXT (block
);
6438 gcc_assert (block
== p
->orig_block
);
6442 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6444 if (TREE_CODE (t
) == SSA_NAME
)
6445 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6446 else if (TREE_CODE (t
) == LABEL_DECL
)
6448 if (p
->new_label_map
)
6450 struct tree_map in
, *out
;
6452 out
= (struct tree_map
*)
6453 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6458 DECL_CONTEXT (t
) = p
->to_context
;
6460 else if (p
->remap_decls_p
)
6462 /* Replace T with its duplicate. T should no longer appear in the
6463 parent function, so this looks wasteful; however, it may appear
6464 in referenced_vars, and more importantly, as virtual operands of
6465 statements, and in alias lists of other variables. It would be
6466 quite difficult to expunge it from all those places. ??? It might
6467 suffice to do this for addressable variables. */
6468 if ((TREE_CODE (t
) == VAR_DECL
6469 && !is_global_var (t
))
6470 || TREE_CODE (t
) == CONST_DECL
)
6471 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6475 else if (TYPE_P (t
))
6481 /* Helper for move_stmt_r. Given an EH region number for the source
6482 function, map that to the duplicate EH regio number in the dest. */
6485 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6487 eh_region old_r
, new_r
;
6489 old_r
= get_eh_region_from_number (old_nr
);
6490 new_r
= static_cast<eh_region
> (*p
->eh_map
->get (old_r
));
6492 return new_r
->index
;
6495 /* Similar, but operate on INTEGER_CSTs. */
6498 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6502 old_nr
= tree_to_shwi (old_t_nr
);
6503 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6505 return build_int_cst (integer_type_node
, new_nr
);
6508 /* Like move_stmt_op, but for gimple statements.
6510 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6511 contained in the current statement in *GSI_P and change the
6512 DECL_CONTEXT of every local variable referenced in the current
6516 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6517 struct walk_stmt_info
*wi
)
6519 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6520 gimple stmt
= gsi_stmt (*gsi_p
);
6521 tree block
= gimple_block (stmt
);
6523 if (block
== p
->orig_block
6524 || (p
->orig_block
== NULL_TREE
6525 && block
!= NULL_TREE
))
6526 gimple_set_block (stmt
, p
->new_block
);
6528 switch (gimple_code (stmt
))
6531 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6533 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6534 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6535 switch (DECL_FUNCTION_CODE (fndecl
))
6537 case BUILT_IN_EH_COPY_VALUES
:
6538 r
= gimple_call_arg (stmt
, 1);
6539 r
= move_stmt_eh_region_tree_nr (r
, p
);
6540 gimple_call_set_arg (stmt
, 1, r
);
6543 case BUILT_IN_EH_POINTER
:
6544 case BUILT_IN_EH_FILTER
:
6545 r
= gimple_call_arg (stmt
, 0);
6546 r
= move_stmt_eh_region_tree_nr (r
, p
);
6547 gimple_call_set_arg (stmt
, 0, r
);
6558 gresx
*resx_stmt
= as_a
<gresx
*> (stmt
);
6559 int r
= gimple_resx_region (resx_stmt
);
6560 r
= move_stmt_eh_region_nr (r
, p
);
6561 gimple_resx_set_region (resx_stmt
, r
);
6565 case GIMPLE_EH_DISPATCH
:
6567 geh_dispatch
*eh_dispatch_stmt
= as_a
<geh_dispatch
*> (stmt
);
6568 int r
= gimple_eh_dispatch_region (eh_dispatch_stmt
);
6569 r
= move_stmt_eh_region_nr (r
, p
);
6570 gimple_eh_dispatch_set_region (eh_dispatch_stmt
, r
);
6574 case GIMPLE_OMP_RETURN
:
6575 case GIMPLE_OMP_CONTINUE
:
6578 if (is_gimple_omp (stmt
))
6580 /* Do not remap variables inside OMP directives. Variables
6581 referenced in clauses and directive header belong to the
6582 parent function and should not be moved into the child
6584 bool save_remap_decls_p
= p
->remap_decls_p
;
6585 p
->remap_decls_p
= false;
6586 *handled_ops_p
= true;
6588 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6591 p
->remap_decls_p
= save_remap_decls_p
;
6599 /* Move basic block BB from function CFUN to function DEST_FN. The
6600 block is moved out of the original linked list and placed after
6601 block AFTER in the new list. Also, the block is removed from the
6602 original array of blocks and placed in DEST_FN's array of blocks.
6603 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6604 updated to reflect the moved edges.
6606 The local variables are remapped to new instances, VARS_MAP is used
6607 to record the mapping. */
6610 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6611 basic_block after
, bool update_edge_count_p
,
6612 struct move_stmt_d
*d
)
6614 struct control_flow_graph
*cfg
;
6617 gimple_stmt_iterator si
;
6618 unsigned old_len
, new_len
;
6620 /* Remove BB from dominance structures. */
6621 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6623 /* Move BB from its current loop to the copy in the new function. */
6626 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6628 bb
->loop_father
= new_loop
;
6631 /* Link BB to the new linked list. */
6632 move_block_after (bb
, after
);
6634 /* Update the edge count in the corresponding flowgraphs. */
6635 if (update_edge_count_p
)
6636 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6638 cfun
->cfg
->x_n_edges
--;
6639 dest_cfun
->cfg
->x_n_edges
++;
6642 /* Remove BB from the original basic block array. */
6643 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6644 cfun
->cfg
->x_n_basic_blocks
--;
6646 /* Grow DEST_CFUN's basic block array if needed. */
6647 cfg
= dest_cfun
->cfg
;
6648 cfg
->x_n_basic_blocks
++;
6649 if (bb
->index
>= cfg
->x_last_basic_block
)
6650 cfg
->x_last_basic_block
= bb
->index
+ 1;
6652 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6653 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6655 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6656 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6659 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6661 /* Remap the variables in phi nodes. */
6662 for (gphi_iterator psi
= gsi_start_phis (bb
);
6665 gphi
*phi
= psi
.phi ();
6667 tree op
= PHI_RESULT (phi
);
6671 if (virtual_operand_p (op
))
6673 /* Remove the phi nodes for virtual operands (alias analysis will be
6674 run for the new function, anyway). */
6675 remove_phi_node (&psi
, true);
6679 SET_PHI_RESULT (phi
,
6680 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6681 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6683 op
= USE_FROM_PTR (use
);
6684 if (TREE_CODE (op
) == SSA_NAME
)
6685 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6688 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6690 location_t locus
= gimple_phi_arg_location (phi
, i
);
6691 tree block
= LOCATION_BLOCK (locus
);
6693 if (locus
== UNKNOWN_LOCATION
)
6695 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6697 if (d
->new_block
== NULL_TREE
)
6698 locus
= LOCATION_LOCUS (locus
);
6700 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6701 gimple_phi_arg_set_location (phi
, i
, locus
);
6708 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6710 gimple stmt
= gsi_stmt (si
);
6711 struct walk_stmt_info wi
;
6713 memset (&wi
, 0, sizeof (wi
));
6715 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6717 if (glabel
*label_stmt
= dyn_cast
<glabel
*> (stmt
))
6719 tree label
= gimple_label_label (label_stmt
);
6720 int uid
= LABEL_DECL_UID (label
);
6722 gcc_assert (uid
> -1);
6724 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6725 if (old_len
<= (unsigned) uid
)
6727 new_len
= 3 * uid
/ 2 + 1;
6728 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6731 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6732 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6734 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6736 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6737 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6740 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6741 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6743 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6744 gimple_remove_stmt_histograms (cfun
, stmt
);
6746 /* We cannot leave any operands allocated from the operand caches of
6747 the current function. */
6748 free_stmt_operands (cfun
, stmt
);
6749 push_cfun (dest_cfun
);
6754 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6755 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6757 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6758 if (d
->orig_block
== NULL_TREE
6759 || block
== d
->orig_block
)
6760 e
->goto_locus
= d
->new_block
?
6761 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6762 LOCATION_LOCUS (e
->goto_locus
);
6766 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6767 the outermost EH region. Use REGION as the incoming base EH region. */
6770 find_outermost_region_in_block (struct function
*src_cfun
,
6771 basic_block bb
, eh_region region
)
6773 gimple_stmt_iterator si
;
6775 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6777 gimple stmt
= gsi_stmt (si
);
6778 eh_region stmt_region
;
6781 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6782 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6786 region
= stmt_region
;
6787 else if (stmt_region
!= region
)
6789 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6790 gcc_assert (region
!= NULL
);
6799 new_label_mapper (tree decl
, void *data
)
6801 htab_t hash
= (htab_t
) data
;
6805 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6807 m
= XNEW (struct tree_map
);
6808 m
->hash
= DECL_UID (decl
);
6809 m
->base
.from
= decl
;
6810 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6811 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6812 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6813 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6815 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6816 gcc_assert (*slot
== NULL
);
6823 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6827 replace_block_vars_by_duplicates (tree block
, hash_map
<tree
, tree
> *vars_map
,
6832 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6835 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6837 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6840 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6842 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6843 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6845 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6850 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6851 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6854 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6858 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6861 /* Discard it from the old loop array. */
6862 (*get_loops (fn1
))[loop
->num
] = NULL
;
6864 /* Place it in the new loop array, assigning it a new number. */
6865 loop
->num
= number_of_loops (fn2
);
6866 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6868 /* Recurse to children. */
6869 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6870 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6873 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6874 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6875 single basic block in the original CFG and the new basic block is
6876 returned. DEST_CFUN must not have a CFG yet.
6878 Note that the region need not be a pure SESE region. Blocks inside
6879 the region may contain calls to abort/exit. The only restriction
6880 is that ENTRY_BB should be the only entry point and it must
6883 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6884 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6885 to the new function.
6887 All local variables referenced in the region are assumed to be in
6888 the corresponding BLOCK_VARS and unexpanded variable lists
6889 associated with DEST_CFUN. */
6892 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6893 basic_block exit_bb
, tree orig_block
)
6895 vec
<basic_block
> bbs
, dom_bbs
;
6896 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6897 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6898 struct function
*saved_cfun
= cfun
;
6899 int *entry_flag
, *exit_flag
;
6900 unsigned *entry_prob
, *exit_prob
;
6901 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6904 htab_t new_label_map
;
6905 hash_map
<void *, void *> *eh_map
;
6906 struct loop
*loop
= entry_bb
->loop_father
;
6907 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6908 struct move_stmt_d d
;
6910 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6912 gcc_assert (entry_bb
!= exit_bb
6914 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6916 /* Collect all the blocks in the region. Manually add ENTRY_BB
6917 because it won't be added by dfs_enumerate_from. */
6919 bbs
.safe_push (entry_bb
);
6920 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6922 /* The blocks that used to be dominated by something in BBS will now be
6923 dominated by the new block. */
6924 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6928 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6929 the predecessor edges to ENTRY_BB and the successor edges to
6930 EXIT_BB so that we can re-attach them to the new basic block that
6931 will replace the region. */
6932 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6933 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6934 entry_flag
= XNEWVEC (int, num_entry_edges
);
6935 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6937 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6939 entry_prob
[i
] = e
->probability
;
6940 entry_flag
[i
] = e
->flags
;
6941 entry_pred
[i
++] = e
->src
;
6947 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6948 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6949 exit_flag
= XNEWVEC (int, num_exit_edges
);
6950 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6952 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6954 exit_prob
[i
] = e
->probability
;
6955 exit_flag
[i
] = e
->flags
;
6956 exit_succ
[i
++] = e
->dest
;
6968 /* Switch context to the child function to initialize DEST_FN's CFG. */
6969 gcc_assert (dest_cfun
->cfg
== NULL
);
6970 push_cfun (dest_cfun
);
6972 init_empty_tree_cfg ();
6974 /* Initialize EH information for the new function. */
6976 new_label_map
= NULL
;
6979 eh_region region
= NULL
;
6981 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6982 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6984 init_eh_for_function ();
6987 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6988 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6989 new_label_mapper
, new_label_map
);
6993 /* Initialize an empty loop tree. */
6994 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
6995 init_loops_structure (dest_cfun
, loops
, 1);
6996 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6997 set_loops_for_fn (dest_cfun
, loops
);
6999 /* Move the outlined loop tree part. */
7000 num_nodes
= bbs
.length ();
7001 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7003 if (bb
->loop_father
->header
== bb
)
7005 struct loop
*this_loop
= bb
->loop_father
;
7006 struct loop
*outer
= loop_outer (this_loop
);
7008 /* If the SESE region contains some bbs ending with
7009 a noreturn call, those are considered to belong
7010 to the outermost loop in saved_cfun, rather than
7011 the entry_bb's loop_father. */
7015 num_nodes
-= this_loop
->num_nodes
;
7016 flow_loop_tree_node_remove (bb
->loop_father
);
7017 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
7018 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
7021 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
7024 /* Remove loop exits from the outlined region. */
7025 if (loops_for_fn (saved_cfun
)->exits
)
7026 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7028 struct loops
*l
= loops_for_fn (saved_cfun
);
7030 = l
->exits
->find_slot_with_hash (e
, htab_hash_pointer (e
),
7033 l
->exits
->clear_slot (slot
);
7038 /* Adjust the number of blocks in the tree root of the outlined part. */
7039 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
7041 /* Setup a mapping to be used by move_block_to_fn. */
7042 loop
->aux
= current_loops
->tree_root
;
7043 loop0
->aux
= current_loops
->tree_root
;
7047 /* Move blocks from BBS into DEST_CFUN. */
7048 gcc_assert (bbs
.length () >= 2);
7049 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
7050 hash_map
<tree
, tree
> vars_map
;
7052 memset (&d
, 0, sizeof (d
));
7053 d
.orig_block
= orig_block
;
7054 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
7055 d
.from_context
= cfun
->decl
;
7056 d
.to_context
= dest_cfun
->decl
;
7057 d
.vars_map
= &vars_map
;
7058 d
.new_label_map
= new_label_map
;
7060 d
.remap_decls_p
= true;
7062 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
7064 /* No need to update edge counts on the last block. It has
7065 already been updated earlier when we detached the region from
7066 the original CFG. */
7067 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
7073 /* Loop sizes are no longer correct, fix them up. */
7074 loop
->num_nodes
-= num_nodes
;
7075 for (struct loop
*outer
= loop_outer (loop
);
7076 outer
; outer
= loop_outer (outer
))
7077 outer
->num_nodes
-= num_nodes
;
7078 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
7080 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7083 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7088 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7090 dest_cfun
->has_simduid_loops
= true;
7092 if (aloop
->force_vectorize
)
7093 dest_cfun
->has_force_vectorize_loops
= true;
7097 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7101 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7103 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7104 = BLOCK_SUBBLOCKS (orig_block
);
7105 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7106 block
; block
= BLOCK_CHAIN (block
))
7107 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7108 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7111 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7112 &vars_map
, dest_cfun
->decl
);
7115 htab_delete (new_label_map
);
7119 /* Rewire the entry and exit blocks. The successor to the entry
7120 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7121 the child function. Similarly, the predecessor of DEST_FN's
7122 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7123 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7124 various CFG manipulation function get to the right CFG.
7126 FIXME, this is silly. The CFG ought to become a parameter to
7128 push_cfun (dest_cfun
);
7129 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7131 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7134 /* Back in the original function, the SESE region has disappeared,
7135 create a new basic block in its place. */
7136 bb
= create_empty_bb (entry_pred
[0]);
7138 add_bb_to_loop (bb
, loop
);
7139 for (i
= 0; i
< num_entry_edges
; i
++)
7141 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7142 e
->probability
= entry_prob
[i
];
7145 for (i
= 0; i
< num_exit_edges
; i
++)
7147 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7148 e
->probability
= exit_prob
[i
];
7151 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7152 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7153 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7171 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7175 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7177 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7178 struct function
*dsf
;
7179 bool ignore_topmost_bind
= false, any_var
= false;
7182 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7183 && decl_is_tm_clone (fndecl
));
7184 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7186 current_function_decl
= fndecl
;
7187 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
7189 arg
= DECL_ARGUMENTS (fndecl
);
7192 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7193 fprintf (file
, " ");
7194 print_generic_expr (file
, arg
, dump_flags
);
7195 if (flags
& TDF_VERBOSE
)
7196 print_node (file
, "", arg
, 4);
7197 if (DECL_CHAIN (arg
))
7198 fprintf (file
, ", ");
7199 arg
= DECL_CHAIN (arg
);
7201 fprintf (file
, ")\n");
7203 if (flags
& TDF_VERBOSE
)
7204 print_node (file
, "", fndecl
, 2);
7206 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7207 if (dsf
&& (flags
& TDF_EH
))
7208 dump_eh_tree (file
, dsf
);
7210 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7212 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7213 current_function_decl
= old_current_fndecl
;
7217 /* When GIMPLE is lowered, the variables are no longer available in
7218 BIND_EXPRs, so display them separately. */
7219 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7222 ignore_topmost_bind
= true;
7224 fprintf (file
, "{\n");
7225 if (!vec_safe_is_empty (fun
->local_decls
))
7226 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7228 print_generic_decl (file
, var
, flags
);
7229 if (flags
& TDF_VERBOSE
)
7230 print_node (file
, "", var
, 4);
7231 fprintf (file
, "\n");
7235 if (gimple_in_ssa_p (cfun
))
7236 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7238 tree name
= ssa_name (ix
);
7239 if (name
&& !SSA_NAME_VAR (name
))
7241 fprintf (file
, " ");
7242 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7243 fprintf (file
, " ");
7244 print_generic_expr (file
, name
, flags
);
7245 fprintf (file
, ";\n");
7252 if (fun
&& fun
->decl
== fndecl
7254 && basic_block_info_for_fn (fun
))
7256 /* If the CFG has been built, emit a CFG-based dump. */
7257 if (!ignore_topmost_bind
)
7258 fprintf (file
, "{\n");
7260 if (any_var
&& n_basic_blocks_for_fn (fun
))
7261 fprintf (file
, "\n");
7263 FOR_EACH_BB_FN (bb
, fun
)
7264 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7266 fprintf (file
, "}\n");
7268 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7270 /* The function is now in GIMPLE form but the CFG has not been
7271 built yet. Emit the single sequence of GIMPLE statements
7272 that make up its body. */
7273 gimple_seq body
= gimple_body (fndecl
);
7275 if (gimple_seq_first_stmt (body
)
7276 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7277 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7278 print_gimple_seq (file
, body
, 0, flags
);
7281 if (!ignore_topmost_bind
)
7282 fprintf (file
, "{\n");
7285 fprintf (file
, "\n");
7287 print_gimple_seq (file
, body
, 2, flags
);
7288 fprintf (file
, "}\n");
7295 /* Make a tree based dump. */
7296 chain
= DECL_SAVED_TREE (fndecl
);
7297 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7299 if (ignore_topmost_bind
)
7301 chain
= BIND_EXPR_BODY (chain
);
7309 if (!ignore_topmost_bind
)
7310 fprintf (file
, "{\n");
7315 fprintf (file
, "\n");
7317 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7318 if (ignore_topmost_bind
)
7319 fprintf (file
, "}\n");
7322 if (flags
& TDF_ENUMERATE_LOCALS
)
7323 dump_enumerated_decls (file
, flags
);
7324 fprintf (file
, "\n\n");
7326 current_function_decl
= old_current_fndecl
;
7329 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7332 debug_function (tree fn
, int flags
)
7334 dump_function_to_file (fn
, stderr
, flags
);
7338 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7341 print_pred_bbs (FILE *file
, basic_block bb
)
7346 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7347 fprintf (file
, "bb_%d ", e
->src
->index
);
7351 /* Print on FILE the indexes for the successors of basic_block BB. */
7354 print_succ_bbs (FILE *file
, basic_block bb
)
7359 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7360 fprintf (file
, "bb_%d ", e
->dest
->index
);
7363 /* Print to FILE the basic block BB following the VERBOSITY level. */
7366 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7368 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7369 memset ((void *) s_indent
, ' ', (size_t) indent
);
7370 s_indent
[indent
] = '\0';
7372 /* Print basic_block's header. */
7375 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7376 print_pred_bbs (file
, bb
);
7377 fprintf (file
, "}, succs = {");
7378 print_succ_bbs (file
, bb
);
7379 fprintf (file
, "})\n");
7382 /* Print basic_block's body. */
7385 fprintf (file
, "%s {\n", s_indent
);
7386 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7387 fprintf (file
, "%s }\n", s_indent
);
7391 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7393 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7394 VERBOSITY level this outputs the contents of the loop, or just its
7398 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7406 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7407 memset ((void *) s_indent
, ' ', (size_t) indent
);
7408 s_indent
[indent
] = '\0';
7410 /* Print loop's header. */
7411 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7413 fprintf (file
, "header = %d", loop
->header
->index
);
7416 fprintf (file
, "deleted)\n");
7420 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7422 fprintf (file
, ", multiple latches");
7423 fprintf (file
, ", niter = ");
7424 print_generic_expr (file
, loop
->nb_iterations
, 0);
7426 if (loop
->any_upper_bound
)
7428 fprintf (file
, ", upper_bound = ");
7429 print_decu (loop
->nb_iterations_upper_bound
, file
);
7432 if (loop
->any_estimate
)
7434 fprintf (file
, ", estimate = ");
7435 print_decu (loop
->nb_iterations_estimate
, file
);
7437 fprintf (file
, ")\n");
7439 /* Print loop's body. */
7442 fprintf (file
, "%s{\n", s_indent
);
7443 FOR_EACH_BB_FN (bb
, cfun
)
7444 if (bb
->loop_father
== loop
)
7445 print_loops_bb (file
, bb
, indent
, verbosity
);
7447 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7448 fprintf (file
, "%s}\n", s_indent
);
7452 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7453 spaces. Following VERBOSITY level this outputs the contents of the
7454 loop, or just its structure. */
7457 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7463 print_loop (file
, loop
, indent
, verbosity
);
7464 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7467 /* Follow a CFG edge from the entry point of the program, and on entry
7468 of a loop, pretty print the loop structure on FILE. */
7471 print_loops (FILE *file
, int verbosity
)
7475 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7476 if (bb
&& bb
->loop_father
)
7477 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7483 debug (struct loop
&ref
)
7485 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7489 debug (struct loop
*ptr
)
7494 fprintf (stderr
, "<nil>\n");
7497 /* Dump a loop verbosely. */
7500 debug_verbose (struct loop
&ref
)
7502 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7506 debug_verbose (struct loop
*ptr
)
7511 fprintf (stderr
, "<nil>\n");
7515 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7518 debug_loops (int verbosity
)
7520 print_loops (stderr
, verbosity
);
7523 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7526 debug_loop (struct loop
*loop
, int verbosity
)
7528 print_loop (stderr
, loop
, 0, verbosity
);
7531 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7535 debug_loop_num (unsigned num
, int verbosity
)
7537 debug_loop (get_loop (cfun
, num
), verbosity
);
7540 /* Return true if BB ends with a call, possibly followed by some
7541 instructions that must stay with the call. Return false,
7545 gimple_block_ends_with_call_p (basic_block bb
)
7547 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7548 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7552 /* Return true if BB ends with a conditional branch. Return false,
7556 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7558 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7559 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7563 /* Return true if we need to add fake edge to exit at statement T.
7564 Helper function for gimple_flow_call_edges_add. */
7567 need_fake_edge_p (gimple t
)
7569 tree fndecl
= NULL_TREE
;
7572 /* NORETURN and LONGJMP calls already have an edge to exit.
7573 CONST and PURE calls do not need one.
7574 We don't currently check for CONST and PURE here, although
7575 it would be a good idea, because those attributes are
7576 figured out from the RTL in mark_constant_function, and
7577 the counter incrementation code from -fprofile-arcs
7578 leads to different results from -fbranch-probabilities. */
7579 if (is_gimple_call (t
))
7581 fndecl
= gimple_call_fndecl (t
);
7582 call_flags
= gimple_call_flags (t
);
7585 if (is_gimple_call (t
)
7587 && DECL_BUILT_IN (fndecl
)
7588 && (call_flags
& ECF_NOTHROW
)
7589 && !(call_flags
& ECF_RETURNS_TWICE
)
7590 /* fork() doesn't really return twice, but the effect of
7591 wrapping it in __gcov_fork() which calls __gcov_flush()
7592 and clears the counters before forking has the same
7593 effect as returning twice. Force a fake edge. */
7594 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7595 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7598 if (is_gimple_call (t
))
7604 if (!(call_flags
& ECF_NORETURN
))
7608 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7609 if ((e
->flags
& EDGE_FAKE
) == 0)
7613 if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
7614 if (gimple_asm_volatile_p (asm_stmt
) || gimple_asm_input_p (asm_stmt
))
7621 /* Add fake edges to the function exit for any non constant and non
7622 noreturn calls (or noreturn calls with EH/abnormal edges),
7623 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7624 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7627 The goal is to expose cases in which entering a basic block does
7628 not imply that all subsequent instructions must be executed. */
7631 gimple_flow_call_edges_add (sbitmap blocks
)
7634 int blocks_split
= 0;
7635 int last_bb
= last_basic_block_for_fn (cfun
);
7636 bool check_last_block
= false;
7638 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7642 check_last_block
= true;
7644 check_last_block
= bitmap_bit_p (blocks
,
7645 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7647 /* In the last basic block, before epilogue generation, there will be
7648 a fallthru edge to EXIT. Special care is required if the last insn
7649 of the last basic block is a call because make_edge folds duplicate
7650 edges, which would result in the fallthru edge also being marked
7651 fake, which would result in the fallthru edge being removed by
7652 remove_fake_edges, which would result in an invalid CFG.
7654 Moreover, we can't elide the outgoing fake edge, since the block
7655 profiler needs to take this into account in order to solve the minimal
7656 spanning tree in the case that the call doesn't return.
7658 Handle this by adding a dummy instruction in a new last basic block. */
7659 if (check_last_block
)
7661 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7662 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7665 if (!gsi_end_p (gsi
))
7668 if (t
&& need_fake_edge_p (t
))
7672 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7675 gsi_insert_on_edge (e
, gimple_build_nop ());
7676 gsi_commit_edge_inserts ();
7681 /* Now add fake edges to the function exit for any non constant
7682 calls since there is no way that we can determine if they will
7684 for (i
= 0; i
< last_bb
; i
++)
7686 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7687 gimple_stmt_iterator gsi
;
7688 gimple stmt
, last_stmt
;
7693 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7696 gsi
= gsi_last_nondebug_bb (bb
);
7697 if (!gsi_end_p (gsi
))
7699 last_stmt
= gsi_stmt (gsi
);
7702 stmt
= gsi_stmt (gsi
);
7703 if (need_fake_edge_p (stmt
))
7707 /* The handling above of the final block before the
7708 epilogue should be enough to verify that there is
7709 no edge to the exit block in CFG already.
7710 Calling make_edge in such case would cause us to
7711 mark that edge as fake and remove it later. */
7712 #ifdef ENABLE_CHECKING
7713 if (stmt
== last_stmt
)
7715 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7716 gcc_assert (e
== NULL
);
7720 /* Note that the following may create a new basic block
7721 and renumber the existing basic blocks. */
7722 if (stmt
!= last_stmt
)
7724 e
= split_block (bb
, stmt
);
7728 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7732 while (!gsi_end_p (gsi
));
7737 verify_flow_info ();
7739 return blocks_split
;
7742 /* Removes edge E and all the blocks dominated by it, and updates dominance
7743 information. The IL in E->src needs to be updated separately.
7744 If dominance info is not available, only the edge E is removed.*/
7747 remove_edge_and_dominated_blocks (edge e
)
7749 vec
<basic_block
> bbs_to_remove
= vNULL
;
7750 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7754 bool none_removed
= false;
7756 basic_block bb
, dbb
;
7759 if (!dom_info_available_p (CDI_DOMINATORS
))
7765 /* No updating is needed for edges to exit. */
7766 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7768 if (cfgcleanup_altered_bbs
)
7769 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7774 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7775 that is not dominated by E->dest, then this set is empty. Otherwise,
7776 all the basic blocks dominated by E->dest are removed.
7778 Also, to DF_IDOM we store the immediate dominators of the blocks in
7779 the dominance frontier of E (i.e., of the successors of the
7780 removed blocks, if there are any, and of E->dest otherwise). */
7781 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7786 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7788 none_removed
= true;
7793 df
= BITMAP_ALLOC (NULL
);
7794 df_idom
= BITMAP_ALLOC (NULL
);
7797 bitmap_set_bit (df_idom
,
7798 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7801 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7802 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7804 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7806 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7807 bitmap_set_bit (df
, f
->dest
->index
);
7810 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7811 bitmap_clear_bit (df
, bb
->index
);
7813 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7815 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7816 bitmap_set_bit (df_idom
,
7817 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7821 if (cfgcleanup_altered_bbs
)
7823 /* Record the set of the altered basic blocks. */
7824 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7825 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7828 /* Remove E and the cancelled blocks. */
7833 /* Walk backwards so as to get a chance to substitute all
7834 released DEFs into debug stmts. See
7835 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7837 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7838 delete_basic_block (bbs_to_remove
[i
]);
7841 /* Update the dominance information. The immediate dominator may change only
7842 for blocks whose immediate dominator belongs to DF_IDOM:
7844 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7845 removal. Let Z the arbitrary block such that idom(Z) = Y and
7846 Z dominates X after the removal. Before removal, there exists a path P
7847 from Y to X that avoids Z. Let F be the last edge on P that is
7848 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7849 dominates W, and because of P, Z does not dominate W), and W belongs to
7850 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7851 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7853 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7854 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7856 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7857 bbs_to_fix_dom
.safe_push (dbb
);
7860 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7863 BITMAP_FREE (df_idom
);
7864 bbs_to_remove
.release ();
7865 bbs_to_fix_dom
.release ();
7868 /* Purge dead EH edges from basic block BB. */
7871 gimple_purge_dead_eh_edges (basic_block bb
)
7873 bool changed
= false;
7876 gimple stmt
= last_stmt (bb
);
7878 if (stmt
&& stmt_can_throw_internal (stmt
))
7881 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7883 if (e
->flags
& EDGE_EH
)
7885 remove_edge_and_dominated_blocks (e
);
7895 /* Purge dead EH edges from basic block listed in BLOCKS. */
7898 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7900 bool changed
= false;
7904 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7906 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7908 /* Earlier gimple_purge_dead_eh_edges could have removed
7909 this basic block already. */
7910 gcc_assert (bb
|| changed
);
7912 changed
|= gimple_purge_dead_eh_edges (bb
);
7918 /* Purge dead abnormal call edges from basic block BB. */
7921 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7923 bool changed
= false;
7926 gimple stmt
= last_stmt (bb
);
7928 if (!cfun
->has_nonlocal_label
7929 && !cfun
->calls_setjmp
)
7932 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7935 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7937 if (e
->flags
& EDGE_ABNORMAL
)
7939 if (e
->flags
& EDGE_FALLTHRU
)
7940 e
->flags
&= ~EDGE_ABNORMAL
;
7942 remove_edge_and_dominated_blocks (e
);
7952 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7955 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7957 bool changed
= false;
7961 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7963 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7965 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7966 this basic block already. */
7967 gcc_assert (bb
|| changed
);
7969 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7975 /* This function is called whenever a new edge is created or
7979 gimple_execute_on_growing_pred (edge e
)
7981 basic_block bb
= e
->dest
;
7983 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7984 reserve_phi_args_for_new_edge (bb
);
7987 /* This function is called immediately before edge E is removed from
7988 the edge vector E->dest->preds. */
7991 gimple_execute_on_shrinking_pred (edge e
)
7993 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7994 remove_phi_args (e
);
7997 /*---------------------------------------------------------------------------
7998 Helper functions for Loop versioning
7999 ---------------------------------------------------------------------------*/
8001 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8002 of 'first'. Both of them are dominated by 'new_head' basic block. When
8003 'new_head' was created by 'second's incoming edge it received phi arguments
8004 on the edge by split_edge(). Later, additional edge 'e' was created to
8005 connect 'new_head' and 'first'. Now this routine adds phi args on this
8006 additional edge 'e' that new_head to second edge received as part of edge
8010 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
8011 basic_block new_head
, edge e
)
8014 gphi_iterator psi1
, psi2
;
8016 edge e2
= find_edge (new_head
, second
);
8018 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8019 edge, we should always have an edge from NEW_HEAD to SECOND. */
8020 gcc_assert (e2
!= NULL
);
8022 /* Browse all 'second' basic block phi nodes and add phi args to
8023 edge 'e' for 'first' head. PHI args are always in correct order. */
8025 for (psi2
= gsi_start_phis (second
),
8026 psi1
= gsi_start_phis (first
);
8027 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
8028 gsi_next (&psi2
), gsi_next (&psi1
))
8032 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
8033 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
8038 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8039 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8040 the destination of the ELSE part. */
8043 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
8044 basic_block second_head ATTRIBUTE_UNUSED
,
8045 basic_block cond_bb
, void *cond_e
)
8047 gimple_stmt_iterator gsi
;
8048 gimple new_cond_expr
;
8049 tree cond_expr
= (tree
) cond_e
;
8052 /* Build new conditional expr */
8053 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
8054 NULL_TREE
, NULL_TREE
);
8056 /* Add new cond in cond_bb. */
8057 gsi
= gsi_last_bb (cond_bb
);
8058 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
8060 /* Adjust edges appropriately to connect new head with first head
8061 as well as second head. */
8062 e0
= single_succ_edge (cond_bb
);
8063 e0
->flags
&= ~EDGE_FALLTHRU
;
8064 e0
->flags
|= EDGE_FALSE_VALUE
;
8068 /* Do book-keeping of basic block BB for the profile consistency checker.
8069 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8070 then do post-pass accounting. Store the counting in RECORD. */
8072 gimple_account_profile_record (basic_block bb
, int after_pass
,
8073 struct profile_record
*record
)
8075 gimple_stmt_iterator i
;
8076 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
8078 record
->size
[after_pass
]
8079 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
8080 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
8081 record
->time
[after_pass
]
8082 += estimate_num_insns (gsi_stmt (i
),
8083 &eni_time_weights
) * bb
->count
;
8084 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8085 record
->time
[after_pass
]
8086 += estimate_num_insns (gsi_stmt (i
),
8087 &eni_time_weights
) * bb
->frequency
;
8091 struct cfg_hooks gimple_cfg_hooks
= {
8093 gimple_verify_flow_info
,
8094 gimple_dump_bb
, /* dump_bb */
8095 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8096 create_bb
, /* create_basic_block */
8097 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8098 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8099 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8100 remove_bb
, /* delete_basic_block */
8101 gimple_split_block
, /* split_block */
8102 gimple_move_block_after
, /* move_block_after */
8103 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8104 gimple_merge_blocks
, /* merge_blocks */
8105 gimple_predict_edge
, /* predict_edge */
8106 gimple_predicted_by_p
, /* predicted_by_p */
8107 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8108 gimple_duplicate_bb
, /* duplicate_block */
8109 gimple_split_edge
, /* split_edge */
8110 gimple_make_forwarder_block
, /* make_forward_block */
8111 NULL
, /* tidy_fallthru_edge */
8112 NULL
, /* force_nonfallthru */
8113 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8114 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8115 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8116 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8117 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8118 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8119 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8120 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8121 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8122 flush_pending_stmts
, /* flush_pending_stmts */
8123 gimple_empty_block_p
, /* block_empty_p */
8124 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8125 gimple_account_profile_record
,
8129 /* Split all critical edges. */
8132 split_critical_edges (void)
8138 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8139 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8140 mappings around the calls to split_edge. */
8141 start_recording_case_labels ();
8142 FOR_ALL_BB_FN (bb
, cfun
)
8144 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8146 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8148 /* PRE inserts statements to edges and expects that
8149 since split_critical_edges was done beforehand, committing edge
8150 insertions will not split more edges. In addition to critical
8151 edges we must split edges that have multiple successors and
8152 end by control flow statements, such as RESX.
8153 Go ahead and split them too. This matches the logic in
8154 gimple_find_edge_insert_loc. */
8155 else if ((!single_pred_p (e
->dest
)
8156 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8157 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8158 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8159 && !(e
->flags
& EDGE_ABNORMAL
))
8161 gimple_stmt_iterator gsi
;
8163 gsi
= gsi_last_bb (e
->src
);
8164 if (!gsi_end_p (gsi
)
8165 && stmt_ends_bb_p (gsi_stmt (gsi
))
8166 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8167 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8173 end_recording_case_labels ();
8179 const pass_data pass_data_split_crit_edges
=
8181 GIMPLE_PASS
, /* type */
8182 "crited", /* name */
8183 OPTGROUP_NONE
, /* optinfo_flags */
8184 TV_TREE_SPLIT_EDGES
, /* tv_id */
8185 PROP_cfg
, /* properties_required */
8186 PROP_no_crit_edges
, /* properties_provided */
8187 0, /* properties_destroyed */
8188 0, /* todo_flags_start */
8189 0, /* todo_flags_finish */
8192 class pass_split_crit_edges
: public gimple_opt_pass
8195 pass_split_crit_edges (gcc::context
*ctxt
)
8196 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8199 /* opt_pass methods: */
8200 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8202 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8203 }; // class pass_split_crit_edges
8208 make_pass_split_crit_edges (gcc::context
*ctxt
)
8210 return new pass_split_crit_edges (ctxt
);
8214 /* Insert COND expression which is GIMPLE_COND after STMT
8215 in basic block BB with appropriate basic block split
8216 and creation of a new conditionally executed basic block.
8217 Return created basic block. */
8219 insert_cond_bb (basic_block bb
, gimple stmt
, gimple cond
)
8221 edge fall
= split_block (bb
, stmt
);
8222 gimple_stmt_iterator iter
= gsi_last_bb (bb
);
8225 /* Insert cond statement. */
8226 gcc_assert (gimple_code (cond
) == GIMPLE_COND
);
8227 if (gsi_end_p (iter
))
8228 gsi_insert_before (&iter
, cond
, GSI_CONTINUE_LINKING
);
8230 gsi_insert_after (&iter
, cond
, GSI_CONTINUE_LINKING
);
8232 /* Create conditionally executed block. */
8233 new_bb
= create_empty_bb (bb
);
8234 make_edge (bb
, new_bb
, EDGE_TRUE_VALUE
);
8235 make_single_succ_edge (new_bb
, fall
->dest
, EDGE_FALLTHRU
);
8237 /* Fix edge for split bb. */
8238 fall
->flags
= EDGE_FALSE_VALUE
;
8240 /* Update dominance info. */
8241 if (dom_info_available_p (CDI_DOMINATORS
))
8243 set_immediate_dominator (CDI_DOMINATORS
, new_bb
, bb
);
8244 set_immediate_dominator (CDI_DOMINATORS
, fall
->dest
, bb
);
8247 /* Update loop info. */
8249 add_bb_to_loop (new_bb
, bb
->loop_father
);
8254 /* Build a ternary operation and gimplify it. Emit code before GSI.
8255 Return the gimple_val holding the result. */
8258 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8259 tree type
, tree a
, tree b
, tree c
)
8262 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8264 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8267 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8271 /* Build a binary operation and gimplify it. Emit code before GSI.
8272 Return the gimple_val holding the result. */
8275 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8276 tree type
, tree a
, tree b
)
8280 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8283 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8287 /* Build a unary operation and gimplify it. Emit code before GSI.
8288 Return the gimple_val holding the result. */
8291 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8296 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8299 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8305 /* Given a basic block B which ends with a conditional and has
8306 precisely two successors, determine which of the edges is taken if
8307 the conditional is true and which is taken if the conditional is
8308 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8311 extract_true_false_edges_from_block (basic_block b
,
8315 edge e
= EDGE_SUCC (b
, 0);
8317 if (e
->flags
& EDGE_TRUE_VALUE
)
8320 *false_edge
= EDGE_SUCC (b
, 1);
8325 *true_edge
= EDGE_SUCC (b
, 1);
8329 /* Emit return warnings. */
8333 const pass_data pass_data_warn_function_return
=
8335 GIMPLE_PASS
, /* type */
8336 "*warn_function_return", /* name */
8337 OPTGROUP_NONE
, /* optinfo_flags */
8338 TV_NONE
, /* tv_id */
8339 PROP_cfg
, /* properties_required */
8340 0, /* properties_provided */
8341 0, /* properties_destroyed */
8342 0, /* todo_flags_start */
8343 0, /* todo_flags_finish */
8346 class pass_warn_function_return
: public gimple_opt_pass
8349 pass_warn_function_return (gcc::context
*ctxt
)
8350 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8353 /* opt_pass methods: */
8354 virtual unsigned int execute (function
*);
8356 }; // class pass_warn_function_return
8359 pass_warn_function_return::execute (function
*fun
)
8361 source_location location
;
8366 if (!targetm
.warn_func_return (fun
->decl
))
8369 /* If we have a path to EXIT, then we do return. */
8370 if (TREE_THIS_VOLATILE (fun
->decl
)
8371 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8373 location
= UNKNOWN_LOCATION
;
8374 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8376 last
= last_stmt (e
->src
);
8377 if ((gimple_code (last
) == GIMPLE_RETURN
8378 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8379 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8382 if (location
== UNKNOWN_LOCATION
)
8383 location
= cfun
->function_end_locus
;
8384 warning_at (location
, 0, "%<noreturn%> function does return");
8387 /* If we see "return;" in some basic block, then we do reach the end
8388 without returning a value. */
8389 else if (warn_return_type
8390 && !TREE_NO_WARNING (fun
->decl
)
8391 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8392 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8394 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8396 gimple last
= last_stmt (e
->src
);
8397 greturn
*return_stmt
= dyn_cast
<greturn
*> (last
);
8399 && gimple_return_retval (return_stmt
) == NULL
8400 && !gimple_no_warning_p (last
))
8402 location
= gimple_location (last
);
8403 if (location
== UNKNOWN_LOCATION
)
8404 location
= fun
->function_end_locus
;
8405 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8406 TREE_NO_WARNING (fun
->decl
) = 1;
8417 make_pass_warn_function_return (gcc::context
*ctxt
)
8419 return new pass_warn_function_return (ctxt
);
8422 /* Walk a gimplified function and warn for functions whose return value is
8423 ignored and attribute((warn_unused_result)) is set. This is done before
8424 inlining, so we don't have to worry about that. */
8427 do_warn_unused_result (gimple_seq seq
)
8430 gimple_stmt_iterator i
;
8432 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8434 gimple g
= gsi_stmt (i
);
8436 switch (gimple_code (g
))
8439 do_warn_unused_result (gimple_bind_body (as_a
<gbind
*>(g
)));
8442 do_warn_unused_result (gimple_try_eval (g
));
8443 do_warn_unused_result (gimple_try_cleanup (g
));
8446 do_warn_unused_result (gimple_catch_handler (
8447 as_a
<gcatch
*> (g
)));
8449 case GIMPLE_EH_FILTER
:
8450 do_warn_unused_result (gimple_eh_filter_failure (g
));
8454 if (gimple_call_lhs (g
))
8456 if (gimple_call_internal_p (g
))
8459 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8460 LHS. All calls whose value is ignored should be
8461 represented like this. Look for the attribute. */
8462 fdecl
= gimple_call_fndecl (g
);
8463 ftype
= gimple_call_fntype (g
);
8465 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8467 location_t loc
= gimple_location (g
);
8470 warning_at (loc
, OPT_Wunused_result
,
8471 "ignoring return value of %qD, "
8472 "declared with attribute warn_unused_result",
8475 warning_at (loc
, OPT_Wunused_result
,
8476 "ignoring return value of function "
8477 "declared with attribute warn_unused_result");
8482 /* Not a container, not a call, or a call whose value is used. */
8490 const pass_data pass_data_warn_unused_result
=
8492 GIMPLE_PASS
, /* type */
8493 "*warn_unused_result", /* name */
8494 OPTGROUP_NONE
, /* optinfo_flags */
8495 TV_NONE
, /* tv_id */
8496 PROP_gimple_any
, /* properties_required */
8497 0, /* properties_provided */
8498 0, /* properties_destroyed */
8499 0, /* todo_flags_start */
8500 0, /* todo_flags_finish */
8503 class pass_warn_unused_result
: public gimple_opt_pass
8506 pass_warn_unused_result (gcc::context
*ctxt
)
8507 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8510 /* opt_pass methods: */
8511 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8512 virtual unsigned int execute (function
*)
8514 do_warn_unused_result (gimple_body (current_function_decl
));
8518 }; // class pass_warn_unused_result
8523 make_pass_warn_unused_result (gcc::context
*ctxt
)
8525 return new pass_warn_unused_result (ctxt
);
8528 /* IPA passes, compilation of earlier functions or inlining
8529 might have changed some properties, such as marked functions nothrow,
8530 pure, const or noreturn.
8531 Remove redundant edges and basic blocks, and create new ones if necessary.
8533 This pass can't be executed as stand alone pass from pass manager, because
8534 in between inlining and this fixup the verify_flow_info would fail. */
8537 execute_fixup_cfg (void)
8540 gimple_stmt_iterator gsi
;
8542 gcov_type count_scale
;
8547 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl
)->count
,
8548 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8550 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8551 cgraph_node::get (current_function_decl
)->count
;
8552 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8553 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8556 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8557 e
->count
= apply_scale (e
->count
, count_scale
);
8559 FOR_EACH_BB_FN (bb
, cfun
)
8561 bb
->count
= apply_scale (bb
->count
, count_scale
);
8562 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8564 gimple stmt
= gsi_stmt (gsi
);
8565 tree decl
= is_gimple_call (stmt
)
8566 ? gimple_call_fndecl (stmt
)
8570 int flags
= gimple_call_flags (stmt
);
8571 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8573 if (gimple_purge_dead_abnormal_call_edges (bb
))
8574 todo
|= TODO_cleanup_cfg
;
8576 if (gimple_in_ssa_p (cfun
))
8578 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8583 if (flags
& ECF_NORETURN
8584 && fixup_noreturn_call (stmt
))
8585 todo
|= TODO_cleanup_cfg
;
8588 /* Remove stores to variables we marked write-only.
8589 Keep access when store has side effect, i.e. in case when source
8591 if (gimple_store_p (stmt
)
8592 && !gimple_has_side_effects (stmt
))
8594 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8596 if (TREE_CODE (lhs
) == VAR_DECL
8597 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8598 && varpool_node::get (lhs
)->writeonly
)
8600 unlink_stmt_vdef (stmt
);
8601 gsi_remove (&gsi
, true);
8602 release_defs (stmt
);
8603 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8607 /* For calls we can simply remove LHS when it is known
8608 to be write-only. */
8609 if (is_gimple_call (stmt
)
8610 && gimple_get_lhs (stmt
))
8612 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8614 if (TREE_CODE (lhs
) == VAR_DECL
8615 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8616 && varpool_node::get (lhs
)->writeonly
)
8618 gimple_call_set_lhs (stmt
, NULL
);
8620 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8624 if (maybe_clean_eh_stmt (stmt
)
8625 && gimple_purge_dead_eh_edges (bb
))
8626 todo
|= TODO_cleanup_cfg
;
8630 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8631 e
->count
= apply_scale (e
->count
, count_scale
);
8633 /* If we have a basic block with no successors that does not
8634 end with a control statement or a noreturn call end it with
8635 a call to __builtin_unreachable. This situation can occur
8636 when inlining a noreturn call that does in fact return. */
8637 if (EDGE_COUNT (bb
->succs
) == 0)
8639 gimple stmt
= last_stmt (bb
);
8641 || (!is_ctrl_stmt (stmt
)
8642 && (!is_gimple_call (stmt
)
8643 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8645 if (stmt
&& is_gimple_call (stmt
))
8646 gimple_call_set_ctrl_altering (stmt
, false);
8647 stmt
= gimple_build_call
8648 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8649 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8650 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8654 if (count_scale
!= REG_BR_PROB_BASE
)
8655 compute_function_frequency ();
8657 /* Dump a textual representation of the flowgraph. */
8659 gimple_dump_cfg (dump_file
, dump_flags
);
8662 && (todo
& TODO_cleanup_cfg
))
8663 loops_state_set (LOOPS_NEED_FIXUP
);
8670 const pass_data pass_data_fixup_cfg
=
8672 GIMPLE_PASS
, /* type */
8673 "*free_cfg_annotations", /* name */
8674 OPTGROUP_NONE
, /* optinfo_flags */
8675 TV_NONE
, /* tv_id */
8676 PROP_cfg
, /* properties_required */
8677 0, /* properties_provided */
8678 0, /* properties_destroyed */
8679 0, /* todo_flags_start */
8680 0, /* todo_flags_finish */
8683 class pass_fixup_cfg
: public gimple_opt_pass
8686 pass_fixup_cfg (gcc::context
*ctxt
)
8687 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8690 /* opt_pass methods: */
8691 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8692 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8694 }; // class pass_fixup_cfg
8699 make_pass_fixup_cfg (gcc::context
*ctxt
)
8701 return new pass_fixup_cfg (ctxt
);
8704 /* Garbage collection support for edge_def. */
8706 extern void gt_ggc_mx (tree
&);
8707 extern void gt_ggc_mx (gimple
&);
8708 extern void gt_ggc_mx (rtx
&);
8709 extern void gt_ggc_mx (basic_block
&);
8712 gt_ggc_mx (rtx_insn
*& x
)
8715 gt_ggc_mx_rtx_def ((void *) x
);
8719 gt_ggc_mx (edge_def
*e
)
8721 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8723 gt_ggc_mx (e
->dest
);
8724 if (current_ir_type () == IR_GIMPLE
)
8725 gt_ggc_mx (e
->insns
.g
);
8727 gt_ggc_mx (e
->insns
.r
);
8731 /* PCH support for edge_def. */
8733 extern void gt_pch_nx (tree
&);
8734 extern void gt_pch_nx (gimple
&);
8735 extern void gt_pch_nx (rtx
&);
8736 extern void gt_pch_nx (basic_block
&);
8739 gt_pch_nx (rtx_insn
*& x
)
8742 gt_pch_nx_rtx_def ((void *) x
);
8746 gt_pch_nx (edge_def
*e
)
8748 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8750 gt_pch_nx (e
->dest
);
8751 if (current_ir_type () == IR_GIMPLE
)
8752 gt_pch_nx (e
->insns
.g
);
8754 gt_pch_nx (e
->insns
.r
);
8759 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8761 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8762 op (&(e
->src
), cookie
);
8763 op (&(e
->dest
), cookie
);
8764 if (current_ir_type () == IR_GIMPLE
)
8765 op (&(e
->insns
.g
), cookie
);
8767 op (&(e
->insns
.r
), cookie
);
8768 op (&(block
), cookie
);