1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 /* This file contains functions for building the Control Flow Graph (CFG)
73 for a function tree. */
75 /* Local declarations. */
77 /* Initial capacity for the basic block array. */
78 static const int initial_cfg_capacity
= 20;
80 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
81 which use a particular edge. The CASE_LABEL_EXPRs are chained together
82 via their CASE_CHAIN field, which we clear after we're done with the
83 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
85 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
86 update the case vector in response to edge redirections.
88 Right now this table is set up and torn down at key points in the
89 compilation process. It would be nice if we could make the table
90 more persistent. The key is getting notification of changes to
91 the CFG (particularly edge removal, creation and redirection). */
93 static struct pointer_map_t
*edge_to_cases
;
95 /* If we record edge_to_cases, this bitmap will hold indexes
96 of basic blocks that end in a GIMPLE_SWITCH which we touched
97 due to edge manipulations. */
99 static bitmap touched_switch_bbs
;
101 /* CFG statistics. */
104 long num_merged_labels
;
107 static struct cfg_stats_d cfg_stats
;
109 /* Nonzero if we found a computed goto while building basic blocks. */
110 static bool found_computed_goto
;
112 /* Hash table to store last discriminator assigned for each locus. */
113 struct locus_discrim_map
119 /* Hashtable helpers. */
121 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
123 typedef locus_discrim_map value_type
;
124 typedef locus_discrim_map compare_type
;
125 static inline hashval_t
hash (const value_type
*);
126 static inline bool equal (const value_type
*, const compare_type
*);
129 /* Trivial hash function for a location_t. ITEM is a pointer to
130 a hash table entry that maps a location_t to a discriminator. */
133 locus_discrim_hasher::hash (const value_type
*item
)
135 return LOCATION_LINE (item
->locus
);
138 /* Equality function for the locus-to-discriminator map. A and B
139 point to the two hash table entries to compare. */
142 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
144 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
147 static hash_table
<locus_discrim_hasher
> discriminator_per_locus
;
149 /* Basic blocks and flowgraphs. */
150 static void make_blocks (gimple_seq
);
151 static void factor_computed_gotos (void);
154 static void make_edges (void);
155 static void assign_discriminators (void);
156 static void make_cond_expr_edges (basic_block
);
157 static void make_gimple_switch_edges (basic_block
);
158 static void make_goto_expr_edges (basic_block
);
159 static void make_gimple_asm_edges (basic_block
);
160 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
161 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
162 static unsigned int split_critical_edges (void);
164 /* Various helpers. */
165 static inline bool stmt_starts_bb_p (gimple
, gimple
);
166 static int gimple_verify_flow_info (void);
167 static void gimple_make_forwarder_block (edge
);
168 static gimple
first_non_label_stmt (basic_block
);
169 static bool verify_gimple_transaction (gimple
);
171 /* Flowgraph optimization and cleanup. */
172 static void gimple_merge_blocks (basic_block
, basic_block
);
173 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
174 static void remove_bb (basic_block
);
175 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
176 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
177 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
178 static tree
find_case_label_for_value (gimple
, tree
);
181 init_empty_tree_cfg_for_function (struct function
*fn
)
183 /* Initialize the basic block array. */
185 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
186 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
187 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
188 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
190 initial_cfg_capacity
);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
194 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
195 initial_cfg_capacity
);
197 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
198 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
200 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
201 = EXIT_BLOCK_PTR_FOR_FN (fn
);
202 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
203 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
207 init_empty_tree_cfg (void)
209 init_empty_tree_cfg_for_function (cfun
);
212 /*---------------------------------------------------------------------------
214 ---------------------------------------------------------------------------*/
216 /* Entry point to the CFG builder for trees. SEQ is the sequence of
217 statements to be added to the flowgraph. */
220 build_gimple_cfg (gimple_seq seq
)
222 /* Register specific gimple functions. */
223 gimple_register_cfg_hooks ();
225 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
227 init_empty_tree_cfg ();
229 found_computed_goto
= 0;
232 /* Computed gotos are hell to deal with, especially if there are
233 lots of them with a large number of destinations. So we factor
234 them to a common computed goto location before we build the
235 edge list. After we convert back to normal form, we will un-factor
236 the computed gotos since factoring introduces an unwanted jump. */
237 if (found_computed_goto
)
238 factor_computed_gotos ();
240 /* Make sure there is always at least one block, even if it's empty. */
241 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
242 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
244 /* Adjust the size of the array. */
245 if (basic_block_info_for_fn (cfun
)->length ()
246 < (size_t) n_basic_blocks_for_fn (cfun
))
247 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
248 n_basic_blocks_for_fn (cfun
));
250 /* To speed up statement iterator walks, we first purge dead labels. */
251 cleanup_dead_labels ();
253 /* Group case nodes to reduce the number of edges.
254 We do this after cleaning up dead labels because otherwise we miss
255 a lot of obvious case merging opportunities. */
256 group_case_labels ();
258 /* Create the edges of the flowgraph. */
259 discriminator_per_locus
.create (13);
261 assign_discriminators ();
262 cleanup_dead_labels ();
263 discriminator_per_locus
.dispose ();
267 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
268 it and set loop->safelen to INT_MAX. We assume that the annotation
269 comes immediately before the condition. */
272 replace_loop_annotate ()
276 gimple_stmt_iterator gsi
;
279 FOR_EACH_LOOP (loop
, 0)
281 gsi
= gsi_last_bb (loop
->header
);
282 stmt
= gsi_stmt (gsi
);
283 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
285 gsi_prev_nondebug (&gsi
);
288 stmt
= gsi_stmt (gsi
);
289 if (gimple_code (stmt
) != GIMPLE_CALL
)
291 if (!gimple_call_internal_p (stmt
)
292 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
294 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
295 != annot_expr_ivdep_kind
)
297 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
298 gimple_call_arg (stmt
, 0));
299 gsi_replace (&gsi
, stmt
, true);
300 loop
->safelen
= INT_MAX
;
304 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
305 FOR_EACH_BB_FN (bb
, cfun
)
307 gsi
= gsi_last_bb (bb
);
308 stmt
= gsi_stmt (gsi
);
309 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
310 gsi_prev_nondebug (&gsi
);
313 stmt
= gsi_stmt (gsi
);
314 if (gimple_code (stmt
) != GIMPLE_CALL
)
316 if (!gimple_call_internal_p (stmt
)
317 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
319 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
320 != annot_expr_ivdep_kind
)
322 warning_at (gimple_location (stmt
), 0, "ignoring %<GCC ivdep%> "
324 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
325 gimple_call_arg (stmt
, 0));
326 gsi_replace (&gsi
, stmt
, true);
332 execute_build_cfg (void)
334 gimple_seq body
= gimple_body (current_function_decl
);
336 build_gimple_cfg (body
);
337 gimple_set_body (current_function_decl
, NULL
);
338 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
340 fprintf (dump_file
, "Scope blocks:\n");
341 dump_scope_blocks (dump_file
, dump_flags
);
344 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
345 replace_loop_annotate ();
351 const pass_data pass_data_build_cfg
=
353 GIMPLE_PASS
, /* type */
355 OPTGROUP_NONE
, /* optinfo_flags */
356 false, /* has_gate */
357 true, /* has_execute */
358 TV_TREE_CFG
, /* tv_id */
359 PROP_gimple_leh
, /* properties_required */
360 ( PROP_cfg
| PROP_loops
), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 TODO_verify_stmts
, /* todo_flags_finish */
366 class pass_build_cfg
: public gimple_opt_pass
369 pass_build_cfg (gcc::context
*ctxt
)
370 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
373 /* opt_pass methods: */
374 unsigned int execute () { return execute_build_cfg (); }
376 }; // class pass_build_cfg
381 make_pass_build_cfg (gcc::context
*ctxt
)
383 return new pass_build_cfg (ctxt
);
387 /* Return true if T is a computed goto. */
390 computed_goto_p (gimple t
)
392 return (gimple_code (t
) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
406 __builtin_unreachable ();
410 assert_unreachable_fallthru_edge_p (edge e
)
412 basic_block pred_bb
= e
->src
;
413 gimple last
= last_stmt (pred_bb
);
414 if (last
&& gimple_code (last
) == GIMPLE_COND
)
416 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
417 if (other_bb
== e
->dest
)
418 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
419 if (EDGE_COUNT (other_bb
->succs
) == 0)
421 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
426 stmt
= gsi_stmt (gsi
);
427 if (is_gimple_debug (stmt
))
429 gsi_next_nondebug (&gsi
);
432 stmt
= gsi_stmt (gsi
);
434 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
441 /* Search the CFG for any computed gotos. If found, factor them to a
442 common computed goto site. Also record the location of that site so
443 that we can un-factor the gotos after we have converted back to
447 factor_computed_gotos (void)
450 tree factored_label_decl
= NULL
;
452 gimple factored_computed_goto_label
= NULL
;
453 gimple factored_computed_goto
= NULL
;
455 /* We know there are one or more computed gotos in this function.
456 Examine the last statement in each basic block to see if the block
457 ends with a computed goto. */
459 FOR_EACH_BB_FN (bb
, cfun
)
461 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
467 last
= gsi_stmt (gsi
);
469 /* Ignore the computed goto we create when we factor the original
471 if (last
== factored_computed_goto
)
474 /* If the last statement is a computed goto, factor it. */
475 if (computed_goto_p (last
))
479 /* The first time we find a computed goto we need to create
480 the factored goto block and the variable each original
481 computed goto will use for their goto destination. */
482 if (!factored_computed_goto
)
484 basic_block new_bb
= create_empty_bb (bb
);
485 gimple_stmt_iterator new_gsi
= gsi_start_bb (new_bb
);
487 /* Create the destination of the factored goto. Each original
488 computed goto will put its desired destination into this
489 variable and jump to the label we create immediately
491 var
= create_tmp_var (ptr_type_node
, "gotovar");
493 /* Build a label for the new block which will contain the
494 factored computed goto. */
495 factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION
);
496 factored_computed_goto_label
497 = gimple_build_label (factored_label_decl
);
498 gsi_insert_after (&new_gsi
, factored_computed_goto_label
,
501 /* Build our new computed goto. */
502 factored_computed_goto
= gimple_build_goto (var
);
503 gsi_insert_after (&new_gsi
, factored_computed_goto
, GSI_NEW_STMT
);
506 /* Copy the original computed goto's destination into VAR. */
507 assignment
= gimple_build_assign (var
, gimple_goto_dest (last
));
508 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
510 /* And re-vector the computed goto to the new destination. */
511 gimple_goto_set_dest (last
, factored_label_decl
);
517 /* Build a flowgraph for the sequence of stmts SEQ. */
520 make_blocks (gimple_seq seq
)
522 gimple_stmt_iterator i
= gsi_start (seq
);
524 bool start_new_block
= true;
525 bool first_stmt_of_seq
= true;
526 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
528 while (!gsi_end_p (i
))
535 /* If the statement starts a new basic block or if we have determined
536 in a previous pass that we need to create a new block for STMT, do
538 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
540 if (!first_stmt_of_seq
)
541 gsi_split_seq_before (&i
, &seq
);
542 bb
= create_basic_block (seq
, NULL
, bb
);
543 start_new_block
= false;
546 /* Now add STMT to BB and create the subgraphs for special statement
548 gimple_set_bb (stmt
, bb
);
550 if (computed_goto_p (stmt
))
551 found_computed_goto
= true;
553 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
555 if (stmt_ends_bb_p (stmt
))
557 /* If the stmt can make abnormal goto use a new temporary
558 for the assignment to the LHS. This makes sure the old value
559 of the LHS is available on the abnormal edge. Otherwise
560 we will end up with overlapping life-ranges for abnormal
562 if (gimple_has_lhs (stmt
)
563 && stmt_can_make_abnormal_goto (stmt
)
564 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
566 tree lhs
= gimple_get_lhs (stmt
);
567 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
568 gimple s
= gimple_build_assign (lhs
, tmp
);
569 gimple_set_location (s
, gimple_location (stmt
));
570 gimple_set_block (s
, gimple_block (stmt
));
571 gimple_set_lhs (stmt
, tmp
);
572 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
573 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
574 DECL_GIMPLE_REG_P (tmp
) = 1;
575 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
577 start_new_block
= true;
581 first_stmt_of_seq
= false;
586 /* Create and return a new empty basic block after bb AFTER. */
589 create_bb (void *h
, void *e
, basic_block after
)
595 /* Create and initialize a new basic block. Since alloc_block uses
596 GC allocation that clears memory to allocate a basic block, we do
597 not have to clear the newly allocated basic block here. */
600 bb
->index
= last_basic_block_for_fn (cfun
);
602 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
604 /* Add the new block to the linked list of blocks. */
605 link_block (bb
, after
);
607 /* Grow the basic block array if needed. */
608 if ((size_t) last_basic_block_for_fn (cfun
)
609 == basic_block_info_for_fn (cfun
)->length ())
612 (last_basic_block_for_fn (cfun
)
613 + (last_basic_block_for_fn (cfun
) + 3) / 4);
614 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
617 /* Add the newly created block to the array. */
618 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
620 n_basic_blocks_for_fn (cfun
)++;
621 last_basic_block_for_fn (cfun
)++;
627 /*---------------------------------------------------------------------------
629 ---------------------------------------------------------------------------*/
631 /* Fold COND_EXPR_COND of each COND_EXPR. */
634 fold_cond_expr_cond (void)
638 FOR_EACH_BB_FN (bb
, cfun
)
640 gimple stmt
= last_stmt (bb
);
642 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
644 location_t loc
= gimple_location (stmt
);
648 fold_defer_overflow_warnings ();
649 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
650 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
653 zerop
= integer_zerop (cond
);
654 onep
= integer_onep (cond
);
657 zerop
= onep
= false;
659 fold_undefer_overflow_warnings (zerop
|| onep
,
661 WARN_STRICT_OVERFLOW_CONDITIONAL
);
663 gimple_cond_make_false (stmt
);
665 gimple_cond_make_true (stmt
);
670 /* Join all the blocks in the flowgraph. */
676 struct omp_region
*cur_region
= NULL
;
678 /* Create an edge from entry to the first block with executable
680 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
681 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
684 /* Traverse the basic block array placing edges. */
685 FOR_EACH_BB_FN (bb
, cfun
)
687 gimple last
= last_stmt (bb
);
692 enum gimple_code code
= gimple_code (last
);
696 make_goto_expr_edges (bb
);
700 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
704 make_cond_expr_edges (bb
);
708 make_gimple_switch_edges (bb
);
712 make_eh_edges (last
);
715 case GIMPLE_EH_DISPATCH
:
716 fallthru
= make_eh_dispatch_edges (last
);
720 /* If this function receives a nonlocal goto, then we need to
721 make edges from this call site to all the nonlocal goto
723 if (stmt_can_make_abnormal_goto (last
))
724 make_abnormal_goto_edges (bb
, true);
726 /* If this statement has reachable exception handlers, then
727 create abnormal edges to them. */
728 make_eh_edges (last
);
730 /* BUILTIN_RETURN is really a return statement. */
731 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
732 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0), fallthru
=
734 /* Some calls are known not to return. */
736 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
740 /* A GIMPLE_ASSIGN may throw internally and thus be considered
742 if (is_ctrl_altering_stmt (last
))
743 make_eh_edges (last
);
748 make_gimple_asm_edges (bb
);
753 fallthru
= make_gimple_omp_edges (bb
, &cur_region
);
756 case GIMPLE_TRANSACTION
:
758 tree abort_label
= gimple_transaction_label (last
);
760 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
766 gcc_assert (!stmt_ends_bb_p (last
));
774 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
779 /* Fold COND_EXPR_COND of each COND_EXPR. */
780 fold_cond_expr_cond ();
783 /* Find the next available discriminator value for LOCUS. The
784 discriminator distinguishes among several basic blocks that
785 share a common locus, allowing for more accurate sample-based
789 next_discriminator_for_locus (location_t locus
)
791 struct locus_discrim_map item
;
792 struct locus_discrim_map
**slot
;
795 item
.discriminator
= 0;
796 slot
= discriminator_per_locus
.find_slot_with_hash (
797 &item
, LOCATION_LINE (locus
), INSERT
);
799 if (*slot
== HTAB_EMPTY_ENTRY
)
801 *slot
= XNEW (struct locus_discrim_map
);
803 (*slot
)->locus
= locus
;
804 (*slot
)->discriminator
= 0;
806 (*slot
)->discriminator
++;
807 return (*slot
)->discriminator
;
810 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
813 same_line_p (location_t locus1
, location_t locus2
)
815 expanded_location from
, to
;
817 if (locus1
== locus2
)
820 from
= expand_location (locus1
);
821 to
= expand_location (locus2
);
823 if (from
.line
!= to
.line
)
825 if (from
.file
== to
.file
)
827 return (from
.file
!= NULL
829 && filename_cmp (from
.file
, to
.file
) == 0);
832 /* Assign discriminators to each basic block. */
835 assign_discriminators (void)
839 FOR_EACH_BB_FN (bb
, cfun
)
843 gimple last
= last_stmt (bb
);
844 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
846 if (locus
== UNKNOWN_LOCATION
)
849 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
851 gimple first
= first_non_label_stmt (e
->dest
);
852 gimple last
= last_stmt (e
->dest
);
853 if ((first
&& same_line_p (locus
, gimple_location (first
)))
854 || (last
&& same_line_p (locus
, gimple_location (last
))))
856 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
857 bb
->discriminator
= next_discriminator_for_locus (locus
);
859 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
865 /* Create the edges for a GIMPLE_COND starting at block BB. */
868 make_cond_expr_edges (basic_block bb
)
870 gimple entry
= last_stmt (bb
);
871 gimple then_stmt
, else_stmt
;
872 basic_block then_bb
, else_bb
;
873 tree then_label
, else_label
;
877 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
879 /* Entry basic blocks for each component. */
880 then_label
= gimple_cond_true_label (entry
);
881 else_label
= gimple_cond_false_label (entry
);
882 then_bb
= label_to_block (then_label
);
883 else_bb
= label_to_block (else_label
);
884 then_stmt
= first_stmt (then_bb
);
885 else_stmt
= first_stmt (else_bb
);
887 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
888 e
->goto_locus
= gimple_location (then_stmt
);
889 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
891 e
->goto_locus
= gimple_location (else_stmt
);
893 /* We do not need the labels anymore. */
894 gimple_cond_set_true_label (entry
, NULL_TREE
);
895 gimple_cond_set_false_label (entry
, NULL_TREE
);
899 /* Called for each element in the hash table (P) as we delete the
900 edge to cases hash table.
902 Clear all the TREE_CHAINs to prevent problems with copying of
903 SWITCH_EXPRs and structure sharing rules, then free the hash table
907 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
908 void *data ATTRIBUTE_UNUSED
)
912 for (t
= (tree
) *value
; t
; t
= next
)
914 next
= CASE_CHAIN (t
);
915 CASE_CHAIN (t
) = NULL
;
922 /* Start recording information mapping edges to case labels. */
925 start_recording_case_labels (void)
927 gcc_assert (edge_to_cases
== NULL
);
928 edge_to_cases
= pointer_map_create ();
929 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
932 /* Return nonzero if we are recording information for case labels. */
935 recording_case_labels_p (void)
937 return (edge_to_cases
!= NULL
);
940 /* Stop recording information mapping edges to case labels and
941 remove any information we have recorded. */
943 end_recording_case_labels (void)
947 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
948 pointer_map_destroy (edge_to_cases
);
949 edge_to_cases
= NULL
;
950 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
952 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
955 gimple stmt
= last_stmt (bb
);
956 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
957 group_case_labels_stmt (stmt
);
960 BITMAP_FREE (touched_switch_bbs
);
963 /* If we are inside a {start,end}_recording_cases block, then return
964 a chain of CASE_LABEL_EXPRs from T which reference E.
966 Otherwise return NULL. */
969 get_cases_for_edge (edge e
, gimple t
)
974 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
975 chains available. Return NULL so the caller can detect this case. */
976 if (!recording_case_labels_p ())
979 slot
= pointer_map_contains (edge_to_cases
, e
);
983 /* If we did not find E in the hash table, then this must be the first
984 time we have been queried for information about E & T. Add all the
985 elements from T to the hash table then perform the query again. */
987 n
= gimple_switch_num_labels (t
);
988 for (i
= 0; i
< n
; i
++)
990 tree elt
= gimple_switch_label (t
, i
);
991 tree lab
= CASE_LABEL (elt
);
992 basic_block label_bb
= label_to_block (lab
);
993 edge this_edge
= find_edge (e
->src
, label_bb
);
995 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
997 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
998 CASE_CHAIN (elt
) = (tree
) *slot
;
1002 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1005 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1008 make_gimple_switch_edges (basic_block bb
)
1010 gimple entry
= last_stmt (bb
);
1013 n
= gimple_switch_num_labels (entry
);
1015 for (i
= 0; i
< n
; ++i
)
1017 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1018 basic_block label_bb
= label_to_block (lab
);
1019 make_edge (bb
, label_bb
, 0);
1024 /* Return the basic block holding label DEST. */
1027 label_to_block_fn (struct function
*ifun
, tree dest
)
1029 int uid
= LABEL_DECL_UID (dest
);
1031 /* We would die hard when faced by an undefined label. Emit a label to
1032 the very first basic block. This will hopefully make even the dataflow
1033 and undefined variable warnings quite right. */
1034 if (seen_error () && uid
< 0)
1036 gimple_stmt_iterator gsi
=
1037 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1040 stmt
= gimple_build_label (dest
);
1041 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1042 uid
= LABEL_DECL_UID (dest
);
1044 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1046 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1049 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1050 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1053 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
1055 basic_block target_bb
;
1056 gimple_stmt_iterator gsi
;
1058 FOR_EACH_BB_FN (target_bb
, cfun
)
1060 for (gsi
= gsi_start_bb (target_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1062 gimple label_stmt
= gsi_stmt (gsi
);
1065 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
1068 target
= gimple_label_label (label_stmt
);
1070 /* Make an edge to every label block that has been marked as a
1071 potential target for a computed goto or a non-local goto. */
1072 if ((FORCED_LABEL (target
) && !for_call
)
1073 || (DECL_NONLOCAL (target
) && for_call
))
1075 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1079 if (!gsi_end_p (gsi
)
1080 && is_gimple_debug (gsi_stmt (gsi
)))
1081 gsi_next_nondebug (&gsi
);
1082 if (!gsi_end_p (gsi
))
1084 /* Make an edge to every setjmp-like call. */
1085 gimple call_stmt
= gsi_stmt (gsi
);
1086 if (is_gimple_call (call_stmt
)
1087 && (gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
))
1088 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1093 /* Create edges for a goto statement at block BB. */
1096 make_goto_expr_edges (basic_block bb
)
1098 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1099 gimple goto_t
= gsi_stmt (last
);
1101 /* A simple GOTO creates normal edges. */
1102 if (simple_goto_p (goto_t
))
1104 tree dest
= gimple_goto_dest (goto_t
);
1105 basic_block label_bb
= label_to_block (dest
);
1106 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1107 e
->goto_locus
= gimple_location (goto_t
);
1108 gsi_remove (&last
, true);
1112 /* A computed GOTO creates abnormal edges. */
1113 make_abnormal_goto_edges (bb
, false);
1116 /* Create edges for an asm statement with labels at block BB. */
1119 make_gimple_asm_edges (basic_block bb
)
1121 gimple stmt
= last_stmt (bb
);
1122 int i
, n
= gimple_asm_nlabels (stmt
);
1124 for (i
= 0; i
< n
; ++i
)
1126 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1127 basic_block label_bb
= label_to_block (label
);
1128 make_edge (bb
, label_bb
, 0);
1132 /*---------------------------------------------------------------------------
1134 ---------------------------------------------------------------------------*/
1136 /* Cleanup useless labels in basic blocks. This is something we wish
1137 to do early because it allows us to group case labels before creating
1138 the edges for the CFG, and it speeds up block statement iterators in
1139 all passes later on.
1140 We rerun this pass after CFG is created, to get rid of the labels that
1141 are no longer referenced. After then we do not run it any more, since
1142 (almost) no new labels should be created. */
1144 /* A map from basic block index to the leading label of that block. */
1145 static struct label_record
1150 /* True if the label is referenced from somewhere. */
1154 /* Given LABEL return the first label in the same basic block. */
1157 main_block_label (tree label
)
1159 basic_block bb
= label_to_block (label
);
1160 tree main_label
= label_for_bb
[bb
->index
].label
;
1162 /* label_to_block possibly inserted undefined label into the chain. */
1165 label_for_bb
[bb
->index
].label
= label
;
1169 label_for_bb
[bb
->index
].used
= true;
1173 /* Clean up redundant labels within the exception tree. */
1176 cleanup_dead_labels_eh (void)
1183 if (cfun
->eh
== NULL
)
1186 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1187 if (lp
&& lp
->post_landing_pad
)
1189 lab
= main_block_label (lp
->post_landing_pad
);
1190 if (lab
!= lp
->post_landing_pad
)
1192 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1193 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1197 FOR_ALL_EH_REGION (r
)
1201 case ERT_MUST_NOT_THROW
:
1207 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1211 c
->label
= main_block_label (lab
);
1216 case ERT_ALLOWED_EXCEPTIONS
:
1217 lab
= r
->u
.allowed
.label
;
1219 r
->u
.allowed
.label
= main_block_label (lab
);
1225 /* Cleanup redundant labels. This is a three-step process:
1226 1) Find the leading label for each block.
1227 2) Redirect all references to labels to the leading labels.
1228 3) Cleanup all useless labels. */
1231 cleanup_dead_labels (void)
1234 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1236 /* Find a suitable label for each block. We use the first user-defined
1237 label if there is one, or otherwise just the first label we see. */
1238 FOR_EACH_BB_FN (bb
, cfun
)
1240 gimple_stmt_iterator i
;
1242 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1245 gimple stmt
= gsi_stmt (i
);
1247 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1250 label
= gimple_label_label (stmt
);
1252 /* If we have not yet seen a label for the current block,
1253 remember this one and see if there are more labels. */
1254 if (!label_for_bb
[bb
->index
].label
)
1256 label_for_bb
[bb
->index
].label
= label
;
1260 /* If we did see a label for the current block already, but it
1261 is an artificially created label, replace it if the current
1262 label is a user defined label. */
1263 if (!DECL_ARTIFICIAL (label
)
1264 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1266 label_for_bb
[bb
->index
].label
= label
;
1272 /* Now redirect all jumps/branches to the selected label.
1273 First do so for each block ending in a control statement. */
1274 FOR_EACH_BB_FN (bb
, cfun
)
1276 gimple stmt
= last_stmt (bb
);
1277 tree label
, new_label
;
1282 switch (gimple_code (stmt
))
1285 label
= gimple_cond_true_label (stmt
);
1288 new_label
= main_block_label (label
);
1289 if (new_label
!= label
)
1290 gimple_cond_set_true_label (stmt
, new_label
);
1293 label
= gimple_cond_false_label (stmt
);
1296 new_label
= main_block_label (label
);
1297 if (new_label
!= label
)
1298 gimple_cond_set_false_label (stmt
, new_label
);
1304 size_t i
, n
= gimple_switch_num_labels (stmt
);
1306 /* Replace all destination labels. */
1307 for (i
= 0; i
< n
; ++i
)
1309 tree case_label
= gimple_switch_label (stmt
, i
);
1310 label
= CASE_LABEL (case_label
);
1311 new_label
= main_block_label (label
);
1312 if (new_label
!= label
)
1313 CASE_LABEL (case_label
) = new_label
;
1320 int i
, n
= gimple_asm_nlabels (stmt
);
1322 for (i
= 0; i
< n
; ++i
)
1324 tree cons
= gimple_asm_label_op (stmt
, i
);
1325 tree label
= main_block_label (TREE_VALUE (cons
));
1326 TREE_VALUE (cons
) = label
;
1331 /* We have to handle gotos until they're removed, and we don't
1332 remove them until after we've created the CFG edges. */
1334 if (!computed_goto_p (stmt
))
1336 label
= gimple_goto_dest (stmt
);
1337 new_label
= main_block_label (label
);
1338 if (new_label
!= label
)
1339 gimple_goto_set_dest (stmt
, new_label
);
1343 case GIMPLE_TRANSACTION
:
1345 tree label
= gimple_transaction_label (stmt
);
1348 tree new_label
= main_block_label (label
);
1349 if (new_label
!= label
)
1350 gimple_transaction_set_label (stmt
, new_label
);
1360 /* Do the same for the exception region tree labels. */
1361 cleanup_dead_labels_eh ();
1363 /* Finally, purge dead labels. All user-defined labels and labels that
1364 can be the target of non-local gotos and labels which have their
1365 address taken are preserved. */
1366 FOR_EACH_BB_FN (bb
, cfun
)
1368 gimple_stmt_iterator i
;
1369 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1371 if (!label_for_this_bb
)
1374 /* If the main label of the block is unused, we may still remove it. */
1375 if (!label_for_bb
[bb
->index
].used
)
1376 label_for_this_bb
= NULL
;
1378 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1381 gimple stmt
= gsi_stmt (i
);
1383 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1386 label
= gimple_label_label (stmt
);
1388 if (label
== label_for_this_bb
1389 || !DECL_ARTIFICIAL (label
)
1390 || DECL_NONLOCAL (label
)
1391 || FORCED_LABEL (label
))
1394 gsi_remove (&i
, true);
1398 free (label_for_bb
);
1401 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1402 the ones jumping to the same label.
1403 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
/* NOTE(review): extraction-garbled text — fragments split across lines,
   embedded original line numbers, interior lines missing (e.g. the "i"
   initialization before the first while, closing braces).  Code kept
   byte-identical; comments only.
   Purpose (grounded in visible calls): for a GIMPLE_SWITCH, drop case
   labels whose destination equals the default destination and merge
   adjacent cases that jump to the same block with consecutive ranges
   (CASE_LOW of the next equals CASE_HIGH of the base plus one, computed
   with double_int), then compact the label vector and shrink its size.  */
1406 group_case_labels_stmt (gimple stmt
)
1408 int old_size
= gimple_switch_num_labels (stmt
);
1409 int i
, j
, new_size
= old_size
;
1410 basic_block default_bb
= NULL
;
1412 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1414 /* Look for possible opportunities to merge cases. */
1416 while (i
< old_size
)
1418 tree base_case
, base_high
;
1419 basic_block base_bb
;
1421 base_case
= gimple_switch_label (stmt
, i
);
1423 gcc_assert (base_case
);
1424 base_bb
= label_to_block (CASE_LABEL (base_case
));
1426 /* Discard cases that have the same destination as the
1428 if (base_bb
== default_bb
)
1430 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1436 base_high
= CASE_HIGH (base_case
)
1437 ? CASE_HIGH (base_case
)
1438 : CASE_LOW (base_case
);
1441 /* Try to merge case labels. Break out when we reach the end
1442 of the label vector or when we cannot merge the next case
1443 label with the current one. */
1444 while (i
< old_size
)
1446 tree merge_case
= gimple_switch_label (stmt
, i
);
1447 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1448 double_int bhp1
= tree_to_double_int (base_high
) + double_int_one
;
1450 /* Merge the cases if they jump to the same place,
1451 and their ranges are consecutive. */
1452 if (merge_bb
== base_bb
1453 && tree_to_double_int (CASE_LOW (merge_case
)) == bhp1
)
1455 base_high
= CASE_HIGH (merge_case
) ?
1456 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1457 CASE_HIGH (base_case
) = base_high
;
1458 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1467 /* Compress the case labels in the label vector, and adjust the
1468 length of the vector. */
1469 for (i
= 0, j
= 0; i
< new_size
; i
++)
1471 while (! gimple_switch_label (stmt
, j
))
1473 gimple_switch_set_label (stmt
, i
,
1474 gimple_switch_label (stmt
, j
++));
1477 gcc_assert (new_size
<= old_size
);
1478 gimple_switch_set_num_labels (stmt
, new_size
);
1481 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1482 and scan the sorted vector of cases. Combine the ones jumping to the
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Walk every basic block of the current function and run
   group_case_labels_stmt on each block whose last statement is a
   GIMPLE_SWITCH.  */
1486 group_case_labels (void)
1490 FOR_EACH_BB_FN (bb
, cfun
)
1492 gimple stmt
= last_stmt (bb
);
1493 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1494 group_case_labels_stmt (stmt
);
1498 /* Checks whether we can merge block B into block A. */
/* NOTE(review): extraction-garbled text — return statements and braces
   between the visible guard conditions are missing from this view; code
   kept byte-identical, comments only.
   Purpose (grounded in visible checks): predicate deciding whether block B
   can be merged into block A.  Visible requirements: A has a single
   non-complex successor edge leading to B; B has a single predecessor and
   is not the exit block; A does not end in a block-ending statement
   (except, per the comment, a non-local label case); B's leading labels
   are removable (not user-forced, and at -O0 not user labels at all);
   B is not a loop latch; B's PHIs are eliminable (no names registered for
   SSA update); and when not optimizing, merging must not lose a
   goto_locus.  */
1501 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1504 gimple_stmt_iterator gsi
;
1506 if (!single_succ_p (a
))
1509 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1512 if (single_succ (a
) != b
)
1515 if (!single_pred_p (b
))
1518 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1521 /* If A ends by a statement causing exceptions or something similar, we
1522 cannot merge the blocks. */
1523 stmt
= last_stmt (a
);
1524 if (stmt
&& stmt_ends_bb_p (stmt
))
1527 /* Do not allow a block with only a non-local label to be merged. */
1529 && gimple_code (stmt
) == GIMPLE_LABEL
1530 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1533 /* Examine the labels at the beginning of B. */
1534 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1537 stmt
= gsi_stmt (gsi
);
1538 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1540 lab
= gimple_label_label (stmt
);
1542 /* Do not remove user forced labels or for -O0 any user labels. */
1543 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1547 /* Protect the loop latches. */
1548 if (current_loops
&& b
->loop_father
->latch
== b
)
1551 /* It must be possible to eliminate all phi nodes in B. If ssa form
1552 is not up-to-date and a name-mapping is registered, we cannot eliminate
1553 any phis. Symbols marked for renaming are never a problem though. */
1554 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1556 gimple phi
= gsi_stmt (gsi
);
1557 /* Technically only new names matter. */
1558 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1562 /* When not optimizing, don't merge if we'd lose goto_locus. */
1564 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1566 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1567 gimple_stmt_iterator prev
, next
;
1568 prev
= gsi_last_nondebug_bb (a
);
1569 next
= gsi_after_labels (b
);
1570 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1571 gsi_next_nondebug (&next
);
1572 if ((gsi_end_p (prev
)
1573 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1574 && (gsi_end_p (next
)
1575 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1582 /* Replaces all uses of NAME by VAL. */
/* NOTE(review): extraction-garbled text; code kept byte-identical,
   comments only.
   Purpose (grounded in visible calls): replace every immediate use of SSA
   name NAME with VAL.  For PHI uses on abnormal edges the abnormal-PHI
   flag is propagated to VAL (asserted virtual-operand only).  For non-PHI
   statements the statement is re-folded, ADDR_EXPR invariant bits are
   recomputed when VAL is an invariant, EH cleanup is applied when the
   statement changed, and altered block indices are recorded in
   cfgcleanup_altered_bbs.  Afterwards NAME must have zero uses, and loop
   info trees are updated via substitute_in_loop_info.  */
1585 replace_uses_by (tree name
, tree val
)
1587 imm_use_iterator imm_iter
;
1592 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1594 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1596 replace_exp (use
, val
);
1598 if (gimple_code (stmt
) == GIMPLE_PHI
)
1600 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1601 if (e
->flags
& EDGE_ABNORMAL
)
1603 /* This can only occur for virtual operands, since
1604 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1605 would prevent replacement. */
1606 gcc_checking_assert (virtual_operand_p (name
));
1607 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1612 if (gimple_code (stmt
) != GIMPLE_PHI
)
1614 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1615 gimple orig_stmt
= stmt
;
1618 /* Mark the block if we changed the last stmt in it. */
1619 if (cfgcleanup_altered_bbs
1620 && stmt_ends_bb_p (stmt
))
1621 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1623 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1624 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1625 only change sth from non-invariant to invariant, and only
1626 when propagating constants. */
1627 if (is_gimple_min_invariant (val
))
1628 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1630 tree op
= gimple_op (stmt
, i
);
1631 /* Operands may be empty here. For example, the labels
1632 of a GIMPLE_COND are nulled out following the creation
1633 of the corresponding CFG edges. */
1634 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1635 recompute_tree_invariant_for_addr_expr (op
);
1638 if (fold_stmt (&gsi
))
1639 stmt
= gsi_stmt (gsi
);
1641 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1642 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1648 gcc_checking_assert (has_zero_uses (name
));
1650 /* Also update the trees stored in loop structures. */
1655 FOR_EACH_LOOP (loop
, 0)
1657 substitute_in_loop_info (loop
, name
, val
);
1662 /* Merge block B into block A. */
/* NOTE(review): extraction-garbled text — braces, some conditions and the
   second argument of gimple_build_debug_bind are missing from this view;
   code kept byte-identical, comments only.
   Purpose (grounded in visible calls): merge block B into block A.
   Visible steps: eliminate B's single-argument PHIs either by emitting a
   copy in A (when copy-propagation is not allowed), by direct SET_USE
   rewriting for virtual operands, or via replace_uses_by; move B after A;
   remove B's labels, moving FORCED_LABELs to the start of A, turning
   other user labels into debug-bind stmts, and clearing any EH landing-pad
   association; re-home remaining statements into A; splice B's statement
   sequence onto A's end; and record A in cfgcleanup_altered_bbs.  */
1665 gimple_merge_blocks (basic_block a
, basic_block b
)
1667 gimple_stmt_iterator last
, gsi
, psi
;
1670 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1672 /* Remove all single-valued PHI nodes from block B of the form
1673 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1674 gsi
= gsi_last_bb (a
);
1675 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1677 gimple phi
= gsi_stmt (psi
);
1678 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1680 bool may_replace_uses
= (virtual_operand_p (def
)
1681 || may_propagate_copy (def
, use
));
1683 /* In case we maintain loop closed ssa form, do not propagate arguments
1684 of loop exit phi nodes. */
1686 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1687 && !virtual_operand_p (def
)
1688 && TREE_CODE (use
) == SSA_NAME
1689 && a
->loop_father
!= b
->loop_father
)
1690 may_replace_uses
= false;
1692 if (!may_replace_uses
)
1694 gcc_assert (!virtual_operand_p (def
));
1696 /* Note that just emitting the copies is fine -- there is no problem
1697 with ordering of phi nodes. This is because A is the single
1698 predecessor of B, therefore results of the phi nodes cannot
1699 appear as arguments of the phi nodes. */
1700 copy
= gimple_build_assign (def
, use
);
1701 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1702 remove_phi_node (&psi
, false);
1706 /* If we deal with a PHI for virtual operands, we can simply
1707 propagate these without fussing with folding or updating
1709 if (virtual_operand_p (def
))
1711 imm_use_iterator iter
;
1712 use_operand_p use_p
;
1715 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1716 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1717 SET_USE (use_p
, use
);
1719 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1720 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1723 replace_uses_by (def
, use
);
1725 remove_phi_node (&psi
, true);
1729 /* Ensure that B follows A. */
1730 move_block_after (b
, a
);
1732 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1733 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1735 /* Remove labels from B and set gimple_bb to A for other statements. */
1736 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1738 gimple stmt
= gsi_stmt (gsi
);
1739 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1741 tree label
= gimple_label_label (stmt
);
1744 gsi_remove (&gsi
, false);
1746 /* Now that we can thread computed gotos, we might have
1747 a situation where we have a forced label in block B
1748 However, the label at the start of block B might still be
1749 used in other ways (think about the runtime checking for
1750 Fortran assigned gotos). So we can not just delete the
1751 label. Instead we move the label to the start of block A. */
1752 if (FORCED_LABEL (label
))
1754 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1755 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1757 /* Other user labels keep around in a form of a debug stmt. */
1758 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1760 gimple dbg
= gimple_build_debug_bind (label
,
1763 gimple_debug_bind_reset_value (dbg
);
1764 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1767 lp_nr
= EH_LANDING_PAD_NR (label
);
1770 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1771 lp
->post_landing_pad
= NULL
;
1776 gimple_set_bb (stmt
, a
);
1781 /* Merge the sequences. */
1782 last
= gsi_last_bb (a
);
1783 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1784 set_bb_seq (b
, NULL
);
1786 if (cfgcleanup_altered_bbs
)
1787 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1791 /* Return the one of two successors of BB that is not reachable by a
1792 complex edge, if there is one. Else, return BB. We use
1793 this in optimizations that use post-dominators for their heuristics,
1794 to catch the cases in C++ where function calls are involved. */
/* NOTE(review): extraction-garbled text — the return statements following
   each visible condition are missing from this view; code kept
   byte-identical.  Per the preceding header comment: of BB's two
   successors, return the one not reached by a complex edge, else BB.  */
1797 single_noncomplex_succ (basic_block bb
)
1800 if (EDGE_COUNT (bb
->succs
) != 2)
1803 e0
= EDGE_SUCC (bb
, 0);
1804 e1
= EDGE_SUCC (bb
, 1);
1805 if (e0
->flags
& EDGE_COMPLEX
)
1807 if (e1
->flags
& EDGE_COMPLEX
)
1813 /* T is CALL_EXPR. Set current_function_calls_* flags. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Record per-function flags for a call statement: mark cfun->calls_alloca
   when the call may be alloca-like (ECF_MAY_BE_ALLOCA) and
   cfun->calls_setjmp when it may return twice (ECF_RETURNS_TWICE).  */
1816 notice_special_calls (gimple call
)
1818 int flags
= gimple_call_flags (call
);
1820 if (flags
& ECF_MAY_BE_ALLOCA
)
1821 cfun
->calls_alloca
= true;
1822 if (flags
& ECF_RETURNS_TWICE
)
1823 cfun
->calls_setjmp
= true;
1827 /* Clear flags set by notice_special_calls. Used by dead code removal
1828 to update the flags. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Reset the flags set by notice_special_calls so dead-code removal can
   recompute them.  */
1831 clear_special_calls (void)
1833 cfun
->calls_alloca
= false;
1834 cfun
->calls_setjmp
= false;
1837 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   For an unreachable block BB: delete all of its PHI nodes and then
   remove every outgoing edge.  */
1840 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1842 /* Since this block is no longer reachable, we can just delete all
1843 of its PHI nodes. */
1844 remove_phi_nodes (bb
);
1846 /* Remove edges to BB's successors. */
1847 while (EDGE_COUNT (bb
->succs
) > 0)
1848 remove_edge (EDGE_SUCC (bb
, 0));
1852 /* Remove statements of basic block BB. */
/* NOTE(review): extraction-garbled text — braces, the new_bb declaration,
   and several guard lines are missing from this view; code kept
   byte-identical, comments only.
   Purpose (grounded in visible calls): delete basic block BB.  Dumps the
   removal when dump_file is active, frees niter estimates when BB is a
   loop latch or header, walks statements backwards releasing SSA defs and
   removing them — except forced/non-local labels, which are moved to the
   previous block (a non-local label is downgraded to a forced label) —
   then removes PHIs and outgoing edges and clears BB's gimple sequence
   and phi-node list.  */
1855 remove_bb (basic_block bb
)
1857 gimple_stmt_iterator i
;
1861 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1862 if (dump_flags
& TDF_DETAILS
)
1864 dump_bb (dump_file
, bb
, 0, dump_flags
);
1865 fprintf (dump_file
, "\n");
1871 struct loop
*loop
= bb
->loop_father
;
1873 /* If a loop gets removed, clean up the information associated
1875 if (loop
->latch
== bb
1876 || loop
->header
== bb
)
1877 free_numbers_of_iterations_estimates_loop (loop
);
1880 /* Remove all the instructions in the block. */
1881 if (bb_seq (bb
) != NULL
)
1883 /* Walk backwards so as to get a chance to substitute all
1884 released DEFs into debug stmts. See
1885 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1887 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
1889 gimple stmt
= gsi_stmt (i
);
1890 if (gimple_code (stmt
) == GIMPLE_LABEL
1891 && (FORCED_LABEL (gimple_label_label (stmt
))
1892 || DECL_NONLOCAL (gimple_label_label (stmt
))))
1895 gimple_stmt_iterator new_gsi
;
1897 /* A non-reachable non-local label may still be referenced.
1898 But it no longer needs to carry the extra semantics of
1900 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
1902 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
1903 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
1906 new_bb
= bb
->prev_bb
;
1907 new_gsi
= gsi_start_bb (new_bb
);
1908 gsi_remove (&i
, false);
1909 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
1913 /* Release SSA definitions if we are in SSA. Note that we
1914 may be called when not in SSA. For example,
1915 final_cleanup calls this function via
1916 cleanup_tree_cfg. */
1917 if (gimple_in_ssa_p (cfun
))
1918 release_defs (stmt
);
1920 gsi_remove (&i
, true);
1924 i
= gsi_last_bb (bb
);
1930 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
1931 bb
->il
.gimple
.seq
= NULL
;
1932 bb
->il
.gimple
.phi_nodes
= NULL
;
1936 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1937 predicate VAL, return the edge that will be taken out of the block.
1938 If VAL does not match a unique edge, NULL is returned. */
/* NOTE(review): extraction-garbled text — the final fallthrough return is
   missing from this view; code kept byte-identical.
   Given block BB ending in a control statement and predicate value VAL,
   dispatch to the COND/SWITCH/computed-goto helper to find the outgoing
   edge that will be taken; non-invariant VAL is rejected early (the
   guarded return is among the missing lines).  */
1941 find_taken_edge (basic_block bb
, tree val
)
1945 stmt
= last_stmt (bb
);
1948 gcc_assert (is_ctrl_stmt (stmt
));
1953 if (!is_gimple_min_invariant (val
))
1956 if (gimple_code (stmt
) == GIMPLE_COND
)
1957 return find_taken_edge_cond_expr (bb
, val
);
1959 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
1960 return find_taken_edge_switch_expr (bb
, val
);
1962 if (computed_goto_p (stmt
))
1964 /* Only optimize if the argument is a label, if the argument is
1965 not a label then we can not construct a proper CFG.
1967 It may be the case that we only need to allow the LABEL_REF to
1968 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1969 appear inside a LABEL_EXPR just to be safe. */
1970 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
1971 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
1972 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
1979 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1980 statement, determine which of the outgoing edges will be taken out of the
1981 block. Return NULL if either edge may be taken. */
/* NOTE(review): extraction-garbled text — the final return of E is among
   the missing lines; code kept byte-identical.
   Map the LABEL_DECL VAL to its block and return the edge from BB to that
   destination (asserted to exist).  */
1984 find_taken_edge_computed_goto (basic_block bb
, tree val
)
1989 dest
= label_to_block (val
);
1992 e
= find_edge (bb
, dest
);
1993 gcc_assert (e
!= NULL
);
1999 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2000 statement, determine which of the two edges will be taken out of the
2001 block. Return NULL if either edge may be taken. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   For a block ending in a GIMPLE_COND and a constant VAL (asserted
   INTEGER_CST), return the false edge when VAL is zero, otherwise the
   true edge.  */
2004 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2006 edge true_edge
, false_edge
;
2008 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2010 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2011 return (integer_zerop (val
) ? false_edge
: true_edge
);
2014 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2015 statement, determine which edge will be taken out of the block. Return
2016 NULL if any edge may be taken. */
/* NOTE(review): extraction-garbled text — the final return of E is among
   the missing lines; code kept byte-identical.
   For a block ending in a GIMPLE_SWITCH and constant VAL, look up the
   matching case label, resolve its destination block, and find the edge
   from BB to it.  */
2019 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2021 basic_block dest_bb
;
2026 switch_stmt
= last_stmt (bb
);
2027 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2028 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2030 e
= find_edge (bb
, dest_bb
);
2036 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2037 We can make optimal use here of the fact that the case labels are
2038 sorted: We can do a binary search for a case matching VAL. */
/* NOTE(review): extraction-garbled text — the binary-search narrowing of
   low/high and the single-value "return t" paths are among the missing
   lines; code kept byte-identical.
   Binary-search the sorted case labels of SWITCH_STMT (index 0 is the
   default) for one matching VAL, handling both single-value labels and
   [CASE_LOW, CASE_HIGH] ranges; fall back to the default case.  */
2041 find_case_label_for_value (gimple switch_stmt
, tree val
)
2043 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2044 tree default_case
= gimple_switch_default_label (switch_stmt
);
2046 for (low
= 0, high
= n
; high
- low
> 1; )
2048 size_t i
= (high
+ low
) / 2;
2049 tree t
= gimple_switch_label (switch_stmt
, i
);
2052 /* Cache the result of comparing CASE_LOW and val. */
2053 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2060 if (CASE_HIGH (t
) == NULL
)
2062 /* A singe-valued case label. */
2068 /* A case range. We can only handle integer ranges. */
2069 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2074 return default_case
;
2078 /* Dump a basic block on stderr. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Debug helper: dump BB to stderr with virtual operands, memory symbols
   and block notes.  */
2081 gimple_debug_bb (basic_block bb
)
2083 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2087 /* Dump basic block with index N on stderr. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Debug helper: dump the basic block with index N to stderr and return
   it.  */
2090 gimple_debug_bb_n (int n
)
2092 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2093 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2097 /* Dump the CFG on stderr.
2099 FLAGS are the same used by the tree dumping functions
2100 (see TDF_* in dumpfile.h). */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Debug helper: dump the whole CFG to stderr with the given TDF_* FLAGS.  */
2103 gimple_debug_cfg (int flags
)
2105 gimple_dump_cfg (stderr
, flags
);
2109 /* Dump the program showing basic block boundaries on the given FILE.
2111 FLAGS are the same used by the tree dumping functions (see TDF_* in
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Dump the function's CFG to FILE: with TDF_DETAILS, a header plus
   block/edge counts and a brief CFG dump; with TDF_STATS, CFG memory
   statistics; always the function body with block boundaries
   (TDF_BLOCKS).  */
2115 gimple_dump_cfg (FILE *file
, int flags
)
2117 if (flags
& TDF_DETAILS
)
2119 dump_function_header (file
, current_function_decl
, flags
);
2120 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2121 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2122 last_basic_block_for_fn (cfun
));
2124 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2125 fprintf (file
, "\n");
2128 if (flags
& TDF_STATS
)
2129 dump_cfg_stats (file
);
2131 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2135 /* Dump CFG statistics on FILE. */
/* NOTE(review): extraction-garbled text — the num_edges declaration,
   total accumulation lines and the LABEL argument of the fmt_str_3
   fprintf are among the missing lines; code kept byte-identical.
   Print a table of CFG memory usage (basic blocks, edges, total) for the
   current function to FILE, plus the coalesced-label counter and its
   function-local running maximum (static max_num_merged_labels).  */
2138 dump_cfg_stats (FILE *file
)
2140 static long max_num_merged_labels
= 0;
2141 unsigned long size
, total
= 0;
2144 const char * const fmt_str
= "%-30s%-13s%12s\n";
2145 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2146 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2147 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2148 const char *funcname
= current_function_name ();
2150 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2152 fprintf (file
, "---------------------------------------------------------\n");
2153 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2154 fprintf (file
, fmt_str
, "", " instances ", "used ");
2155 fprintf (file
, "---------------------------------------------------------\n");
2157 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2159 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2160 SCALE (size
), LABEL (size
));
2163 FOR_EACH_BB_FN (bb
, cfun
)
2164 num_edges
+= EDGE_COUNT (bb
->succs
);
2165 size
= num_edges
* sizeof (struct edge_def
);
2167 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2169 fprintf (file
, "---------------------------------------------------------\n");
2170 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2172 fprintf (file
, "---------------------------------------------------------\n");
2173 fprintf (file
, "\n");
2175 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2176 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2178 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2179 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2181 fprintf (file
, "\n");
2185 /* Dump CFG statistics on stderr. Keep extern so that it's always
2186 linked in the final executable. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Debug entry point: print CFG statistics to stderr.  */
2189 debug_cfg_stats (void)
2191 dump_cfg_stats (stderr
);
2194 /*---------------------------------------------------------------------------
2195 Miscellaneous helpers
2196 ---------------------------------------------------------------------------*/
2198 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2199 flow. Transfers of control flow associated with EH are excluded. */
/* NOTE(review): extraction-garbled text — the return statements after each
   visible guard are missing from this view; code kept byte-identical.
   Predicate: can call T make an abnormal (non-EH) transfer of control?
   Visible early-out conditions: no non-local labels and no setjmp in the
   function; the call has no side effects; or the callee is ECF_LEAF.  */
2202 call_can_make_abnormal_goto (gimple t
)
2204 /* If the function has no non-local labels, then a call cannot make an
2205 abnormal transfer of control. */
2206 if (!cfun
->has_nonlocal_label
2207 && !cfun
->calls_setjmp
)
2210 /* Likewise if the call has no side effects. */
2211 if (!gimple_has_side_effects (t
))
2214 /* Likewise if the called function is leaf. */
2215 if (gimple_call_flags (t
) & ECF_LEAF
)
2222 /* Return true if T can make an abnormal transfer of control flow.
2223 Transfers of control flow associated with EH are excluded. */
/* NOTE(review): extraction-garbled text — the returns for the
   computed-goto case and the final fallthrough are missing from this
   view; code kept byte-identical.
   Predicate: can statement T make an abnormal (non-EH) control transfer?
   Computed gotos qualify; calls defer to call_can_make_abnormal_goto.  */
2226 stmt_can_make_abnormal_goto (gimple t
)
2228 if (computed_goto_p (t
))
2230 if (is_gimple_call (t
))
2231 return call_can_make_abnormal_goto (t
);
2236 /* Return true if T represents a stmt that always transfers control. */
/* NOTE(review): extraction-garbled text — the switch's case labels and
   returns are entirely missing from this view; code kept byte-identical.
   Predicate (per the preceding header comment): does T always transfer
   control?  Dispatches on gimple_code (t).  */
2239 is_ctrl_stmt (gimple t
)
2241 switch (gimple_code (t
))
2255 /* Return true if T is a statement that may alter the flow of control
2256 (e.g., a call to a non-returning function). */
/* NOTE(review): extraction-garbled text — several case labels, returns
   and braces are missing from this view; code kept byte-identical.
   Predicate: may T alter control flow?  Visible cases: calls that can
   make an abnormal goto, that do not return (ECF_NORETURN), TM-ending
   builtins, or BUILT_IN_RETURN; GIMPLE_EH_DISPATCH; asm with goto labels;
   OpenMP directives; transaction starts; otherwise any statement that can
   throw internally.  */
2259 is_ctrl_altering_stmt (gimple t
)
2263 switch (gimple_code (t
))
2267 int flags
= gimple_call_flags (t
);
2269 /* A call alters control flow if it can make an abnormal goto. */
2270 if (call_can_make_abnormal_goto (t
))
2273 /* A call also alters control flow if it does not return. */
2274 if (flags
& ECF_NORETURN
)
2277 /* TM ending statements have backedges out of the transaction.
2278 Return true so we split the basic block containing them.
2279 Note that the TM_BUILTIN test is merely an optimization. */
2280 if ((flags
& ECF_TM_BUILTIN
)
2281 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2284 /* BUILT_IN_RETURN call is same as return statement. */
2285 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2290 case GIMPLE_EH_DISPATCH
:
2291 /* EH_DISPATCH branches to the individual catch handlers at
2292 this level of a try or allowed-exceptions region. It can
2293 fallthru to the next statement as well. */
2297 if (gimple_asm_nlabels (t
) > 0)
2302 /* OpenMP directives alter control flow. */
2305 case GIMPLE_TRANSACTION
:
2306 /* A transaction start alters control flow. */
2313 /* If a statement can throw, it alters control flow. */
2314 return stmt_can_throw_internal (t
);
2318 /* Return true if T is a simple local goto. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Predicate: is T a simple local goto, i.e. a GIMPLE_GOTO whose
   destination is a LABEL_DECL (not a computed goto)?  */
2321 simple_goto_p (gimple t
)
2323 return (gimple_code (t
) == GIMPLE_GOTO
2324 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2328 /* Return true if STMT should start a new basic block. PREV_STMT is
2329 the statement preceding STMT. It is used when STMT is a label or a
2330 case label. Labels should only start a new basic block if their
2331 previous statement wasn't a label. Otherwise, sequence of labels
2332 would generate unnecessary basic blocks that only contain a single
/* NOTE(review): extraction-garbled text — the early NULL check and the
   true/false returns after each visible condition are missing from this
   view; code kept byte-identical.
   Predicate: should STMT (preceded by PREV_STMT) start a new basic block?
   Visible logic: labels start one unless preceded by another label
   (consecutive labels are merged, counted in cfg_stats.num_merged_labels,
   except after a non-local label); non-local/forced labels always start
   one; returns-twice calls (setjmp-like) also start one.  */
2336 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2341 /* Labels start a new basic block only if the preceding statement
2342 wasn't a label of the same type. This prevents the creation of
2343 consecutive blocks that have nothing but a single label. */
2344 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2346 /* Nonlocal and computed GOTO targets always start a new block. */
2347 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2348 || FORCED_LABEL (gimple_label_label (stmt
)))
2351 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2353 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2356 cfg_stats
.num_merged_labels
++;
2362 else if (gimple_code (stmt
) == GIMPLE_CALL
2363 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2364 /* setjmp acts similar to a nonlocal GOTO target and thus should
2365 start a new block. */
2372 /* Return true if T should end a basic block. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Predicate: T ends a basic block iff it always transfers control
   (is_ctrl_stmt) or may alter control flow (is_ctrl_altering_stmt).  */
2375 stmt_ends_bb_p (gimple t
)
2377 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2380 /* Remove block annotations and other data structures. */
/* NOTE(review): extraction-garbled text; code kept byte-identical.
   Free the label-to-block map of the current function (part of removing
   CFG annotations).  */
2383 delete_tree_cfg_annotations (void)
2385 vec_free (label_to_block_map_for_fn (cfun
));
2389 /* Return the first statement in basic block BB. */
/* NOTE(review): extraction-garbled text — the stmt declaration, the loop
   body (gsi_next) and the final return are missing from this view; code
   kept byte-identical.
   Return the first statement of BB, skipping leading debug statements.  */
2392 first_stmt (basic_block bb
)
2394 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2397 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2405 /* Return the first non-label statement in basic block BB. */
/* NOTE(review): extraction-garbled text — the loop body (gsi_next) is
   missing from this view; code kept byte-identical.
   Return the first non-label statement of BB, or NULL if the block
   contains only labels.  */
2408 first_non_label_stmt (basic_block bb
)
2410 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2411 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2413 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2416 /* Return the last statement in basic block BB. */
/* NOTE(review): extraction-garbled text — the stmt declaration, the loop
   body (gsi_prev) and the final return are missing from this view; code
   kept byte-identical.
   Return the last statement of BB, skipping trailing debug statements.  */
2419 last_stmt (basic_block bb
)
2421 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2424 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2432 /* Return the last statement of an otherwise empty block. Return NULL
2433 if the block is totally empty, or if it contains more than one
/* NOTE(review): extraction-garbled text — the declarations of last/prev,
   the empty-block NULL return and the final returns are missing from this
   view; code kept byte-identical.
   Return the last statement of an otherwise empty block (ignoring debug
   statements and an optional leading label), or NULL when the block is
   empty or has more than one real statement.  */
2437 last_and_only_stmt (basic_block bb
)
2439 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2445 last
= gsi_stmt (i
);
2446 gsi_prev_nondebug (&i
);
2450 /* Empty statements should no longer appear in the instruction stream.
2451 Everything that might have appeared before should be deleted by
2452 remove_useless_stmts, and the optimizers should just gsi_remove
2453 instead of smashing with build_empty_stmt.
2455 Thus the only thing that should appear here in a block containing
2456 one executable statement is a label. */
2457 prev
= gsi_stmt (i
);
2458 if (gimple_code (prev
) == GIMPLE_LABEL
)
2464 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
/* NOTE(review): extraction-garbled text — the i/vm declarations and the
   early return when V is empty are missing from this view; code kept
   byte-identical.
   Re-attach PHI arguments queued on OLD_EDGE (via the redirect-edge var
   map) onto the PHIs of NEW_EDGE's destination, asserting each mapped
   result matches the PHI result, then clear OLD_EDGE's map.  */
2467 reinstall_phi_args (edge new_edge
, edge old_edge
)
2469 edge_var_map_vector
*v
;
2472 gimple_stmt_iterator phis
;
2474 v
= redirect_edge_var_map_vector (old_edge
);
2478 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2479 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2480 i
++, gsi_next (&phis
))
2482 gimple phi
= gsi_stmt (phis
);
2483 tree result
= redirect_edge_var_map_result (vm
);
2484 tree arg
= redirect_edge_var_map_def (vm
);
2486 gcc_assert (result
== gimple_phi_result (phi
));
2488 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2491 redirect_edge_var_map_clear (old_edge
);
2494 /* Returns the basic block after which the new basic block created
2495 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2496 near its "logical" location. This is of most help to humans looking
2497 at debugging dumps. */
/* NOTE(review): extraction-garbled text — the guard around dest_prev and
   the fallthrough return of dest_prev are missing from this view; code
   kept byte-identical.
   Choose the placement block for the block created by splitting EDGE_IN:
   return the edge's source when the block before the destination already
   reaches the destination via a non-complex edge (keeps dumps readable,
   per the preceding header comment).  */
2500 split_edge_bb_loc (edge edge_in
)
2502 basic_block dest
= edge_in
->dest
;
2503 basic_block dest_prev
= dest
->prev_bb
;
2507 edge e
= find_edge (dest_prev
, dest
);
2508 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2509 return edge_in
->src
;
2514 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2515 Abort on abnormal edges. */
/* NOTE(review): extraction-garbled text — the e/new_edge declarations and
   the final return of new_bb are missing from this view; code kept
   byte-identical.
   Split EDGE_IN (asserted non-abnormal): create an empty block after the
   location chosen by split_edge_bb_loc, copy frequency/count from the
   edge, wire a fallthrough edge from the new block to the old
   destination, redirect EDGE_IN into the new block, and reinstall the
   queued PHI arguments on the new edge.  */
2518 gimple_split_edge (edge edge_in
)
2520 basic_block new_bb
, after_bb
, dest
;
2523 /* Abnormal edges cannot be split. */
2524 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2526 dest
= edge_in
->dest
;
2528 after_bb
= split_edge_bb_loc (edge_in
);
2530 new_bb
= create_empty_bb (after_bb
);
2531 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2532 new_bb
->count
= edge_in
->count
;
2533 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2534 new_edge
->probability
= REG_BR_PROB_BASE
;
2535 new_edge
->count
= edge_in
->count
;
2537 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2538 gcc_assert (e
== edge_in
);
2539 reinstall_phi_args (new_edge
, e
);
2545 /* Verify properties of the address expression T with base object BASE. */
/* NOTE(review): extraction-garbled text — the old/new_constant
   declarations, the error-path returns, and the final NULL_TREE return
   are missing from this view; code kept byte-identical.
   Verify ADDR_EXPR T with base object BASE: recompute the invariant bits
   and report if TREE_CONSTANT or TREE_SIDE_EFFECTS were stale; for
   VAR/PARM/RESULT_DECL bases, report a DECL_GIMPLE_REG_P variable whose
   address is taken.  */
2548 verify_address (tree t
, tree base
)
2551 bool old_side_effects
;
2553 bool new_side_effects
;
2555 old_constant
= TREE_CONSTANT (t
);
2556 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2558 recompute_tree_invariant_for_addr_expr (t
);
2559 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2560 new_constant
= TREE_CONSTANT (t
);
2562 if (old_constant
!= new_constant
)
2564 error ("constant not recomputed when ADDR_EXPR changed");
2567 if (old_side_effects
!= new_side_effects
)
2569 error ("side effects not recomputed when ADDR_EXPR changed");
2573 if (!(TREE_CODE (base
) == VAR_DECL
2574 || TREE_CODE (base
) == PARM_DECL
2575 || TREE_CODE (base
) == RESULT_DECL
))
2578 if (DECL_GIMPLE_REG_P (base
))
2580 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
/* NOTE(review): `verify_expr` — walk_tree callback that checks GIMPLE
   operand invariants (SSA names not on the free list, no INDIRECT_REF,
   well-formed MEM_REF / ADDR_EXPR / BIT_FIELD_REF / reference chains,
   valid operands to unary/binary operators, etc.), returning the
   offending sub-tree on error.

   WARNING(review): this text is a corrupted, line-sampled extraction of
   the original function.  The leading numeric tokens are the original
   file's line numbers; gaps in them show that case labels, braces,
   returns and several statements are missing, and single source lines
   are split across physical lines.  Kept byte-identical below — restore
   from the pristine upstream file before attempting to compile.  */
2587 /* Callback for walk_tree, check that all elements with address taken are
2588 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2589 inside a PHI node. */
2592 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2599 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2600 #define CHECK_OP(N, MSG) \
2601 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2602 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2604 switch (TREE_CODE (t
))
2607 if (SSA_NAME_IN_FREE_LIST (t
))
2609 error ("SSA name in freelist but still referenced");
2615 error ("INDIRECT_REF in gimple IL");
/* NOTE(review): MEM_REF case — operand 0 must be a valid memory-reference
   address of pointer type; operand 1 must be a pointer-typed INTEGER_CST.  */
2619 x
= TREE_OPERAND (t
, 0);
2620 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2621 || !is_gimple_mem_ref_addr (x
))
2623 error ("invalid first operand of MEM_REF");
2626 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2627 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2629 error ("invalid offset operand of MEM_REF");
2630 return TREE_OPERAND (t
, 1);
2632 if (TREE_CODE (x
) == ADDR_EXPR
2633 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2639 x
= fold (ASSERT_EXPR_COND (t
));
2640 if (x
== boolean_false_node
)
2642 error ("ASSERT_EXPR with an always-false condition");
2648 error ("MODIFY_EXPR not expected while having tuples");
/* NOTE(review): ADDR_EXPR case — walk down handled components to the base
   decl and check it is marked TREE_ADDRESSABLE.  */
2655 gcc_assert (is_gimple_address (t
));
2657 /* Skip any references (they will be checked when we recurse down the
2658 tree) and ensure that any variable used as a prefix is marked
2660 for (x
= TREE_OPERAND (t
, 0);
2661 handled_component_p (x
);
2662 x
= TREE_OPERAND (x
, 0))
2665 if ((tem
= verify_address (t
, x
)))
2668 if (!(TREE_CODE (x
) == VAR_DECL
2669 || TREE_CODE (x
) == PARM_DECL
2670 || TREE_CODE (x
) == RESULT_DECL
))
2673 if (!TREE_ADDRESSABLE (x
))
2675 error ("address taken, but ADDRESSABLE bit not set");
2683 x
= COND_EXPR_COND (t
);
2684 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2686 error ("non-integral used in condition");
2689 if (!is_gimple_condexpr (x
))
2691 error ("invalid conditional operand");
2696 case NON_LVALUE_EXPR
:
2697 case TRUTH_NOT_EXPR
:
2701 case FIX_TRUNC_EXPR
:
2706 CHECK_OP (0, "invalid operand to unary operator");
/* NOTE(review): BIT_FIELD_REF / REALPART / IMAGPART checks: result must be
   scalar; position and size must fit unsigned HOST_WIDE_INT, precision must
   match the field size, and the access must stay within the object.  */
2712 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2714 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2718 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2720 tree t0
= TREE_OPERAND (t
, 0);
2721 tree t1
= TREE_OPERAND (t
, 1);
2722 tree t2
= TREE_OPERAND (t
, 2);
2723 if (!tree_fits_uhwi_p (t1
)
2724 || !tree_fits_uhwi_p (t2
))
2726 error ("invalid position or size operand to BIT_FIELD_REF");
2729 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2730 && (TYPE_PRECISION (TREE_TYPE (t
))
2731 != tree_to_uhwi (t1
)))
2733 error ("integral result type precision does not match "
2734 "field size of BIT_FIELD_REF");
2737 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2738 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2739 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2740 != tree_to_uhwi (t1
)))
2742 error ("mode precision of non-integral result does not "
2743 "match field size of BIT_FIELD_REF");
2746 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2747 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2748 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2750 error ("position plus size exceeds size of referenced object in "
2755 t
= TREE_OPERAND (t
, 0);
2760 case ARRAY_RANGE_REF
:
2761 case VIEW_CONVERT_EXPR
:
2762 /* We have a nest of references. Verify that each of the operands
2763 that determine where to reference is either a constant or a variable,
2764 verify that the base is valid, and then show we've already checked
2766 while (handled_component_p (t
))
2768 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2769 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2770 else if (TREE_CODE (t
) == ARRAY_REF
2771 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2773 CHECK_OP (1, "invalid array index");
2774 if (TREE_OPERAND (t
, 2))
2775 CHECK_OP (2, "invalid array lower bound");
2776 if (TREE_OPERAND (t
, 3))
2777 CHECK_OP (3, "invalid array stride");
2779 else if (TREE_CODE (t
) == BIT_FIELD_REF
2780 || TREE_CODE (t
) == REALPART_EXPR
2781 || TREE_CODE (t
) == IMAGPART_EXPR
)
2783 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2788 t
= TREE_OPERAND (t
, 0);
2791 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2793 error ("invalid reference prefix");
2800 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2801 POINTER_PLUS_EXPR. */
2802 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2804 error ("invalid operand to plus/minus, type is a pointer");
2807 CHECK_OP (0, "invalid operand to binary operator");
2808 CHECK_OP (1, "invalid operand to binary operator");
2811 case POINTER_PLUS_EXPR
:
2812 /* Check to make sure the first operand is a pointer or reference type. */
2813 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2815 error ("invalid operand to pointer plus, first operand is not a pointer");
2818 /* Check to make sure the second operand is a ptrofftype. */
2819 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2821 error ("invalid operand to pointer plus, second operand is not an "
2822 "integer type of appropriate width");
2832 case UNORDERED_EXPR
:
2841 case TRUNC_DIV_EXPR
:
2843 case FLOOR_DIV_EXPR
:
2844 case ROUND_DIV_EXPR
:
2845 case TRUNC_MOD_EXPR
:
2847 case FLOOR_MOD_EXPR
:
2848 case ROUND_MOD_EXPR
:
2850 case EXACT_DIV_EXPR
:
2860 CHECK_OP (0, "invalid operand to binary operator");
2861 CHECK_OP (1, "invalid operand to binary operator");
2865 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2869 case CASE_LABEL_EXPR
:
2872 error ("invalid CASE_CHAIN");
2886 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2887 Returns true if there is an error, otherwise false. */
2890 verify_types_in_gimple_min_lval (tree expr
)
2894 if (is_gimple_id (expr
))
2897 if (TREE_CODE (expr
) != TARGET_MEM_REF
2898 && TREE_CODE (expr
) != MEM_REF
)
2900 error ("invalid expression for min lvalue");
2904 /* TARGET_MEM_REFs are strange beasts. */
2905 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
2908 op
= TREE_OPERAND (expr
, 0);
2909 if (!is_gimple_val (op
))
2911 error ("invalid operand in indirect reference");
2912 debug_generic_stmt (op
);
2915 /* Memory references now generally can involve a value conversion. */
/* NOTE(review): `verify_types_in_gimple_reference` — walks a chain of
   handled components (ARRAY_REF, COMPONENT_REF, REALPART/IMAGPART,
   VIEW_CONVERT_EXPR, ...) down to the base, type-checking each level,
   then validates a MEM_REF or TARGET_MEM_REF base, and finally defers
   to verify_types_in_gimple_min_lval.  Returns true on error.

   WARNING(review): corrupted, line-sampled extraction — braces, returns
   and several lines are missing (see gaps in the embedded original line
   numbers) and single lines are split.  Kept byte-identical; restore
   from pristine upstream before compiling.  */
2920 /* Verify if EXPR is a valid GIMPLE reference expression. If
2921 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2922 if there is an error, otherwise false. */
2925 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
2927 while (handled_component_p (expr
))
2929 tree op
= TREE_OPERAND (expr
, 0);
2931 if (TREE_CODE (expr
) == ARRAY_REF
2932 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
2934 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
2935 || (TREE_OPERAND (expr
, 2)
2936 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
2937 || (TREE_OPERAND (expr
, 3)
2938 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
2940 error ("invalid operands to array reference");
2941 debug_generic_stmt (expr
);
2946 /* Verify if the reference array element types are compatible. */
2947 if (TREE_CODE (expr
) == ARRAY_REF
2948 && !useless_type_conversion_p (TREE_TYPE (expr
),
2949 TREE_TYPE (TREE_TYPE (op
))))
2951 error ("type mismatch in array reference");
2952 debug_generic_stmt (TREE_TYPE (expr
));
2953 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2956 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
2957 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
2958 TREE_TYPE (TREE_TYPE (op
))))
2960 error ("type mismatch in array range reference");
2961 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
2962 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2966 if ((TREE_CODE (expr
) == REALPART_EXPR
2967 || TREE_CODE (expr
) == IMAGPART_EXPR
)
2968 && !useless_type_conversion_p (TREE_TYPE (expr
),
2969 TREE_TYPE (TREE_TYPE (op
))))
2971 error ("type mismatch in real/imagpart reference");
2972 debug_generic_stmt (TREE_TYPE (expr
));
2973 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2977 if (TREE_CODE (expr
) == COMPONENT_REF
2978 && !useless_type_conversion_p (TREE_TYPE (expr
),
2979 TREE_TYPE (TREE_OPERAND (expr
, 1))))
2981 error ("type mismatch in component reference");
2982 debug_generic_stmt (TREE_TYPE (expr
));
2983 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
2987 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
2989 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2990 that their operand is not an SSA name or an invariant when
2991 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2992 bug). Otherwise there is nothing to verify, gross mismatches at
2993 most invoke undefined behavior. */
2995 && (TREE_CODE (op
) == SSA_NAME
2996 || is_gimple_min_invariant (op
)))
2998 error ("conversion of an SSA_NAME on the left hand side");
2999 debug_generic_stmt (expr
);
3002 else if (TREE_CODE (op
) == SSA_NAME
3003 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3005 error ("conversion of register to a different size");
3006 debug_generic_stmt (expr
);
3009 else if (!handled_component_p (op
))
/* NOTE(review): below the component chain — validate the MEM_REF /
   TARGET_MEM_REF base operands (address and constant pointer offset).  */
3016 if (TREE_CODE (expr
) == MEM_REF
)
3018 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3020 error ("invalid address operand in MEM_REF");
3021 debug_generic_stmt (expr
);
3024 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3025 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3027 error ("invalid offset operand in MEM_REF");
3028 debug_generic_stmt (expr
);
3032 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3034 if (!TMR_BASE (expr
)
3035 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3037 error ("invalid address operand in TARGET_MEM_REF");
3040 if (!TMR_OFFSET (expr
)
3041 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3042 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3044 error ("invalid offset operand in TARGET_MEM_REF");
3045 debug_generic_stmt (expr
);
3050 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3051 && verify_types_in_gimple_min_lval (expr
));
3054 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3055 list of pointer-to types that is trivially convertible to DEST. */
3058 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3062 if (!TYPE_POINTER_TO (src_obj
))
3065 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3066 if (useless_type_conversion_p (dest
, src
))
3072 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3073 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3076 valid_fixed_convert_types_p (tree type1
, tree type2
)
3078 return (FIXED_POINT_TYPE_P (type1
)
3079 && (INTEGRAL_TYPE_P (type2
)
3080 || SCALAR_FLOAT_TYPE_P (type2
)
3081 || FIXED_POINT_TYPE_P (type2
)));
/* NOTE(review): `verify_gimple_call` — validates a GIMPLE_CALL: the call
   target (or internal-fn), pure/const decl flags, the LHS and its type
   against the function type, the static chain, and each argument.
   Returns true when there is a problem.

   WARNING(review): corrupted, line-sampled extraction — several `if`
   heads are truncated (e.g. the conditions starting at original lines
   3120, 3130 and 3155 are missing their first line) and braces/returns
   are gone.  Kept byte-identical; restore from pristine upstream before
   compiling.  */
3084 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3085 is a problem, otherwise false. */
3088 verify_gimple_call (gimple stmt
)
3090 tree fn
= gimple_call_fn (stmt
);
3091 tree fntype
, fndecl
;
3094 if (gimple_call_internal_p (stmt
))
3098 error ("gimple call has two targets");
3099 debug_generic_stmt (fn
);
3107 error ("gimple call has no target");
3112 if (fn
&& !is_gimple_call_addr (fn
))
3114 error ("invalid function in gimple call");
3115 debug_generic_stmt (fn
);
3120 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3121 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3122 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3124 error ("non-function in gimple call");
3128 fndecl
= gimple_call_fndecl (stmt
);
3130 && TREE_CODE (fndecl
) == FUNCTION_DECL
3131 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3132 && !DECL_PURE_P (fndecl
)
3133 && !TREE_READONLY (fndecl
))
3135 error ("invalid pure const state for function");
3139 if (gimple_call_lhs (stmt
)
3140 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3141 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3143 error ("invalid LHS in gimple call");
3147 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3149 error ("LHS in noreturn call");
3153 fntype
= gimple_call_fntype (stmt
);
3155 && gimple_call_lhs (stmt
)
3156 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3158 /* ??? At least C++ misses conversions at assignments from
3159 void * call results.
3160 ??? Java is completely off. Especially with functions
3161 returning java.lang.Object.
3162 For now simply allow arbitrary pointer type conversions. */
3163 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3164 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3166 error ("invalid conversion in gimple call");
3167 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3168 debug_generic_stmt (TREE_TYPE (fntype
));
3172 if (gimple_call_chain (stmt
)
3173 && !is_gimple_val (gimple_call_chain (stmt
)))
3175 error ("invalid static chain in gimple call");
3176 debug_generic_stmt (gimple_call_chain (stmt
));
3180 /* If there is a static chain argument, this should not be an indirect
3181 call, and the decl should have DECL_STATIC_CHAIN set. */
3182 if (gimple_call_chain (stmt
))
3184 if (!gimple_call_fndecl (stmt
))
3186 error ("static chain in indirect gimple call");
3189 fn
= TREE_OPERAND (fn
, 0);
3191 if (!DECL_STATIC_CHAIN (fn
))
3193 error ("static chain with function that doesn%'t use one");
3198 /* ??? The C frontend passes unpromoted arguments in case it
3199 didn't see a function declaration before the call. So for now
3200 leave the call arguments mostly unverified. Once we gimplify
3201 unit-at-a-time we have a chance to fix this. */
3203 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3205 tree arg
= gimple_call_arg (stmt
, i
);
3206 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3207 && !is_gimple_val (arg
))
3208 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3209 && !is_gimple_lvalue (arg
)))
3211 error ("invalid argument to gimple call");
3212 debug_generic_expr (arg
);
/* NOTE(review): `verify_gimple_comparison` — checks a comparison with
   result type TYPE and operands OP0/OP1: operands must be GIMPLE values
   with mutually convertible (or same-mode pointer) types, and the result
   must be an effective boolean or a matching signed integer vector type.
   Returns true on error.

   WARNING(review): corrupted, line-sampled extraction — braces and
   returns are missing and single lines are split.  Kept byte-identical;
   restore from pristine upstream before compiling.  */
3220 /* Verifies the gimple comparison with the result type TYPE and
3221 the operands OP0 and OP1. */
3224 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3226 tree op0_type
= TREE_TYPE (op0
);
3227 tree op1_type
= TREE_TYPE (op1
);
3229 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3231 error ("invalid operands in gimple comparison");
3235 /* For comparisons we do not have the operations type as the
3236 effective type the comparison is carried out in. Instead
3237 we require that either the first operand is trivially
3238 convertible into the second, or the other way around.
3239 Because we special-case pointers to void we allow
3240 comparisons of pointers with the same mode as well. */
3241 if (!useless_type_conversion_p (op0_type
, op1_type
)
3242 && !useless_type_conversion_p (op1_type
, op0_type
)
3243 && (!POINTER_TYPE_P (op0_type
)
3244 || !POINTER_TYPE_P (op1_type
)
3245 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3247 error ("mismatching comparison operand types");
3248 debug_generic_expr (op0_type
);
3249 debug_generic_expr (op1_type
);
3253 /* The resulting type of a comparison may be an effective boolean type. */
3254 if (INTEGRAL_TYPE_P (type
)
3255 && (TREE_CODE (type
) == BOOLEAN_TYPE
3256 || TYPE_PRECISION (type
) == 1))
3258 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3259 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3261 error ("vector comparison returning a boolean");
3262 debug_generic_expr (op0_type
);
3263 debug_generic_expr (op1_type
);
3267 /* Or an integer vector type with the same size and element count
3268 as the comparison operand types. */
3269 else if (TREE_CODE (type
) == VECTOR_TYPE
3270 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3272 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3273 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3275 error ("non-vector operands in vector comparison");
3276 debug_generic_expr (op0_type
);
3277 debug_generic_expr (op1_type
);
3281 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3282 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3283 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3284 /* The result of a vector comparison is of signed
3286 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3288 error ("invalid vector comparison resulting type");
3289 debug_generic_expr (type
);
3295 error ("bogus comparison result type");
3296 debug_generic_expr (type
);
/* NOTE(review): `verify_gimple_assign_unary` — validates a GIMPLE_ASSIGN
   with a unary RHS: register LHS, GIMPLE-value operand, then per-code
   type rules for conversions (nop, address-space, fixed-point,
   float<->int) and finally a no-conversion check for the remaining
   codes.  Returns true on error.

   WARNING(review): corrupted, line-sampled extraction — most `case`
   labels, braces and returns are missing (see gaps in the embedded line
   numbers).  Kept byte-identical; restore from pristine upstream before
   compiling.  */
3303 /* Verify a gimple assignment statement STMT with an unary rhs.
3304 Returns true if anything is wrong. */
3307 verify_gimple_assign_unary (gimple stmt
)
3309 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3310 tree lhs
= gimple_assign_lhs (stmt
);
3311 tree lhs_type
= TREE_TYPE (lhs
);
3312 tree rhs1
= gimple_assign_rhs1 (stmt
);
3313 tree rhs1_type
= TREE_TYPE (rhs1
);
3315 if (!is_gimple_reg (lhs
))
3317 error ("non-register as LHS of unary operation");
3321 if (!is_gimple_val (rhs1
))
3323 error ("invalid operand in unary operation");
3327 /* First handle conversions. */
3332 /* Allow conversions from pointer type to integral type only if
3333 there is no sign or zero extension involved.
3334 For targets were the precision of ptrofftype doesn't match that
3335 of pointers we need to allow arbitrary conversions to ptrofftype. */
3336 if ((POINTER_TYPE_P (lhs_type
)
3337 && INTEGRAL_TYPE_P (rhs1_type
))
3338 || (POINTER_TYPE_P (rhs1_type
)
3339 && INTEGRAL_TYPE_P (lhs_type
)
3340 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3341 || ptrofftype_p (sizetype
))))
3344 /* Allow conversion from integral to offset type and vice versa. */
3345 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3346 && INTEGRAL_TYPE_P (rhs1_type
))
3347 || (INTEGRAL_TYPE_P (lhs_type
)
3348 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3351 /* Otherwise assert we are converting between types of the
3353 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3355 error ("invalid types in nop conversion");
3356 debug_generic_expr (lhs_type
);
3357 debug_generic_expr (rhs1_type
);
3364 case ADDR_SPACE_CONVERT_EXPR
:
3366 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3367 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3368 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3370 error ("invalid types in address space conversion");
3371 debug_generic_expr (lhs_type
);
3372 debug_generic_expr (rhs1_type
);
3379 case FIXED_CONVERT_EXPR
:
3381 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3382 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3384 error ("invalid types in fixed-point conversion");
3385 debug_generic_expr (lhs_type
);
3386 debug_generic_expr (rhs1_type
);
3395 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3396 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3397 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3399 error ("invalid types in conversion to floating point");
3400 debug_generic_expr (lhs_type
);
3401 debug_generic_expr (rhs1_type
);
3408 case FIX_TRUNC_EXPR
:
3410 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3411 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3412 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3414 error ("invalid types in conversion to integer");
3415 debug_generic_expr (lhs_type
);
3416 debug_generic_expr (rhs1_type
);
3423 case VEC_UNPACK_HI_EXPR
:
3424 case VEC_UNPACK_LO_EXPR
:
3425 case REDUC_MAX_EXPR
:
3426 case REDUC_MIN_EXPR
:
3427 case REDUC_PLUS_EXPR
:
3428 case VEC_UNPACK_FLOAT_HI_EXPR
:
3429 case VEC_UNPACK_FLOAT_LO_EXPR
:
3437 case NON_LVALUE_EXPR
:
3445 /* For the remaining codes assert there is no conversion involved. */
3446 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3448 error ("non-trivial conversion in unary operation");
3449 debug_generic_expr (lhs_type
);
3450 debug_generic_expr (rhs1_type
);
/* NOTE(review): `verify_gimple_assign_binary` — validates a GIMPLE_ASSIGN
   with a binary RHS: register LHS, GIMPLE-value operands, then per-code
   type rules (complex construction, shifts/rotates, vector shifts,
   widening shifts, plus/minus incl. vector element checks, pointer-plus,
   comparisons, widening multiplies) and a generic same-type check for
   the rest.  Returns true on error.

   WARNING(review): corrupted, line-sampled extraction — many `case`
   labels, braces and returns are missing (see gaps in the embedded line
   numbers).  Kept byte-identical; restore from pristine upstream before
   compiling.  */
3457 /* Verify a gimple assignment statement STMT with a binary rhs.
3458 Returns true if anything is wrong. */
3461 verify_gimple_assign_binary (gimple stmt
)
3463 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3464 tree lhs
= gimple_assign_lhs (stmt
);
3465 tree lhs_type
= TREE_TYPE (lhs
);
3466 tree rhs1
= gimple_assign_rhs1 (stmt
);
3467 tree rhs1_type
= TREE_TYPE (rhs1
);
3468 tree rhs2
= gimple_assign_rhs2 (stmt
);
3469 tree rhs2_type
= TREE_TYPE (rhs2
);
3471 if (!is_gimple_reg (lhs
))
3473 error ("non-register as LHS of binary operation");
3477 if (!is_gimple_val (rhs1
)
3478 || !is_gimple_val (rhs2
))
3480 error ("invalid operands in binary operation");
3484 /* First handle operations that involve different types. */
3489 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3490 || !(INTEGRAL_TYPE_P (rhs1_type
)
3491 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3492 || !(INTEGRAL_TYPE_P (rhs2_type
)
3493 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3495 error ("type mismatch in complex expression");
3496 debug_generic_expr (lhs_type
);
3497 debug_generic_expr (rhs1_type
);
3498 debug_generic_expr (rhs2_type
);
3510 /* Shifts and rotates are ok on integral types, fixed point
3511 types and integer vector types. */
3512 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3513 && !FIXED_POINT_TYPE_P (rhs1_type
)
3514 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3515 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3516 || (!INTEGRAL_TYPE_P (rhs2_type
)
3517 /* Vector shifts of vectors are also ok. */
3518 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3519 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3520 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3521 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3522 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3524 error ("type mismatch in shift expression");
3525 debug_generic_expr (lhs_type
);
3526 debug_generic_expr (rhs1_type
);
3527 debug_generic_expr (rhs2_type
);
3534 case VEC_LSHIFT_EXPR
:
3535 case VEC_RSHIFT_EXPR
:
3537 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3538 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3539 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3540 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3541 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3542 || (!INTEGRAL_TYPE_P (rhs2_type
)
3543 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3544 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3545 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3547 error ("type mismatch in vector shift expression");
3548 debug_generic_expr (lhs_type
);
3549 debug_generic_expr (rhs1_type
);
3550 debug_generic_expr (rhs2_type
);
3553 /* For shifting a vector of non-integral components we
3554 only allow shifting by a constant multiple of the element size. */
3555 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3556 && (TREE_CODE (rhs2
) != INTEGER_CST
3557 || !div_if_zero_remainder (EXACT_DIV_EXPR
, rhs2
,
3558 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3560 error ("non-element sized vector shift of floating point vector");
3567 case WIDEN_LSHIFT_EXPR
:
3569 if (!INTEGRAL_TYPE_P (lhs_type
)
3570 || !INTEGRAL_TYPE_P (rhs1_type
)
3571 || TREE_CODE (rhs2
) != INTEGER_CST
3572 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3574 error ("type mismatch in widening vector shift expression");
3575 debug_generic_expr (lhs_type
);
3576 debug_generic_expr (rhs1_type
);
3577 debug_generic_expr (rhs2_type
);
3584 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3585 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3587 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3588 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3589 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3590 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3591 || TREE_CODE (rhs2
) != INTEGER_CST
3592 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3593 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3595 error ("type mismatch in widening vector shift expression");
3596 debug_generic_expr (lhs_type
);
3597 debug_generic_expr (rhs1_type
);
3598 debug_generic_expr (rhs2_type
);
/* NOTE(review): PLUS_EXPR/MINUS_EXPR case — for vector operands compare
   element types; reject pointer (element) types, which must use
   POINTER_PLUS_EXPR instead.  */
3608 tree lhs_etype
= lhs_type
;
3609 tree rhs1_etype
= rhs1_type
;
3610 tree rhs2_etype
= rhs2_type
;
3611 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3613 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3614 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3616 error ("invalid non-vector operands to vector valued plus");
3619 lhs_etype
= TREE_TYPE (lhs_type
);
3620 rhs1_etype
= TREE_TYPE (rhs1_type
);
3621 rhs2_etype
= TREE_TYPE (rhs2_type
);
3623 if (POINTER_TYPE_P (lhs_etype
)
3624 || POINTER_TYPE_P (rhs1_etype
)
3625 || POINTER_TYPE_P (rhs2_etype
))
3627 error ("invalid (pointer) operands to plus/minus");
3631 /* Continue with generic binary expression handling. */
3635 case POINTER_PLUS_EXPR
:
3637 if (!POINTER_TYPE_P (rhs1_type
)
3638 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3639 || !ptrofftype_p (rhs2_type
))
3641 error ("type mismatch in pointer plus expression");
3642 debug_generic_stmt (lhs_type
);
3643 debug_generic_stmt (rhs1_type
);
3644 debug_generic_stmt (rhs2_type
);
3651 case TRUTH_ANDIF_EXPR
:
3652 case TRUTH_ORIF_EXPR
:
3653 case TRUTH_AND_EXPR
:
3655 case TRUTH_XOR_EXPR
:
3665 case UNORDERED_EXPR
:
3673 /* Comparisons are also binary, but the result type is not
3674 connected to the operand types. */
3675 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3677 case WIDEN_MULT_EXPR
:
3678 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3680 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3681 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3683 case WIDEN_SUM_EXPR
:
3684 case VEC_WIDEN_MULT_HI_EXPR
:
3685 case VEC_WIDEN_MULT_LO_EXPR
:
3686 case VEC_WIDEN_MULT_EVEN_EXPR
:
3687 case VEC_WIDEN_MULT_ODD_EXPR
:
3688 case VEC_PACK_TRUNC_EXPR
:
3689 case VEC_PACK_SAT_EXPR
:
3690 case VEC_PACK_FIX_TRUNC_EXPR
:
3695 case MULT_HIGHPART_EXPR
:
3696 case TRUNC_DIV_EXPR
:
3698 case FLOOR_DIV_EXPR
:
3699 case ROUND_DIV_EXPR
:
3700 case TRUNC_MOD_EXPR
:
3702 case FLOOR_MOD_EXPR
:
3703 case ROUND_MOD_EXPR
:
3705 case EXACT_DIV_EXPR
:
3711 /* Continue with generic binary expression handling. */
3718 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3719 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3721 error ("type mismatch in binary expression");
3722 debug_generic_stmt (lhs_type
);
3723 debug_generic_stmt (rhs1_type
);
3724 debug_generic_stmt (rhs2_type
);
/* NOTE(review): `verify_gimple_assign_ternary` — validates a
   GIMPLE_ASSIGN with a ternary RHS: register LHS, valid operands
   (condexpr for [VEC_]COND_EXPR first operand), then per-code type
   rules for widening multiply-accumulate, FMA, conditionals and vector
   permutes (matching element counts and mask type).  Returns true on
   error.

   WARNING(review): corrupted, line-sampled extraction — `case` labels,
   braces and returns are missing (see gaps in the embedded line
   numbers).  Kept byte-identical; restore from pristine upstream before
   compiling.  */
3731 /* Verify a gimple assignment statement STMT with a ternary rhs.
3732 Returns true if anything is wrong. */
3735 verify_gimple_assign_ternary (gimple stmt
)
3737 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3738 tree lhs
= gimple_assign_lhs (stmt
);
3739 tree lhs_type
= TREE_TYPE (lhs
);
3740 tree rhs1
= gimple_assign_rhs1 (stmt
);
3741 tree rhs1_type
= TREE_TYPE (rhs1
);
3742 tree rhs2
= gimple_assign_rhs2 (stmt
);
3743 tree rhs2_type
= TREE_TYPE (rhs2
);
3744 tree rhs3
= gimple_assign_rhs3 (stmt
);
3745 tree rhs3_type
= TREE_TYPE (rhs3
);
3747 if (!is_gimple_reg (lhs
))
3749 error ("non-register as LHS of ternary operation");
3753 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3754 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3755 || !is_gimple_val (rhs2
)
3756 || !is_gimple_val (rhs3
))
3758 error ("invalid operands in ternary operation");
3762 /* First handle operations that involve different types. */
3765 case WIDEN_MULT_PLUS_EXPR
:
3766 case WIDEN_MULT_MINUS_EXPR
:
3767 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3768 && !FIXED_POINT_TYPE_P (rhs1_type
))
3769 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3770 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3771 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3772 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3774 error ("type mismatch in widening multiply-accumulate expression");
3775 debug_generic_expr (lhs_type
);
3776 debug_generic_expr (rhs1_type
);
3777 debug_generic_expr (rhs2_type
);
3778 debug_generic_expr (rhs3_type
);
3784 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3785 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3786 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3788 error ("type mismatch in fused multiply-add expression");
3789 debug_generic_expr (lhs_type
);
3790 debug_generic_expr (rhs1_type
);
3791 debug_generic_expr (rhs2_type
);
3792 debug_generic_expr (rhs3_type
);
3799 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3800 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3802 error ("type mismatch in conditional expression");
3803 debug_generic_expr (lhs_type
);
3804 debug_generic_expr (rhs2_type
);
3805 debug_generic_expr (rhs3_type
);
3811 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3812 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3814 error ("type mismatch in vector permute expression");
3815 debug_generic_expr (lhs_type
);
3816 debug_generic_expr (rhs1_type
);
3817 debug_generic_expr (rhs2_type
);
3818 debug_generic_expr (rhs3_type
);
3822 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3823 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3824 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3826 error ("vector types expected in vector permute expression");
3827 debug_generic_expr (lhs_type
);
3828 debug_generic_expr (rhs1_type
);
3829 debug_generic_expr (rhs2_type
);
3830 debug_generic_expr (rhs3_type
);
3834 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3835 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3836 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3837 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3838 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3840 error ("vectors with different element number found "
3841 "in vector permute expression");
3842 debug_generic_expr (lhs_type
);
3843 debug_generic_expr (rhs1_type
);
3844 debug_generic_expr (rhs2_type
);
3845 debug_generic_expr (rhs3_type
);
3849 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3850 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3853 error ("invalid mask type in vector permute expression");
3854 debug_generic_expr (lhs_type
);
3855 debug_generic_expr (rhs1_type
);
3856 debug_generic_expr (rhs2_type
);
3857 debug_generic_expr (rhs3_type
);
3864 case REALIGN_LOAD_EXPR
:
/* NOTE(review): `verify_gimple_assign_single` — validates a
   GIMPLE_ASSIGN with a single RHS: trivial-conversion LHS/RHS types,
   clobber LHS shape, reference checks on LHS and RHS, ADDR_EXPR operand
   and type compatibility, memory-store shapes, and vector CONSTRUCTOR
   element types/counts/indexes.  Returns true on error.

   WARNING(review): corrupted, line-sampled extraction — `case` labels,
   braces and returns are missing (see gaps in the embedded line
   numbers).  Kept byte-identical; restore from pristine upstream before
   compiling.  */
3874 /* Verify a gimple assignment statement STMT with a single rhs.
3875 Returns true if anything is wrong. */
3878 verify_gimple_assign_single (gimple stmt
)
3880 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3881 tree lhs
= gimple_assign_lhs (stmt
);
3882 tree lhs_type
= TREE_TYPE (lhs
);
3883 tree rhs1
= gimple_assign_rhs1 (stmt
);
3884 tree rhs1_type
= TREE_TYPE (rhs1
);
3887 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3889 error ("non-trivial conversion at assignment");
3890 debug_generic_expr (lhs_type
);
3891 debug_generic_expr (rhs1_type
);
3895 if (gimple_clobber_p (stmt
)
3896 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
3898 error ("non-decl/MEM_REF LHS in clobber statement");
3899 debug_generic_expr (lhs
);
3903 if (handled_component_p (lhs
))
3904 res
|= verify_types_in_gimple_reference (lhs
, true);
3906 /* Special codes we cannot handle via their class. */
/* NOTE(review): ADDR_EXPR case — operand must be addressable and its
   type compatible with the pointed-to type (modulo LTO type merging).  */
3911 tree op
= TREE_OPERAND (rhs1
, 0);
3912 if (!is_gimple_addressable (op
))
3914 error ("invalid operand in unary expression");
3918 /* Technically there is no longer a need for matching types, but
3919 gimple hygiene asks for this check. In LTO we can end up
3920 combining incompatible units and thus end up with addresses
3921 of globals that change their type to a common one. */
3923 && !types_compatible_p (TREE_TYPE (op
),
3924 TREE_TYPE (TREE_TYPE (rhs1
)))
3925 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
3928 error ("type mismatch in address expression");
3929 debug_generic_stmt (TREE_TYPE (rhs1
));
3930 debug_generic_stmt (TREE_TYPE (op
));
3934 return verify_types_in_gimple_reference (op
, true);
3939 error ("INDIRECT_REF in gimple IL");
3945 case ARRAY_RANGE_REF
:
3946 case VIEW_CONVERT_EXPR
:
3949 case TARGET_MEM_REF
:
3951 if (!is_gimple_reg (lhs
)
3952 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3954 error ("invalid rhs for gimple memory store");
3955 debug_generic_stmt (lhs
);
3956 debug_generic_stmt (rhs1
);
3959 return res
|| verify_types_in_gimple_reference (rhs1
, false);
3971 /* tcc_declaration */
3976 if (!is_gimple_reg (lhs
)
3977 && !is_gimple_reg (rhs1
)
3978 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3980 error ("invalid rhs for gimple memory store");
3981 debug_generic_stmt (lhs
);
3982 debug_generic_stmt (rhs1
);
3988 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
3991 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
3993 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
3995 /* For vector CONSTRUCTORs we require that either it is empty
3996 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3997 (then the element count must be correct to cover the whole
3998 outer vector and index must be NULL on all elements, or it is
3999 a CONSTRUCTOR of scalar elements, where we as an exception allow
4000 smaller number of elements (assuming zero filling) and
4001 consecutive indexes as compared to NULL indexes (such
4002 CONSTRUCTORs can appear in the IL from FEs). */
4003 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4005 if (elt_t
== NULL_TREE
)
4007 elt_t
= TREE_TYPE (elt_v
);
4008 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4010 tree elt_t
= TREE_TYPE (elt_v
);
4011 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4014 error ("incorrect type of vector CONSTRUCTOR"
4016 debug_generic_stmt (rhs1
);
4019 else if (CONSTRUCTOR_NELTS (rhs1
)
4020 * TYPE_VECTOR_SUBPARTS (elt_t
)
4021 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4023 error ("incorrect number of vector CONSTRUCTOR"
4025 debug_generic_stmt (rhs1
);
4029 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4032 error ("incorrect type of vector CONSTRUCTOR elements");
4033 debug_generic_stmt (rhs1
);
4036 else if (CONSTRUCTOR_NELTS (rhs1
)
> TYPE_VECTOR_SUBPARTS (rhs1_type
))
4039 error ("incorrect number of vector CONSTRUCTOR elements");
4040 debug_generic_stmt (rhs1
);
4044 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4046 error ("incorrect type of vector CONSTRUCTOR elements");
4047 debug_generic_stmt (rhs1
);
4050 if (elt_i
!= NULL_TREE
4051 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4052 || TREE_CODE (elt_i
) != INTEGER_CST
4053 || compare_tree_int (elt_i
, i
) != 0))
4055 error ("vector CONSTRUCTOR with non-NULL element index");
4056 debug_generic_stmt (rhs1
);
4064 case WITH_SIZE_EXPR
:
4074 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4075 is a problem, otherwise false. */
4078 verify_gimple_assign (gimple stmt
)
4080 switch (gimple_assign_rhs_class (stmt
))
4082 case GIMPLE_SINGLE_RHS
:
4083 return verify_gimple_assign_single (stmt
);
4085 case GIMPLE_UNARY_RHS
:
4086 return verify_gimple_assign_unary (stmt
);
4088 case GIMPLE_BINARY_RHS
:
4089 return verify_gimple_assign_binary (stmt
);
4091 case GIMPLE_TERNARY_RHS
:
4092 return verify_gimple_assign_ternary (stmt
);
4099 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4100 is a problem, otherwise false. */
4103 verify_gimple_return (gimple stmt
)
4105 tree op
= gimple_return_retval (stmt
);
4106 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4108 /* We cannot test for present return values as we do not fix up missing
4109 return values from the original source. */
4113 if (!is_gimple_val (op
)
4114 && TREE_CODE (op
) != RESULT_DECL
)
4116 error ("invalid operand in return statement");
4117 debug_generic_stmt (op
);
4121 if ((TREE_CODE (op
) == RESULT_DECL
4122 && DECL_BY_REFERENCE (op
))
4123 || (TREE_CODE (op
) == SSA_NAME
4124 && SSA_NAME_VAR (op
)
4125 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4126 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4127 op
= TREE_TYPE (op
);
4129 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4131 error ("invalid conversion in return statement");
4132 debug_generic_stmt (restype
);
4133 debug_generic_stmt (TREE_TYPE (op
));
4141 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4142 is a problem, otherwise false. */
4145 verify_gimple_goto (gimple stmt
)
4147 tree dest
= gimple_goto_dest (stmt
);
4149 /* ??? We have two canonical forms of direct goto destinations, a
4150 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4151 if (TREE_CODE (dest
) != LABEL_DECL
4152 && (!is_gimple_val (dest
)
4153 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4155 error ("goto destination is neither a label nor a pointer");
4162 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4163 is a problem, otherwise false. */
4166 verify_gimple_switch (gimple stmt
)
4169 tree elt
, prev_upper_bound
= NULL_TREE
;
4170 tree index_type
, elt_type
= NULL_TREE
;
4172 if (!is_gimple_val (gimple_switch_index (stmt
)))
4174 error ("invalid operand to switch statement");
4175 debug_generic_stmt (gimple_switch_index (stmt
));
4179 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4180 if (! INTEGRAL_TYPE_P (index_type
))
4182 error ("non-integral type switch statement");
4183 debug_generic_expr (index_type
);
4187 elt
= gimple_switch_label (stmt
, 0);
4188 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4190 error ("invalid default case label in switch statement");
4191 debug_generic_expr (elt
);
4195 n
= gimple_switch_num_labels (stmt
);
4196 for (i
= 1; i
< n
; i
++)
4198 elt
= gimple_switch_label (stmt
, i
);
4200 if (! CASE_LOW (elt
))
4202 error ("invalid case label in switch statement");
4203 debug_generic_expr (elt
);
4207 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4209 error ("invalid case range in switch statement");
4210 debug_generic_expr (elt
);
4216 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4217 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4219 error ("type mismatch for case label in switch statement");
4220 debug_generic_expr (elt
);
4226 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4227 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4229 error ("type precision mismatch in switch statement");
4234 if (prev_upper_bound
)
4236 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4238 error ("case labels not sorted in switch statement");
4243 prev_upper_bound
= CASE_HIGH (elt
);
4244 if (! prev_upper_bound
)
4245 prev_upper_bound
= CASE_LOW (elt
);
4251 /* Verify a gimple debug statement STMT.
4252 Returns true if anything is wrong. */
4255 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4257 /* There isn't much that could be wrong in a gimple debug stmt. A
4258 gimple debug bind stmt, for example, maps a tree, that's usually
4259 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4260 component or member of an aggregate type, to another tree, that
4261 can be an arbitrary expression. These stmts expand into debug
4262 insns, and are converted to debug notes by var-tracking.c. */
4266 /* Verify a gimple label statement STMT.
4267 Returns true if anything is wrong. */
4270 verify_gimple_label (gimple stmt
)
4272 tree decl
= gimple_label_label (stmt
);
4276 if (TREE_CODE (decl
) != LABEL_DECL
)
4278 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4279 && DECL_CONTEXT (decl
) != current_function_decl
)
4281 error ("label's context is not the current function decl");
4285 uid
= LABEL_DECL_UID (decl
);
4288 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4290 error ("incorrect entry in label_to_block_map");
4294 uid
= EH_LANDING_PAD_NR (decl
);
4297 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4298 if (decl
!= lp
->post_landing_pad
)
4300 error ("incorrect setting of landing pad number");
4308 /* Verify the GIMPLE statement STMT. Returns true if there is an
4309 error, otherwise false. */
4312 verify_gimple_stmt (gimple stmt
)
4314 switch (gimple_code (stmt
))
4317 return verify_gimple_assign (stmt
);
4320 return verify_gimple_label (stmt
);
4323 return verify_gimple_call (stmt
);
4326 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4328 error ("invalid comparison code in gimple cond");
4331 if (!(!gimple_cond_true_label (stmt
)
4332 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4333 || !(!gimple_cond_false_label (stmt
)
4334 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4336 error ("invalid labels in gimple cond");
4340 return verify_gimple_comparison (boolean_type_node
,
4341 gimple_cond_lhs (stmt
),
4342 gimple_cond_rhs (stmt
));
4345 return verify_gimple_goto (stmt
);
4348 return verify_gimple_switch (stmt
);
4351 return verify_gimple_return (stmt
);
4356 case GIMPLE_TRANSACTION
:
4357 return verify_gimple_transaction (stmt
);
4359 /* Tuples that do not have tree operands. */
4361 case GIMPLE_PREDICT
:
4363 case GIMPLE_EH_DISPATCH
:
4364 case GIMPLE_EH_MUST_NOT_THROW
:
4368 /* OpenMP directives are validated by the FE and never operated
4369 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4370 non-gimple expressions when the main index variable has had
4371 its address taken. This does not affect the loop itself
4372 because the header of an GIMPLE_OMP_FOR is merely used to determine
4373 how to setup the parallel iteration. */
4377 return verify_gimple_debug (stmt
);
4384 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4385 and false otherwise. */
4388 verify_gimple_phi (gimple phi
)
4392 tree phi_result
= gimple_phi_result (phi
);
4397 error ("invalid PHI result");
4401 virtual_p
= virtual_operand_p (phi_result
);
4402 if (TREE_CODE (phi_result
) != SSA_NAME
4404 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4406 error ("invalid PHI result");
4410 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4412 tree t
= gimple_phi_arg_def (phi
, i
);
4416 error ("missing PHI def");
4420 /* Addressable variables do have SSA_NAMEs but they
4421 are not considered gimple values. */
4422 else if ((TREE_CODE (t
) == SSA_NAME
4423 && virtual_p
!= virtual_operand_p (t
))
4425 && (TREE_CODE (t
) != SSA_NAME
4426 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4428 && !is_gimple_val (t
)))
4430 error ("invalid PHI argument");
4431 debug_generic_expr (t
);
4434 #ifdef ENABLE_TYPES_CHECKING
4435 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4437 error ("incompatible types in PHI argument %u", i
);
4438 debug_generic_stmt (TREE_TYPE (phi_result
));
4439 debug_generic_stmt (TREE_TYPE (t
));
4448 /* Verify the GIMPLE statements inside the sequence STMTS. */
4451 verify_gimple_in_seq_2 (gimple_seq stmts
)
4453 gimple_stmt_iterator ittr
;
4456 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4458 gimple stmt
= gsi_stmt (ittr
);
4460 switch (gimple_code (stmt
))
4463 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4467 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4468 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4471 case GIMPLE_EH_FILTER
:
4472 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4475 case GIMPLE_EH_ELSE
:
4476 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4477 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4481 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4484 case GIMPLE_TRANSACTION
:
4485 err
|= verify_gimple_transaction (stmt
);
4490 bool err2
= verify_gimple_stmt (stmt
);
4492 debug_gimple_stmt (stmt
);
4501 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4502 is a problem, otherwise false. */
4505 verify_gimple_transaction (gimple stmt
)
4507 tree lab
= gimple_transaction_label (stmt
);
4508 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4510 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4514 /* Verify the GIMPLE statements inside the statement list STMTS. */
4517 verify_gimple_in_seq (gimple_seq stmts
)
4519 timevar_push (TV_TREE_STMT_VERIFY
);
4520 if (verify_gimple_in_seq_2 (stmts
))
4521 internal_error ("verify_gimple failed");
4522 timevar_pop (TV_TREE_STMT_VERIFY
);
4525 /* Return true when the T can be shared. */
4528 tree_node_can_be_shared (tree t
)
4530 if (IS_TYPE_OR_DECL_P (t
)
4531 || is_gimple_min_invariant (t
)
4532 || TREE_CODE (t
) == SSA_NAME
4533 || t
== error_mark_node
4534 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4537 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4546 /* Called via walk_tree. Verify tree sharing. */
4549 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4551 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4553 if (tree_node_can_be_shared (*tp
))
4555 *walk_subtrees
= false;
4559 if (pointer_set_insert (visited
, *tp
))
4565 /* Called via walk_gimple_stmt. Verify tree sharing. */
4568 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4570 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4571 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4574 static bool eh_error_found
;
4576 verify_eh_throw_stmt_node (void **slot
, void *data
)
4578 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4579 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4581 if (!pointer_set_contains (visited
, node
->stmt
))
4583 error ("dead STMT in EH table");
4584 debug_gimple_stmt (node
->stmt
);
4585 eh_error_found
= true;
4590 /* Verify if the location LOCs block is in BLOCKS. */
4593 verify_location (pointer_set_t
*blocks
, location_t loc
)
4595 tree block
= LOCATION_BLOCK (loc
);
4596 if (block
!= NULL_TREE
4597 && !pointer_set_contains (blocks
, block
))
4599 error ("location references block not in block tree");
4602 if (block
!= NULL_TREE
)
4603 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4607 /* Called via walk_tree. Verify that expressions have no blocks. */
4610 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4614 *walk_subtrees
= false;
4618 location_t loc
= EXPR_LOCATION (*tp
);
4619 if (LOCATION_BLOCK (loc
) != NULL
)
4625 /* Called via walk_tree. Verify locations of expressions. */
4628 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4630 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4632 if (TREE_CODE (*tp
) == VAR_DECL
4633 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4635 tree t
= DECL_DEBUG_EXPR (*tp
);
4636 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4640 if ((TREE_CODE (*tp
) == VAR_DECL
4641 || TREE_CODE (*tp
) == PARM_DECL
4642 || TREE_CODE (*tp
) == RESULT_DECL
)
4643 && DECL_HAS_VALUE_EXPR_P (*tp
))
4645 tree t
= DECL_VALUE_EXPR (*tp
);
4646 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4653 *walk_subtrees
= false;
4657 location_t loc
= EXPR_LOCATION (*tp
);
4658 if (verify_location (blocks
, loc
))
4664 /* Called via walk_gimple_op. Verify locations of expressions. */
4667 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4669 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4670 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4673 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4676 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4679 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4681 pointer_set_insert (blocks
, t
);
4682 collect_subblocks (blocks
, t
);
4686 /* Verify the GIMPLE statements in the CFG of FN. */
4689 verify_gimple_in_cfg (struct function
*fn
)
4693 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4695 timevar_push (TV_TREE_STMT_VERIFY
);
4696 visited
= pointer_set_create ();
4697 visited_stmts
= pointer_set_create ();
4699 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4700 blocks
= pointer_set_create ();
4701 if (DECL_INITIAL (fn
->decl
))
4703 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4704 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4707 FOR_EACH_BB_FN (bb
, fn
)
4709 gimple_stmt_iterator gsi
;
4711 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4713 gimple phi
= gsi_stmt (gsi
);
4717 pointer_set_insert (visited_stmts
, phi
);
4719 if (gimple_bb (phi
) != bb
)
4721 error ("gimple_bb (phi) is set to a wrong basic block");
4725 err2
|= verify_gimple_phi (phi
);
4727 /* Only PHI arguments have locations. */
4728 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4730 error ("PHI node with location");
4734 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4736 tree arg
= gimple_phi_arg_def (phi
, i
);
4737 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4741 error ("incorrect sharing of tree nodes");
4742 debug_generic_expr (addr
);
4745 location_t loc
= gimple_phi_arg_location (phi
, i
);
4746 if (virtual_operand_p (gimple_phi_result (phi
))
4747 && loc
!= UNKNOWN_LOCATION
)
4749 error ("virtual PHI with argument locations");
4752 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4755 debug_generic_expr (addr
);
4758 err2
|= verify_location (blocks
, loc
);
4762 debug_gimple_stmt (phi
);
4766 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4768 gimple stmt
= gsi_stmt (gsi
);
4770 struct walk_stmt_info wi
;
4774 pointer_set_insert (visited_stmts
, stmt
);
4776 if (gimple_bb (stmt
) != bb
)
4778 error ("gimple_bb (stmt) is set to a wrong basic block");
4782 err2
|= verify_gimple_stmt (stmt
);
4783 err2
|= verify_location (blocks
, gimple_location (stmt
));
4785 memset (&wi
, 0, sizeof (wi
));
4786 wi
.info
= (void *) visited
;
4787 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4790 error ("incorrect sharing of tree nodes");
4791 debug_generic_expr (addr
);
4795 memset (&wi
, 0, sizeof (wi
));
4796 wi
.info
= (void *) blocks
;
4797 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4800 debug_generic_expr (addr
);
4804 /* ??? Instead of not checking these stmts at all the walker
4805 should know its context via wi. */
4806 if (!is_gimple_debug (stmt
)
4807 && !is_gimple_omp (stmt
))
4809 memset (&wi
, 0, sizeof (wi
));
4810 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4813 debug_generic_expr (addr
);
4814 inform (gimple_location (stmt
), "in statement");
4819 /* If the statement is marked as part of an EH region, then it is
4820 expected that the statement could throw. Verify that when we
4821 have optimizations that simplify statements such that we prove
4822 that they cannot throw, that we update other data structures
4824 lp_nr
= lookup_stmt_eh_lp (stmt
);
4827 if (!stmt_could_throw_p (stmt
))
4829 error ("statement marked for throw, but doesn%'t");
4833 && !gsi_one_before_end_p (gsi
)
4834 && stmt_can_throw_internal (stmt
))
4836 error ("statement marked for throw in middle of block");
4842 debug_gimple_stmt (stmt
);
4847 eh_error_found
= false;
4848 if (get_eh_throw_stmt_table (cfun
))
4849 htab_traverse (get_eh_throw_stmt_table (cfun
),
4850 verify_eh_throw_stmt_node
,
4853 if (err
|| eh_error_found
)
4854 internal_error ("verify_gimple failed");
4856 pointer_set_destroy (visited
);
4857 pointer_set_destroy (visited_stmts
);
4858 pointer_set_destroy (blocks
);
4859 verify_histograms ();
4860 timevar_pop (TV_TREE_STMT_VERIFY
);
4864 /* Verifies that the flow information is OK. */
4867 gimple_verify_flow_info (void)
4871 gimple_stmt_iterator gsi
;
4876 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4877 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4879 error ("ENTRY_BLOCK has IL associated with it");
4883 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4884 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4886 error ("EXIT_BLOCK has IL associated with it");
4890 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4891 if (e
->flags
& EDGE_FALLTHRU
)
4893 error ("fallthru to exit from bb %d", e
->src
->index
);
4897 FOR_EACH_BB_FN (bb
, cfun
)
4899 bool found_ctrl_stmt
= false;
4903 /* Skip labels on the start of basic block. */
4904 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4907 gimple prev_stmt
= stmt
;
4909 stmt
= gsi_stmt (gsi
);
4911 if (gimple_code (stmt
) != GIMPLE_LABEL
)
4914 label
= gimple_label_label (stmt
);
4915 if (prev_stmt
&& DECL_NONLOCAL (label
))
4917 error ("nonlocal label ");
4918 print_generic_expr (stderr
, label
, 0);
4919 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4924 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
4926 error ("EH landing pad label ");
4927 print_generic_expr (stderr
, label
, 0);
4928 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4933 if (label_to_block (label
) != bb
)
4936 print_generic_expr (stderr
, label
, 0);
4937 fprintf (stderr
, " to block does not match in bb %d",
4942 if (decl_function_context (label
) != current_function_decl
)
4945 print_generic_expr (stderr
, label
, 0);
4946 fprintf (stderr
, " has incorrect context in bb %d",
4952 /* Verify that body of basic block BB is free of control flow. */
4953 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
4955 gimple stmt
= gsi_stmt (gsi
);
4957 if (found_ctrl_stmt
)
4959 error ("control flow in the middle of basic block %d",
4964 if (stmt_ends_bb_p (stmt
))
4965 found_ctrl_stmt
= true;
4967 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4970 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
4971 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4976 gsi
= gsi_last_bb (bb
);
4977 if (gsi_end_p (gsi
))
4980 stmt
= gsi_stmt (gsi
);
4982 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4985 err
|= verify_eh_edges (stmt
);
4987 if (is_ctrl_stmt (stmt
))
4989 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4990 if (e
->flags
& EDGE_FALLTHRU
)
4992 error ("fallthru edge after a control statement in bb %d",
4998 if (gimple_code (stmt
) != GIMPLE_COND
)
5000 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5001 after anything else but if statement. */
5002 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5003 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5005 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5011 switch (gimple_code (stmt
))
5018 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5022 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5023 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5024 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5025 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5026 || EDGE_COUNT (bb
->succs
) >= 3)
5028 error ("wrong outgoing edge flags at end of bb %d",
5036 if (simple_goto_p (stmt
))
5038 error ("explicit goto at end of bb %d", bb
->index
);
5043 /* FIXME. We should double check that the labels in the
5044 destination blocks have their address taken. */
5045 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5046 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5047 | EDGE_FALSE_VALUE
))
5048 || !(e
->flags
& EDGE_ABNORMAL
))
5050 error ("wrong outgoing edge flags at end of bb %d",
5058 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5060 /* ... fallthru ... */
5062 if (!single_succ_p (bb
)
5063 || (single_succ_edge (bb
)->flags
5064 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5065 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5067 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5070 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5072 error ("return edge does not point to exit in bb %d",
5084 n
= gimple_switch_num_labels (stmt
);
5086 /* Mark all the destination basic blocks. */
5087 for (i
= 0; i
< n
; ++i
)
5089 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5090 basic_block label_bb
= label_to_block (lab
);
5091 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5092 label_bb
->aux
= (void *)1;
5095 /* Verify that the case labels are sorted. */
5096 prev
= gimple_switch_label (stmt
, 0);
5097 for (i
= 1; i
< n
; ++i
)
5099 tree c
= gimple_switch_label (stmt
, i
);
5102 error ("found default case not at the start of "
5108 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5110 error ("case labels not sorted: ");
5111 print_generic_expr (stderr
, prev
, 0);
5112 fprintf (stderr
," is greater than ");
5113 print_generic_expr (stderr
, c
, 0);
5114 fprintf (stderr
," but comes before it.\n");
5119 /* VRP will remove the default case if it can prove it will
5120 never be executed. So do not verify there always exists
5121 a default case here. */
5123 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5127 error ("extra outgoing edge %d->%d",
5128 bb
->index
, e
->dest
->index
);
5132 e
->dest
->aux
= (void *)2;
5133 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5134 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5136 error ("wrong outgoing edge flags at end of bb %d",
5142 /* Check that we have all of them. */
5143 for (i
= 0; i
< n
; ++i
)
5145 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5146 basic_block label_bb
= label_to_block (lab
);
5148 if (label_bb
->aux
!= (void *)2)
5150 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5155 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5156 e
->dest
->aux
= (void *)0;
5160 case GIMPLE_EH_DISPATCH
:
5161 err
|= verify_eh_dispatch_edge (stmt
);
5169 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5170 verify_dominators (CDI_DOMINATORS
);
5176 /* Updates phi nodes after creating a forwarder block joined
5177 by edge FALLTHRU. */
5180 gimple_make_forwarder_block (edge fallthru
)
5184 basic_block dummy
, bb
;
5186 gimple_stmt_iterator gsi
;
5188 dummy
= fallthru
->src
;
5189 bb
= fallthru
->dest
;
5191 if (single_pred_p (bb
))
5194 /* If we redirected a branch we must create new PHI nodes at the
5196 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5198 gimple phi
, new_phi
;
5200 phi
= gsi_stmt (gsi
);
5201 var
= gimple_phi_result (phi
);
5202 new_phi
= create_phi_node (var
, bb
);
5203 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5204 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5208 /* Add the arguments we have stored on edges. */
5209 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5214 flush_pending_stmts (e
);
5219 /* Return a non-special label in the head of basic block BLOCK.
5220 Create one if it doesn't exist. */
5223 gimple_block_label (basic_block bb
)
5225 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5230 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5232 stmt
= gsi_stmt (i
);
5233 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5235 label
= gimple_label_label (stmt
);
5236 if (!DECL_NONLOCAL (label
))
5239 gsi_move_before (&i
, &s
);
5244 label
= create_artificial_label (UNKNOWN_LOCATION
);
5245 stmt
= gimple_build_label (label
);
5246 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5251 /* Attempt to perform edge redirection by replacing a possibly complex
5252 jump instruction by a goto or by removing the jump completely.
5253 This can apply only if all edges now point to the same block. The
5254 parameters and return values are equivalent to
5255 redirect_edge_and_branch. */
5258 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5260 basic_block src
= e
->src
;
5261 gimple_stmt_iterator i
;
5264 /* We can replace or remove a complex jump only when we have exactly
5266 if (EDGE_COUNT (src
->succs
) != 2
5267 /* Verify that all targets will be TARGET. Specifically, the
5268 edge that is not E must also go to TARGET. */
5269 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5272 i
= gsi_last_bb (src
);
5276 stmt
= gsi_stmt (i
);
5278 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5280 gsi_remove (&i
, true);
5281 e
= ssa_redirect_edge (e
, target
);
5282 e
->flags
= EDGE_FALLTHRU
;
5290 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5291 edge representing the redirected branch. */
5294 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5296 basic_block bb
= e
->src
;
5297 gimple_stmt_iterator gsi
;
5301 if (e
->flags
& EDGE_ABNORMAL
)
5304 if (e
->dest
== dest
)
5307 if (e
->flags
& EDGE_EH
)
5308 return redirect_eh_edge (e
, dest
);
5310 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5312 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5317 gsi
= gsi_last_bb (bb
);
5318 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5320 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5323 /* For COND_EXPR, we only need to redirect the edge. */
5327 /* No non-abnormal edges should lead from a non-simple goto, and
5328 simple ones should be represented implicitly. */
5333 tree label
= gimple_block_label (dest
);
5334 tree cases
= get_cases_for_edge (e
, stmt
);
5336 /* If we have a list of cases associated with E, then use it
5337 as it's a lot faster than walking the entire case vector. */
5340 edge e2
= find_edge (e
->src
, dest
);
5347 CASE_LABEL (cases
) = label
;
5348 cases
= CASE_CHAIN (cases
);
5351 /* If there was already an edge in the CFG, then we need
5352 to move all the cases associated with E to E2. */
5355 tree cases2
= get_cases_for_edge (e2
, stmt
);
5357 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5358 CASE_CHAIN (cases2
) = first
;
5360 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5364 size_t i
, n
= gimple_switch_num_labels (stmt
);
5366 for (i
= 0; i
< n
; i
++)
5368 tree elt
= gimple_switch_label (stmt
, i
);
5369 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5370 CASE_LABEL (elt
) = label
;
5378 int i
, n
= gimple_asm_nlabels (stmt
);
5381 for (i
= 0; i
< n
; ++i
)
5383 tree cons
= gimple_asm_label_op (stmt
, i
);
5384 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5387 label
= gimple_block_label (dest
);
5388 TREE_VALUE (cons
) = label
;
5392 /* If we didn't find any label matching the former edge in the
5393 asm labels, we must be redirecting the fallthrough
5395 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5400 gsi_remove (&gsi
, true);
5401 e
->flags
|= EDGE_FALLTHRU
;
5404 case GIMPLE_OMP_RETURN
:
5405 case GIMPLE_OMP_CONTINUE
:
5406 case GIMPLE_OMP_SECTIONS_SWITCH
:
5407 case GIMPLE_OMP_FOR
:
5408 /* The edges from OMP constructs can be simply redirected. */
5411 case GIMPLE_EH_DISPATCH
:
5412 if (!(e
->flags
& EDGE_FALLTHRU
))
5413 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5416 case GIMPLE_TRANSACTION
:
5417 /* The ABORT edge has a stored label associated with it, otherwise
5418 the edges are simply redirectable. */
5420 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5424 /* Otherwise it must be a fallthru edge, and we don't need to
5425 do anything besides redirecting it. */
5426 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5430 /* Update/insert PHI nodes as necessary. */
5432 /* Now update the edges in the CFG. */
5433 e
= ssa_redirect_edge (e
, dest
);
5438 /* Returns true if it is possible to remove edge E by redirecting
5439 it to the destination of the other edge from E->src. */
5442 gimple_can_remove_branch_p (const_edge e
)
5444 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5450 /* Simple wrapper, as we can always redirect fallthru edges. */
5453 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5455 e
= gimple_redirect_edge_and_branch (e
, dest
);
5462 /* Splits basic block BB after statement STMT (but at least after the
5463 labels). If STMT is NULL, BB is split just after the labels. */
5466 gimple_split_block (basic_block bb
, void *stmt
)
5468 gimple_stmt_iterator gsi
;
5469 gimple_stmt_iterator gsi_tgt
;
5476 new_bb
= create_empty_bb (bb
);
5478 /* Redirect the outgoing edges. */
5479 new_bb
->succs
= bb
->succs
;
5481 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5484 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5487 /* Move everything from GSI to the new basic block. */
5488 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5490 act
= gsi_stmt (gsi
);
5491 if (gimple_code (act
) == GIMPLE_LABEL
)
5504 if (gsi_end_p (gsi
))
5507 /* Split the statement list - avoid re-creating new containers as this
5508 brings ugly quadratic memory consumption in the inliner.
5509 (We are still quadratic since we need to update stmt BB pointers,
5511 gsi_split_seq_before (&gsi
, &list
);
5512 set_bb_seq (new_bb
, list
);
5513 for (gsi_tgt
= gsi_start (list
);
5514 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5515 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5521 /* Moves basic block BB after block AFTER. */
5524 gimple_move_block_after (basic_block bb
, basic_block after
)
5526 if (bb
->prev_bb
== after
)
5530 link_block (bb
, after
);
5536 /* Return TRUE if block BB has no executable statements, otherwise return
5540 gimple_empty_block_p (basic_block bb
)
5542 /* BB must have no executable statements. */
5543 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5546 if (gsi_end_p (gsi
))
5548 if (is_gimple_debug (gsi_stmt (gsi
)))
5549 gsi_next_nondebug (&gsi
);
5550 return gsi_end_p (gsi
);
5554 /* Split a basic block if it ends with a conditional branch and if the
5555 other part of the block is not empty. */
5558 gimple_split_block_before_cond_jump (basic_block bb
)
5560 gimple last
, split_point
;
5561 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5562 if (gsi_end_p (gsi
))
5564 last
= gsi_stmt (gsi
);
5565 if (gimple_code (last
) != GIMPLE_COND
5566 && gimple_code (last
) != GIMPLE_SWITCH
)
5568 gsi_prev_nondebug (&gsi
);
5569 split_point
= gsi_stmt (gsi
);
5570 return split_block (bb
, split_point
)->dest
;
5574 /* Return true if basic_block can be duplicated. */
/* NOTE(review): garbled extraction — the return type, braces and the body
   (presumably an unconditional return; BB is ATTRIBUTE_UNUSED) sit on
   dropped lines 5575-5580.  Code kept verbatim.  */
5577 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5582 /* Create a duplicate of the basic block BB. NOTE: This does not
5583 preserve SSA form. */
/* NOTE(review): garbled extraction — many original lines are missing
   between the embedded numbers (loop braces, `continue`s, parts of
   conditions such as the one at 5618-5619, and the tail that returns
   NEW_BB).  Code text kept verbatim; comments only added.  */
5586 gimple_duplicate_bb (basic_block bb
)
5589 gimple_stmt_iterator gsi
, gsi_tgt
;
5590 gimple_seq phis
= phi_nodes (bb
);
5591 gimple phi
, stmt
, copy
;
/* The duplicate is created just before the exit block of the current
   function (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb).  */
5593 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5595 /* Copy the PHI nodes. We ignore PHI node arguments here because
5596 the incoming edges have not been setup yet. */
5597 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5599 phi
= gsi_stmt (gsi
);
5600 copy
= create_phi_node (NULL_TREE
, new_bb
);
5601 create_new_def_for (gimple_phi_result (phi
), copy
,
5602 gimple_phi_result_ptr (copy
));
5603 gimple_set_uid (copy
, gimple_uid (phi
));
/* Copy the ordinary statements, skipping labels (and, per the comment at
   5617, label debug binds — part of that condition is on dropped lines).  */
5606 gsi_tgt
= gsi_start_bb (new_bb
);
5607 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5609 def_operand_p def_p
;
5610 ssa_op_iter op_iter
;
5613 stmt
= gsi_stmt (gsi
);
5614 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5617 /* Don't duplicate label debug stmts. */
5618 if (gimple_debug_bind_p (stmt
)
5619 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5623 /* Create a new copy of STMT and duplicate STMT's virtual
5625 copy
= gimple_copy (stmt
);
5626 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5628 maybe_duplicate_eh_stmt (copy
, stmt
);
5629 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5631 /* When copying around a stmt writing into a local non-user
5632 aggregate, make sure it won't share stack slot with other
5634 lhs
= gimple_get_lhs (stmt
);
5635 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5637 tree base
= get_base_address (lhs
);
/* Mark ignored, non-static, non-external local VAR_DECL/RESULT_DECL
   bases (without value-exprs) as non-shareable so the copy never shares
   a stack slot with the original's aggregate.  The `if (base ...` head
   of this condition is on a dropped line (5638).  */
5639 && (TREE_CODE (base
) == VAR_DECL
5640 || TREE_CODE (base
) == RESULT_DECL
)
5641 && DECL_IGNORED_P (base
)
5642 && !TREE_STATIC (base
)
5643 && !DECL_EXTERNAL (base
)
5644 && (TREE_CODE (base
) != VAR_DECL
5645 || !DECL_HAS_VALUE_EXPR_P (base
)))
5646 DECL_NONSHAREABLE (base
) = 1;
5649 /* Create new names for all the definitions created by COPY and
5650 add replacement mappings for each new name. */
5651 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5652 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5658 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
/* NOTE(review): garbled extraction — dropped lines include the return
   type, braces, the early return after the empty-phi check, the `if (!e)`
   guard before the FOR_EACH_EDGE fallback, and the `!gsi_end_p` loop
   condition at 5698.  Code kept verbatim.  */
5661 add_phi_args_after_copy_edge (edge e_copy
)
5663 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5666 gimple phi
, phi_copy
;
5668 gimple_stmt_iterator psi
, psi_copy
;
/* Nothing to do when the copied edge's destination has no PHIs.  */
5670 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
/* Map the copy back to its original block (BB_DUPLICATED flag set by
   add_phi_args_after_copy), likewise for the destination.  */
5673 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5675 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5676 dest
= get_bb_original (e_copy
->dest
);
5678 dest
= e_copy
->dest
;
5680 e
= find_edge (bb
, dest
);
5683 /* During loop unrolling the target of the latch edge is copied.
5684 In this case we are not looking for edge to dest, but to
5685 duplicated block whose original was dest. */
5686 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5688 if ((e
->dest
->flags
& BB_DUPLICATED
)
5689 && get_bb_original (e
->dest
) == dest
)
5693 gcc_assert (e
!= NULL
);
/* Walk original and copied PHIs in lockstep, transplanting the argument
   that flows along the original edge E onto the copied edge E_COPY,
   preserving its source location.  */
5696 for (psi
= gsi_start_phis (e
->dest
),
5697 psi_copy
= gsi_start_phis (e_copy
->dest
);
5699 gsi_next (&psi
), gsi_next (&psi_copy
))
5701 phi
= gsi_stmt (psi
);
5702 phi_copy
= gsi_stmt (psi_copy
);
5703 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5704 add_phi_arg (phi_copy
, def
, e_copy
,
5705 gimple_phi_arg_location_from_edge (phi
, e
));
5710 /* Basic block BB_COPY was created by code duplication. Add phi node
5711 arguments for edges going out of BB_COPY. The blocks that were
5712 duplicated have BB_DUPLICATED set. */
/* NOTE(review): garbled extraction — return type, braces and local
   declarations (edge_iterator ei, edge e_copy) sit on dropped lines.
   Visible logic: delegate to add_phi_args_after_copy_edge for every
   outgoing edge of BB_COPY.  */
5715 add_phi_args_after_copy_bb (basic_block bb_copy
)
5720 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5722 add_phi_args_after_copy_edge (e_copy
);
5726 /* Blocks in REGION_COPY array of length N_REGION were created by
5727 duplication of basic blocks. Add phi node arguments for edges
5728 going from these blocks. If E_COPY is not NULL, also add
5729 phi node arguments for its destination.*/
/* NOTE(review): garbled extraction — the second parameter line carrying
   `edge e_copy`, the return type, braces and the `if (e_copy)` guard
   before line 5743 are on dropped lines.  Visible logic: three passes —
   (1) tag every copy with BB_DUPLICATED, (2) fix up PHI args per block
   (and for E_COPY), (3) clear the BB_DUPLICATED flags again.  */
5732 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5737 for (i
= 0; i
< n_region
; i
++)
5738 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5740 for (i
= 0; i
< n_region
; i
++)
5741 add_phi_args_after_copy_bb (region_copy
[i
]);
5743 add_phi_args_after_copy_edge (e_copy
);
5745 for (i
= 0; i
< n_region
; i
++)
5746 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5749 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5750 important exit edge EXIT. By important we mean that no SSA name defined
5751 inside region is live over the other exit edges of the region. All entry
5752 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5753 to the duplicate of the region. Dominance and loop information is
5754 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5755 UPDATE_DOMINANCE is false then we assume that the caller will update the
5756 dominance information after calling this function. The new basic
5757 blocks are stored to REGION_COPY in the same order as they had in REGION,
5758 provided that REGION_COPY is not NULL.
5759 The function returns false if it is unable to copy the region,
/* NOTE(review): garbled extraction — a large number of original lines
   between the embedded numbers are missing (the return type, braces,
   `return false;` bodies of the sanity checks, the `if (!region_copy)`
   guard before the XNEWVEC at 5816, parts of the count/frequency scaling
   argument lists at 5855-5865, the copying_header fixups, and the final
   `return true;`).  Code text kept verbatim; comments only added.  */
5763 gimple_duplicate_sese_region (edge entry
, edge exit
,
5764 basic_block
*region
, unsigned n_region
,
5765 basic_block
*region_copy
,
5766 bool update_dominance
)
5769 bool free_region_copy
= false, copying_header
= false;
5770 struct loop
*loop
= entry
->dest
->loop_father
;
5772 vec
<basic_block
> doms
;
5774 int total_freq
= 0, entry_freq
= 0;
5775 gcov_type total_count
= 0, entry_count
= 0;
5777 if (!can_copy_bbs_p (region
, n_region
))
5780 /* Some sanity checking. Note that we do not check for all possible
5781 missuses of the functions. I.e. if you ask to copy something weird,
5782 it will work, but the state of structures probably will not be
5784 for (i
= 0; i
< n_region
; i
++)
5786 /* We do not handle subloops, i.e. all the blocks must belong to the
5788 if (region
[i
]->loop_father
!= loop
)
5791 if (region
[i
] != entry
->dest
5792 && region
[i
] == loop
->header
)
5796 set_loop_copy (loop
, loop
);
5798 /* In case the function is used for loop header copying (which is the primary
5799 use), ensure that EXIT and its copy will be new latch and entry edges. */
5800 if (loop
->header
== entry
->dest
)
5802 copying_header
= true;
5803 set_loop_copy (loop
, loop_outer (loop
))
/* ^ NOTE(review): closing `);` of this call is on the same original line
   in the upstream source; the extraction shows it below.  */;
5805 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5808 for (i
= 0; i
< n_region
; i
++)
5809 if (region
[i
] != exit
->src
5810 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
/* Allocate a scratch copy array when the caller passed none (the
   `if (!region_copy)` guard is on a dropped line).  */
5816 region_copy
= XNEWVEC (basic_block
, n_region
);
5817 free_region_copy
= true;
5820 initialize_original_copy_tables ();
5822 /* Record blocks outside the region that are dominated by something
5824 if (update_dominance
)
5827 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Profile bookkeeping: capture the count/frequency flowing in through
   ENTRY so the original and the copy can be rescaled below.  */
5830 if (entry
->dest
->count
)
5832 total_count
= entry
->dest
->count
;
5833 entry_count
= entry
->count
;
5834 /* Fix up corner cases, to avoid division by zero or creation of negative
5836 if (entry_count
> total_count
)
5837 entry_count
= total_count
;
5841 total_freq
= entry
->dest
->frequency
;
5842 entry_freq
= EDGE_FREQUENCY (entry
);
5843 /* Fix up corner cases, to avoid division by zero or creation of negative
5845 if (total_freq
== 0)
5847 else if (entry_freq
> total_freq
)
5848 entry_freq
= total_freq
;
5851 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5852 split_edge_bb_loc (entry
), update_dominance
);
/* Rescale: the original region keeps (total - entry), the copy receives
   entry_count/entry_freq; trailing denominator arguments are on dropped
   lines (5857, 5859, 5864).  */
5855 scale_bbs_frequencies_gcov_type (region
, n_region
,
5856 total_count
- entry_count
,
5858 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5863 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5865 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
/* When copying a loop header, EXIT's endpoints become the new header
   and latch (the `if (copying_header)` guard is on a dropped line).  */
5870 loop
->header
= exit
->dest
;
5871 loop
->latch
= exit
->src
;
5874 /* Redirect the entry and add the phi node arguments. */
5875 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5876 gcc_assert (redirected
!= NULL
);
5877 flush_pending_stmts (entry
);
5879 /* Concerning updating of dominators: We must recount dominators
5880 for entry block and its copy. Anything that is outside of the
5881 region, but was dominated by something inside needs recounting as
5883 if (update_dominance
)
5885 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5886 doms
.safe_push (get_bb_original (entry
->dest
));
5887 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5891 /* Add the other PHI node arguments. */
5892 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5894 if (free_region_copy
)
5897 free_original_copy_tables ();
5901 /* Checks if BB is part of the region defined by N_REGION BBS. */
/* NOTE(review): garbled extraction — the return type, braces, the loop
   body (presumably `if (bb == bbs[n]) return true;` — TODO confirm) and
   the final `return false;` are on dropped lines.  Code kept verbatim.  */
5903 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
5907 for (n
= 0; n
< n_region
; n
++)
5915 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5916 are stored to REGION_COPY in the same order in that they appear
5917 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5918 the region, EXIT an exit from it. The condition guarding EXIT
5919 is moved to ENTRY. Returns true if duplication succeeds, false
/* NOTE(review): garbled extraction — many original lines are missing
   between the embedded numbers, including the return type, braces,
   `return false;` bodies, `exits[0] = exit;`, the `if (!region_copy)`
   guard before the XNEWVEC at 5989, trailing arguments of the profile
   rescaling calls, several `else` keywords, loop conditions, and the
   final `return true;`.  Code text kept verbatim; comments only added.  */
5945 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
5946 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
5947 basic_block
*region_copy ATTRIBUTE_UNUSED
)
5950 bool free_region_copy
= false;
5951 struct loop
*loop
= exit
->dest
->loop_father
;
5952 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5953 basic_block switch_bb
, entry_bb
, nentry_bb
;
5954 vec
<basic_block
> doms
;
5955 int total_freq
= 0, exit_freq
= 0;
5956 gcov_type total_count
= 0, exit_count
= 0;
5957 edge exits
[2], nexits
[2], e
;
5958 gimple_stmt_iterator gsi
;
5961 basic_block exit_bb
;
5962 gimple_stmt_iterator psi
;
5965 struct loop
*target
, *aloop
, *cloop
;
/* EXIT's source must end in a two-way branch; exits[1] is the other
   successor edge of exit->src (exits[0] = exit sits on a dropped line).  */
5967 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5969 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5971 if (!can_copy_bbs_p (region
, n_region
))
5974 initialize_original_copy_tables ();
5975 set_loop_copy (orig_loop
, loop
);
/* Duplicate every subloop of ORIG_LOOP whose header lies inside the
   region (assignment of `target` is on a dropped line — TODO confirm).  */
5978 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
5980 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
5982 cloop
= duplicate_loop (aloop
, target
);
5983 duplicate_subloops (aloop
, cloop
);
5989 region_copy
= XNEWVEC (basic_block
, n_region
);
5990 free_region_copy
= true;
5993 gcc_assert (!need_ssa_update_p (cfun
));
5995 /* Record blocks outside the region that are dominated by something
5997 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
/* Profile bookkeeping mirroring gimple_duplicate_sese_region, but keyed
   off EXIT rather than ENTRY.  */
5999 if (exit
->src
->count
)
6001 total_count
= exit
->src
->count
;
6002 exit_count
= exit
->count
;
6003 /* Fix up corner cases, to avoid division by zero or creation of negative
6005 if (exit_count
> total_count
)
6006 exit_count
= total_count
;
6010 total_freq
= exit
->src
->frequency
;
6011 exit_freq
= EDGE_FREQUENCY (exit
);
6012 /* Fix up corner cases, to avoid division by zero or creation of negative
6014 if (total_freq
== 0)
6016 if (exit_freq
> total_freq
)
6017 exit_freq
= total_freq
;
6020 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6021 split_edge_bb_loc (exit
), true);
6024 scale_bbs_frequencies_gcov_type (region
, n_region
,
6025 total_count
- exit_count
,
6027 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6032 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6034 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6037 /* Create the switch block, and put the exit condition to it. */
6038 entry_bb
= entry
->dest
;
6039 nentry_bb
= get_bb_copy (entry_bb
);
/* Reuse entry->src as the switch block when it has no block-ending stmt;
   otherwise split the entry edge (the `else` between 6042 and 6044 is on
   a dropped line).  */
6040 if (!last_stmt (entry
->src
)
6041 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6042 switch_bb
= entry
->src
;
6044 switch_bb
= split_edge (entry
);
6045 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
/* Copy EXIT's guarding GIMPLE_COND into the switch block.  */
6047 gsi
= gsi_last_bb (switch_bb
);
6048 cond_stmt
= last_stmt (exit
->src
);
6049 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6050 cond_stmt
= gimple_copy (cond_stmt
);
6052 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6054 sorig
= single_succ_edge (switch_bb
);
6055 sorig
->flags
= exits
[1]->flags
;
6056 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6058 /* Register the new edge from SWITCH_BB in loop exit lists. */
6059 rescan_loop_exit (snew
, true, false);
6061 /* Add the PHI node arguments. */
6062 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6064 /* Get rid of now superfluous conditions and associated edges (and phi node
6066 exit_bb
= exit
->dest
;
6068 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6069 PENDING_STMT (e
) = NULL
;
6071 /* The latch of ORIG_LOOP was copied, and so was the backedge
6072 to the original header. We redirect this backedge to EXIT_BB. */
6073 for (i
= 0; i
< n_region
; i
++)
6074 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6076 gcc_assert (single_succ_edge (region_copy
[i
]));
6077 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6078 PENDING_STMT (e
) = NULL
;
/* Re-add the PHI args lost by the redirection (the loop condition and
   `gsi_next (&psi)` at 6080-6082 are on dropped lines).  */
6079 for (psi
= gsi_start_phis (exit_bb
);
6083 phi
= gsi_stmt (psi
);
6084 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6085 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6088 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6089 PENDING_STMT (e
) = NULL
;
6091 /* Anything that is outside of the region, but was dominated by something
6092 inside needs to update dominance info. */
6093 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6095 /* Update the SSA web. */
6096 update_ssa (TODO_update_ssa
);
6098 if (free_region_copy
)
6101 free_original_copy_tables ();
6105 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6106 adding blocks when the dominator traversal reaches EXIT. This
6107 function silently assumes that ENTRY strictly dominates EXIT. */
/* NOTE(review): garbled extraction — return type, braces, the local
   `son` declaration, the loop's termination test and the `son != exit`
   guard around the recursion are on dropped lines (6112-6118, 6120).
   Visible logic: depth-first walk of the dominator tree children of
   ENTRY, pushing each onto BBS_P and recursing, with EXIT as sentinel.  */
6110 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6111 vec
<basic_block
> *bbs_p
)
6115 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6117 son
= next_dom_son (CDI_DOMINATORS
, son
))
6119 bbs_p
->safe_push (son
);
6121 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6125 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6126 The duplicates are recorded in VARS_MAP. */
/* NOTE(review): garbled extraction — the third parameter (`tree
   to_context` per its uses), return type, braces, the early return when
   the decl is already in TO_CONTEXT, the `if (!loc)` cache-miss test,
   the VAR_DECL/CONST_DECL branch structure, the `*loc = new_t;` store
   and the final `*tp = new_t;` are on dropped lines.  Code verbatim.  */
6129 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6132 tree t
= *tp
, new_t
;
6133 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6136 if (DECL_CONTEXT (t
) == to_context
)
/* Cache lookup: duplicate each decl only once per VARS_MAP.  */
6139 loc
= pointer_map_contains (vars_map
, t
);
6143 loc
= pointer_map_insert (vars_map
, t
);
/* Variables get a full copy registered as a local of F; anything else
   must be a CONST_DECL and is just node-copied.  */
6147 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6148 add_local_decl (f
, new_t
);
6152 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6153 new_t
= copy_node (t
);
6155 DECL_CONTEXT (new_t
) = to_context
;
/* Cache hit: reuse the previously created duplicate.  */
6160 new_t
= (tree
) *loc
;
6166 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6167 VARS_MAP maps old ssa names and var_decls to the new ones. */
/* NOTE(review): garbled extraction — the `tree to_context` parameter,
   return type, braces, the `if (!loc)` cache-miss branch head, the
   argument of set_ssa_default_def at 6190, the `*loc = new_name;` store
   and the final `return new_name;` are on dropped lines.  Verbatim.  */
6170 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
/* Only real (non-virtual) operands may cross function boundaries here.  */
6176 gcc_assert (!virtual_operand_p (name
));
6178 loc
= pointer_map_contains (vars_map
, name
);
/* Cache miss: duplicate the underlying decl (if any), then create the
   SSA name in TO_CONTEXT's function, preserving default-def status.  */
6182 tree decl
= SSA_NAME_VAR (name
);
6185 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6186 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6187 decl
, SSA_NAME_DEF_STMT (name
));
6188 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6189 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6193 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6194 name
, SSA_NAME_DEF_STMT (name
));
6196 loc
= pointer_map_insert (vars_map
, name
);
6200 new_name
= (tree
) *loc
;
/* NOTE(review): fragment of the `struct move_stmt_d` field list (the
   struct's opening, the block/context fields referenced later as
   p->orig_block / p->new_block / p->to_context / p->remap_decls_p, and
   the closing brace were dropped by the extraction).  These three fields
   are read by move_stmt_op / move_stmt_r / move_stmt_eh_region_nr below:
   VARS_MAP caches decl/SSA-name duplicates, NEW_LABEL_MAP remaps label
   decls, EH_MAP remaps EH regions.  */
6211 struct pointer_map_t
*vars_map
;
6212 htab_t new_label_map
;
6213 struct pointer_map_t
*eh_map
;
6217 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6218 contained in *TP if it has been ORIG_BLOCK previously and change the
6219 DECL_CONTEXT of every local variable referenced in *TP. */
/* NOTE(review): garbled extraction — dropped lines include the return
   type, `tree t = *tp;`, the `if (EXPR_P (t))` head at ~6228, #endif,
   the tree_map `in` setup and the use of `out` after the hash lookup,
   `*walk_subtrees = 0;` statements and the final `return NULL_TREE;`.
   Code kept verbatim; comments only added.  */
6222 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6224 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6225 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
/* Expressions: retarget TREE_BLOCK from ORIG_BLOCK (or any non-NULL
   block when ORIG_BLOCK is NULL) to NEW_BLOCK.  */
6230 tree block
= TREE_BLOCK (t
);
6231 if (block
== p
->orig_block
6232 || (p
->orig_block
== NULL_TREE
6233 && block
!= NULL_TREE
))
6234 TREE_SET_BLOCK (t
, p
->new_block
);
6235 #ifdef ENABLE_CHECKING
/* Checking-only: any other block must be nested inside ORIG_BLOCK.  */
6236 else if (block
!= NULL_TREE
)
6238 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6239 block
= BLOCK_SUPERCONTEXT (block
);
6240 gcc_assert (block
== p
->orig_block
);
/* Decls and SSA names: remap into the destination function.  */
6244 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6246 if (TREE_CODE (t
) == SSA_NAME
)
6247 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6248 else if (TREE_CODE (t
) == LABEL_DECL
)
6250 if (p
->new_label_map
)
6252 struct tree_map in
, *out
;
6254 out
= (struct tree_map
*)
6255 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6260 DECL_CONTEXT (t
) = p
->to_context
;
6262 else if (p
->remap_decls_p
)
6264 /* Replace T with its duplicate. T should no longer appear in the
6265 parent function, so this looks wasteful; however, it may appear
6266 in referenced_vars, and more importantly, as virtual operands of
6267 statements, and in alias lists of other variables. It would be
6268 quite difficult to expunge it from all those places. ??? It might
6269 suffice to do this for addressable variables. */
6270 if ((TREE_CODE (t
) == VAR_DECL
6271 && !is_global_var (t
))
6272 || TREE_CODE (t
) == CONST_DECL
)
6273 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6277 else if (TYPE_P (t
))
6283 /* Helper for move_stmt_r. Given an EH region number for the source
6284 function, map that to the duplicate EH region number in the dest. */
/* NOTE(review): garbled extraction — return type, braces, the `void
   **slot` declaration and the assert on the lookup result sit on dropped
   lines.  Visible logic: number -> source eh_region -> EH_MAP lookup ->
   duplicate region's index.  (Fixed "regio" typo in original comment.)  */
6287 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6289 eh_region old_r
, new_r
;
6292 old_r
= get_eh_region_from_number (old_nr
);
6293 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6294 new_r
= (eh_region
) *slot
;
6296 return new_r
->index
;
6299 /* Similar, but operate on INTEGER_CSTs. */
/* NOTE(review): garbled extraction — return type, braces and the
   `int old_nr, new_nr;` declarations are on dropped lines.  Visible
   logic: unwrap the INTEGER_CST, remap via move_stmt_eh_region_nr, and
   rebuild an integer_type_node constant.  */
6302 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6306 old_nr
= tree_to_shwi (old_t_nr
);
6307 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6309 return build_int_cst (integer_type_node
, new_nr
);
6312 /* Like move_stmt_op, but for gimple statements.
6314 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6315 contained in the current statement in *GSI_P and change the
6316 DECL_CONTEXT of every local variable referenced in the current
/* NOTE(review): garbled extraction — the case labels GIMPLE_CALL and
   GIMPLE_RESX, `break;` statements, the `default:` arm, the `wi` argument
   of walk_gimple_seq_mod and the final `return NULL_TREE;` sit on dropped
   lines.  Code kept verbatim; comments only added.  */
6320 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6321 struct walk_stmt_info
*wi
)
6323 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6324 gimple stmt
= gsi_stmt (*gsi_p
);
6325 tree block
= gimple_block (stmt
);
6327 if (block
== p
->orig_block
6328 || (p
->orig_block
== NULL_TREE
6329 && block
!= NULL_TREE
))
6330 gimple_set_block (stmt
, p
->new_block
);
6332 switch (gimple_code (stmt
))
6335 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6337 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6338 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6339 switch (DECL_FUNCTION_CODE (fndecl
))
6341 case BUILT_IN_EH_COPY_VALUES
:
/* EH_COPY_VALUES carries two region numbers; arg 1 is remapped here,
   arg 0 presumably on a dropped line — TODO confirm upstream.  */
6342 r
= gimple_call_arg (stmt
, 1);
6343 r
= move_stmt_eh_region_tree_nr (r
, p
);
6344 gimple_call_set_arg (stmt
, 1, r
);
6347 case BUILT_IN_EH_POINTER
:
6348 case BUILT_IN_EH_FILTER
:
6349 r
= gimple_call_arg (stmt
, 0);
6350 r
= move_stmt_eh_region_tree_nr (r
, p
);
6351 gimple_call_set_arg (stmt
, 0, r
);
/* RESX and EH_DISPATCH store their region number directly.  */
6362 int r
= gimple_resx_region (stmt
);
6363 r
= move_stmt_eh_region_nr (r
, p
);
6364 gimple_resx_set_region (stmt
, r
);
6368 case GIMPLE_EH_DISPATCH
:
6370 int r
= gimple_eh_dispatch_region (stmt
);
6371 r
= move_stmt_eh_region_nr (r
, p
);
6372 gimple_eh_dispatch_set_region (stmt
, r
);
6376 case GIMPLE_OMP_RETURN
:
6377 case GIMPLE_OMP_CONTINUE
:
6380 if (is_gimple_omp (stmt
))
6382 /* Do not remap variables inside OMP directives. Variables
6383 referenced in clauses and directive header belong to the
6384 parent function and should not be moved into the child
6386 bool save_remap_decls_p
= p
->remap_decls_p
;
6387 p
->remap_decls_p
= false;
6388 *handled_ops_p
= true;
6390 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6393 p
->remap_decls_p
= save_remap_decls_p
;
6401 /* Move basic block BB from function CFUN to function DEST_FN. The
6402 block is moved out of the original linked list and placed after
6403 block AFTER in the new list. Also, the block is removed from the
6404 original array of blocks and placed in DEST_FN's array of blocks.
6405 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6406 updated to reflect the moved edges.
6408 The local variables are remapped to new instances, VARS_MAP is used
6409 to record the mapping. */
/* NOTE(review): garbled extraction — dropped lines include the return
   type, braces, locals `e`/`ei`/`i`/`use`/`oi`, the edge_iterator loop
   bodies, `else` branches of several conditionals, the `continue`
   after remove_phi_node, the pop_cfun matching the push_cfun at 6550,
   and parts of the PHI-argument walk.  Code kept verbatim.  */
6412 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6413 basic_block after
, bool update_edge_count_p
,
6414 struct move_stmt_d
*d
)
6416 struct control_flow_graph
*cfg
;
6419 gimple_stmt_iterator si
;
6420 unsigned old_len
, new_len
;
6422 /* Remove BB from dominance structures. */
6423 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6425 /* Move BB from its current loop to the copy in the new function. */
/* Relies on loop->aux having been set to the destination loop by the
   caller (see move_sese_region_to_fn's "Setup a mapping" step).  */
6428 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6430 bb
->loop_father
= new_loop
;
6433 /* Link BB to the new linked list. */
6434 move_block_after (bb
, after
);
6436 /* Update the edge count in the corresponding flowgraphs. */
6437 if (update_edge_count_p
)
6438 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6440 cfun
->cfg
->x_n_edges
--;
6441 dest_cfun
->cfg
->x_n_edges
++;
6444 /* Remove BB from the original basic block array. */
6445 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6446 cfun
->cfg
->x_n_basic_blocks
--;
6448 /* Grow DEST_CFUN's basic block array if needed. */
6449 cfg
= dest_cfun
->cfg
;
6450 cfg
->x_n_basic_blocks
++;
6451 if (bb
->index
>= cfg
->x_last_basic_block
)
6452 cfg
->x_last_basic_block
= bb
->index
+ 1;
/* Grow by 25% headroom beyond x_last_basic_block when needed.  */
6454 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6455 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6457 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6458 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6461 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6463 /* Remap the variables in phi nodes. */
6464 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6466 gimple phi
= gsi_stmt (si
);
6468 tree op
= PHI_RESULT (phi
);
6472 if (virtual_operand_p (op
))
6474 /* Remove the phi nodes for virtual operands (alias analysis will be
6475 run for the new function, anyway). */
6476 remove_phi_node (&si
, true);
/* Real PHIs: rewrite the result and every SSA-name argument into
   names belonging to DEST_CFUN.  */
6480 SET_PHI_RESULT (phi
,
6481 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6482 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6484 op
= USE_FROM_PTR (use
);
6485 if (TREE_CODE (op
) == SSA_NAME
)
6486 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
/* Retarget per-argument source locations from ORIG_BLOCK to
   NEW_BLOCK (or strip the block when NEW_BLOCK is NULL).  */
6489 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6491 location_t locus
= gimple_phi_arg_location (phi
, i
);
6492 tree block
= LOCATION_BLOCK (locus
);
6494 if (locus
== UNKNOWN_LOCATION
)
6496 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6498 if (d
->new_block
== NULL_TREE
)
6499 locus
= LOCATION_LOCUS (locus
);
6501 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6502 gimple_phi_arg_set_location (phi
, i
, locus
);
/* Walk every statement: remap operands/blocks via move_stmt_r /
   move_stmt_op, move label bookkeeping, EH info and histograms.  */
6509 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6511 gimple stmt
= gsi_stmt (si
);
6512 struct walk_stmt_info wi
;
6514 memset (&wi
, 0, sizeof (wi
));
6516 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6518 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6520 tree label
= gimple_label_label (stmt
);
6521 int uid
= LABEL_DECL_UID (label
);
6523 gcc_assert (uid
> -1);
6525 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6526 if (old_len
<= (unsigned) uid
)
6528 new_len
= 3 * uid
/ 2 + 1;
6529 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
/* Register the label in DEST_CFUN's map and clear it from CFUN's.  */
6532 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6533 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6535 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6537 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6538 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6541 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6542 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6544 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6545 gimple_remove_stmt_histograms (cfun
, stmt
);
6547 /* We cannot leave any operands allocated from the operand caches of
6548 the current function. */
6549 free_stmt_operands (cfun
, stmt
);
6550 push_cfun (dest_cfun
);
/* Retarget goto_locus blocks on outgoing edges, mirroring the PHI
   argument location handling above.  */
6555 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6556 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6558 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6559 if (d
->orig_block
== NULL_TREE
6560 || block
== d
->orig_block
)
6561 e
->goto_locus
= d
->new_block
?
6562 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6563 LOCATION_LOCUS (e
->goto_locus
);
6567 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6568 the outermost EH region. Use REGION as the incoming base EH region. */
/* NOTE(review): garbled extraction — return type, braces, the `int lp_nr`
   declaration, the `if (!stmt_region) continue;`-style guards around
   6584-6587, the `if (!region)` head before 6587, and the final
   `return region;` are on dropped lines.  Visible logic: fold each
   statement's EH region into REGION via eh_region_outermost.  */
6571 find_outermost_region_in_block (struct function
*src_cfun
,
6572 basic_block bb
, eh_region region
)
6574 gimple_stmt_iterator si
;
6576 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6578 gimple stmt
= gsi_stmt (si
);
6579 eh_region stmt_region
;
6582 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6583 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6587 region
= stmt_region
;
6588 else if (stmt_region
!= region
)
6590 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6591 gcc_assert (region
!= NULL
);
/* NOTE(review): label-duplication callback (passed to
   duplicate_eh_regions at 6790 below in the original file).  DATA is the
   htab of tree_map entries keyed by the old label's DECL_UID.  The
   extraction dropped its leading comment, return type, braces, the
   `struct tree_map *m;` / `void **slot;` declarations, the `*slot = m;`
   store and the `return m->to;` tail.  Code kept verbatim.  */
6600 new_label_mapper (tree decl
, void *data
)
6602 htab_t hash
= (htab_t
) data
;
6606 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
/* Build a tree_map entry: old label -> fresh artificial label that
   keeps the old LABEL_DECL_UID.  */
6608 m
= XNEW (struct tree_map
);
6609 m
->hash
= DECL_UID (decl
);
6610 m
->base
.from
= decl
;
6611 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6612 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6613 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6614 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
/* Each label must be mapped exactly once.  */
6616 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6617 gcc_assert (*slot
== NULL
);
6624 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
/* NOTE(review): garbled extraction — the rest of this comment, the
   `tree to_context` parameter, return type, braces, `tree t = *tp;`,
   the `continue;` after the type check, the `if (t != *tp)` guard and
   the `*tp = t;` store are on dropped lines.  Visible logic: for every
   VAR_DECL/CONST_DECL chained on BLOCK_VARS, substitute its duplicate
   (carrying over any DECL_VALUE_EXPR) and splice it into the chain,
   then recurse over BLOCK_SUBBLOCKS.  */
6628 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6633 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6636 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6638 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6641 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6643 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6644 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6646 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6651 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6652 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6655 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
/* NOTE(review): garbled extraction — the rest of this comment, the
   `struct loop *loop` parameter, return type and braces are on dropped
   lines.  Visible logic: remove LOOP from FN1's loop array, append it to
   FN2's with a freshly assigned number, then recurse over its children
   via the inner/next sibling links.  */
6659 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6662 /* Discard it from the old loop array. */
6663 (*get_loops (fn1
))[loop
->num
] = NULL
;
6665 /* Place it in the new loop array, assigning it a new number. */
6666 loop
->num
= number_of_loops (fn2
);
6667 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6669 /* Recurse to children. */
6670 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6671 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6674 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6675 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6676 single basic block in the original CFG and the new basic block is
6677 returned. DEST_CFUN must not have a CFG yet.
6679 Note that the region need not be a pure SESE region. Blocks inside
6680 the region may contain calls to abort/exit. The only restriction
6681 is that ENTRY_BB should be the only entry point and it must
6684 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6685 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6686 to the new function.
6688 All local variables referenced in the region are assumed to be in
6689 the corresponding BLOCK_VARS and unexpanded variable lists
6690 associated with DEST_CFUN. */
6693 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6694 basic_block exit_bb
, tree orig_block
)
6696 vec
<basic_block
> bbs
, dom_bbs
;
6697 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6698 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6699 struct function
*saved_cfun
= cfun
;
6700 int *entry_flag
, *exit_flag
;
6701 unsigned *entry_prob
, *exit_prob
;
6702 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6705 htab_t new_label_map
;
6706 struct pointer_map_t
*vars_map
, *eh_map
;
6707 struct loop
*loop
= entry_bb
->loop_father
;
6708 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6709 struct move_stmt_d d
;
6711 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6713 gcc_assert (entry_bb
!= exit_bb
6715 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6717 /* Collect all the blocks in the region. Manually add ENTRY_BB
6718 because it won't be added by dfs_enumerate_from. */
6720 bbs
.safe_push (entry_bb
);
6721 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6723 /* The blocks that used to be dominated by something in BBS will now be
6724 dominated by the new block. */
6725 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6729 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6730 the predecessor edges to ENTRY_BB and the successor edges to
6731 EXIT_BB so that we can re-attach them to the new basic block that
6732 will replace the region. */
6733 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6734 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6735 entry_flag
= XNEWVEC (int, num_entry_edges
);
6736 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6738 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6740 entry_prob
[i
] = e
->probability
;
6741 entry_flag
[i
] = e
->flags
;
6742 entry_pred
[i
++] = e
->src
;
6748 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6749 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6750 exit_flag
= XNEWVEC (int, num_exit_edges
);
6751 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6753 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6755 exit_prob
[i
] = e
->probability
;
6756 exit_flag
[i
] = e
->flags
;
6757 exit_succ
[i
++] = e
->dest
;
6769 /* Switch context to the child function to initialize DEST_FN's CFG. */
6770 gcc_assert (dest_cfun
->cfg
== NULL
);
6771 push_cfun (dest_cfun
);
6773 init_empty_tree_cfg ();
6775 /* Initialize EH information for the new function. */
6777 new_label_map
= NULL
;
6780 eh_region region
= NULL
;
6782 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6783 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6785 init_eh_for_function ();
6788 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6789 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6790 new_label_mapper
, new_label_map
);
6794 /* Initialize an empty loop tree. */
6795 struct loops
*loops
= ggc_alloc_cleared_loops ();
6796 init_loops_structure (dest_cfun
, loops
, 1);
6797 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6798 set_loops_for_fn (dest_cfun
, loops
);
6800 /* Move the outlined loop tree part. */
6801 num_nodes
= bbs
.length ();
6802 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6804 if (bb
->loop_father
->header
== bb
)
6806 struct loop
*this_loop
= bb
->loop_father
;
6807 struct loop
*outer
= loop_outer (this_loop
);
6809 /* If the SESE region contains some bbs ending with
6810 a noreturn call, those are considered to belong
6811 to the outermost loop in saved_cfun, rather than
6812 the entry_bb's loop_father. */
6816 num_nodes
-= this_loop
->num_nodes
;
6817 flow_loop_tree_node_remove (bb
->loop_father
);
6818 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6819 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6822 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6825 /* Remove loop exits from the outlined region. */
6826 if (loops_for_fn (saved_cfun
)->exits
)
6827 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6829 void **slot
= htab_find_slot_with_hash
6830 (loops_for_fn (saved_cfun
)->exits
, e
,
6831 htab_hash_pointer (e
), NO_INSERT
);
6833 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6838 /* Adjust the number of blocks in the tree root of the outlined part. */
6839 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6841 /* Setup a mapping to be used by move_block_to_fn. */
6842 loop
->aux
= current_loops
->tree_root
;
6843 loop0
->aux
= current_loops
->tree_root
;
6847 /* Move blocks from BBS into DEST_CFUN. */
6848 gcc_assert (bbs
.length () >= 2);
6849 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6850 vars_map
= pointer_map_create ();
6852 memset (&d
, 0, sizeof (d
));
6853 d
.orig_block
= orig_block
;
6854 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6855 d
.from_context
= cfun
->decl
;
6856 d
.to_context
= dest_cfun
->decl
;
6857 d
.vars_map
= vars_map
;
6858 d
.new_label_map
= new_label_map
;
6860 d
.remap_decls_p
= true;
6862 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6864 /* No need to update edge counts on the last block. It has
6865 already been updated earlier when we detached the region from
6866 the original CFG. */
6867 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6873 /* Loop sizes are no longer correct, fix them up. */
6874 loop
->num_nodes
-= num_nodes
;
6875 for (struct loop
*outer
= loop_outer (loop
);
6876 outer
; outer
= loop_outer (outer
))
6877 outer
->num_nodes
-= num_nodes
;
6878 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6880 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vect_loops
)
6883 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
6888 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
6890 dest_cfun
->has_simduid_loops
= true;
6892 if (aloop
->force_vect
)
6893 dest_cfun
->has_force_vect_loops
= true;
6897 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6901 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6903 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6904 = BLOCK_SUBBLOCKS (orig_block
);
6905 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6906 block
; block
= BLOCK_CHAIN (block
))
6907 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6908 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6911 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6912 vars_map
, dest_cfun
->decl
);
6915 htab_delete (new_label_map
);
6917 pointer_map_destroy (eh_map
);
6918 pointer_map_destroy (vars_map
);
6920 /* Rewire the entry and exit blocks. The successor to the entry
6921 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6922 the child function. Similarly, the predecessor of DEST_FN's
6923 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6924 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6925 various CFG manipulation function get to the right CFG.
6927 FIXME, this is silly. The CFG ought to become a parameter to
6929 push_cfun (dest_cfun
);
6930 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
6932 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
6935 /* Back in the original function, the SESE region has disappeared,
6936 create a new basic block in its place. */
6937 bb
= create_empty_bb (entry_pred
[0]);
6939 add_bb_to_loop (bb
, loop
);
6940 for (i
= 0; i
< num_entry_edges
; i
++)
6942 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6943 e
->probability
= entry_prob
[i
];
6946 for (i
= 0; i
< num_exit_edges
; i
++)
6948 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6949 e
->probability
= exit_prob
[i
];
6952 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6953 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
6954 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6972 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6976 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
6978 tree arg
, var
, old_current_fndecl
= current_function_decl
;
6979 struct function
*dsf
;
6980 bool ignore_topmost_bind
= false, any_var
= false;
6983 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
6984 && decl_is_tm_clone (fndecl
));
6985 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
6987 current_function_decl
= fndecl
;
6988 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
6990 arg
= DECL_ARGUMENTS (fndecl
);
6993 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6994 fprintf (file
, " ");
6995 print_generic_expr (file
, arg
, dump_flags
);
6996 if (flags
& TDF_VERBOSE
)
6997 print_node (file
, "", arg
, 4);
6998 if (DECL_CHAIN (arg
))
6999 fprintf (file
, ", ");
7000 arg
= DECL_CHAIN (arg
);
7002 fprintf (file
, ")\n");
7004 if (flags
& TDF_VERBOSE
)
7005 print_node (file
, "", fndecl
, 2);
7007 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7008 if (dsf
&& (flags
& TDF_EH
))
7009 dump_eh_tree (file
, dsf
);
7011 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7013 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7014 current_function_decl
= old_current_fndecl
;
7018 /* When GIMPLE is lowered, the variables are no longer available in
7019 BIND_EXPRs, so display them separately. */
7020 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7023 ignore_topmost_bind
= true;
7025 fprintf (file
, "{\n");
7026 if (!vec_safe_is_empty (fun
->local_decls
))
7027 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7029 print_generic_decl (file
, var
, flags
);
7030 if (flags
& TDF_VERBOSE
)
7031 print_node (file
, "", var
, 4);
7032 fprintf (file
, "\n");
7036 if (gimple_in_ssa_p (cfun
))
7037 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7039 tree name
= ssa_name (ix
);
7040 if (name
&& !SSA_NAME_VAR (name
))
7042 fprintf (file
, " ");
7043 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7044 fprintf (file
, " ");
7045 print_generic_expr (file
, name
, flags
);
7046 fprintf (file
, ";\n");
7053 if (fun
&& fun
->decl
== fndecl
7055 && basic_block_info_for_fn (fun
))
7057 /* If the CFG has been built, emit a CFG-based dump. */
7058 if (!ignore_topmost_bind
)
7059 fprintf (file
, "{\n");
7061 if (any_var
&& n_basic_blocks_for_fn (fun
))
7062 fprintf (file
, "\n");
7064 FOR_EACH_BB_FN (bb
, fun
)
7065 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7067 fprintf (file
, "}\n");
7069 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7071 /* The function is now in GIMPLE form but the CFG has not been
7072 built yet. Emit the single sequence of GIMPLE statements
7073 that make up its body. */
7074 gimple_seq body
= gimple_body (fndecl
);
7076 if (gimple_seq_first_stmt (body
)
7077 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7078 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7079 print_gimple_seq (file
, body
, 0, flags
);
7082 if (!ignore_topmost_bind
)
7083 fprintf (file
, "{\n");
7086 fprintf (file
, "\n");
7088 print_gimple_seq (file
, body
, 2, flags
);
7089 fprintf (file
, "}\n");
7096 /* Make a tree based dump. */
7097 chain
= DECL_SAVED_TREE (fndecl
);
7098 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7100 if (ignore_topmost_bind
)
7102 chain
= BIND_EXPR_BODY (chain
);
7110 if (!ignore_topmost_bind
)
7111 fprintf (file
, "{\n");
7116 fprintf (file
, "\n");
7118 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7119 if (ignore_topmost_bind
)
7120 fprintf (file
, "}\n");
7123 if (flags
& TDF_ENUMERATE_LOCALS
)
7124 dump_enumerated_decls (file
, flags
);
7125 fprintf (file
, "\n\n");
7127 current_function_decl
= old_current_fndecl
;
7130 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7133 debug_function (tree fn
, int flags
)
7135 dump_function_to_file (fn
, stderr
, flags
);
7139 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7142 print_pred_bbs (FILE *file
, basic_block bb
)
7147 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7148 fprintf (file
, "bb_%d ", e
->src
->index
);
7152 /* Print on FILE the indexes for the successors of basic_block BB. */
7155 print_succ_bbs (FILE *file
, basic_block bb
)
7160 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7161 fprintf (file
, "bb_%d ", e
->dest
->index
);
7164 /* Print to FILE the basic block BB following the VERBOSITY level. */
7167 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7169 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7170 memset ((void *) s_indent
, ' ', (size_t) indent
);
7171 s_indent
[indent
] = '\0';
7173 /* Print basic_block's header. */
7176 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7177 print_pred_bbs (file
, bb
);
7178 fprintf (file
, "}, succs = {");
7179 print_succ_bbs (file
, bb
);
7180 fprintf (file
, "})\n");
7183 /* Print basic_block's body. */
7186 fprintf (file
, "%s {\n", s_indent
);
7187 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7188 fprintf (file
, "%s }\n", s_indent
);
7192 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7194 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7195 VERBOSITY level this outputs the contents of the loop, or just its
7199 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7207 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7208 memset ((void *) s_indent
, ' ', (size_t) indent
);
7209 s_indent
[indent
] = '\0';
7211 /* Print loop's header. */
7212 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7214 fprintf (file
, "header = %d", loop
->header
->index
);
7217 fprintf (file
, "deleted)\n");
7221 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7223 fprintf (file
, ", multiple latches");
7224 fprintf (file
, ", niter = ");
7225 print_generic_expr (file
, loop
->nb_iterations
, 0);
7227 if (loop
->any_upper_bound
)
7229 fprintf (file
, ", upper_bound = ");
7230 dump_double_int (file
, loop
->nb_iterations_upper_bound
, true);
7233 if (loop
->any_estimate
)
7235 fprintf (file
, ", estimate = ");
7236 dump_double_int (file
, loop
->nb_iterations_estimate
, true);
7238 fprintf (file
, ")\n");
7240 /* Print loop's body. */
7243 fprintf (file
, "%s{\n", s_indent
);
7244 FOR_EACH_BB_FN (bb
, cfun
)
7245 if (bb
->loop_father
== loop
)
7246 print_loops_bb (file
, bb
, indent
, verbosity
);
7248 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7249 fprintf (file
, "%s}\n", s_indent
);
7253 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7254 spaces. Following VERBOSITY level this outputs the contents of the
7255 loop, or just its structure. */
7258 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7264 print_loop (file
, loop
, indent
, verbosity
);
7265 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7268 /* Follow a CFG edge from the entry point of the program, and on entry
7269 of a loop, pretty print the loop structure on FILE. */
7272 print_loops (FILE *file
, int verbosity
)
7276 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7277 if (bb
&& bb
->loop_father
)
7278 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7284 debug (struct loop
&ref
)
7286 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7290 debug (struct loop
*ptr
)
7295 fprintf (stderr
, "<nil>\n");
7298 /* Dump a loop verbosely. */
7301 debug_verbose (struct loop
&ref
)
7303 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7307 debug_verbose (struct loop
*ptr
)
7312 fprintf (stderr
, "<nil>\n");
7316 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7319 debug_loops (int verbosity
)
7321 print_loops (stderr
, verbosity
);
7324 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7327 debug_loop (struct loop
*loop
, int verbosity
)
7329 print_loop (stderr
, loop
, 0, verbosity
);
7332 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7336 debug_loop_num (unsigned num
, int verbosity
)
7338 debug_loop (get_loop (cfun
, num
), verbosity
);
7341 /* Return true if BB ends with a call, possibly followed by some
7342 instructions that must stay with the call. Return false,
7346 gimple_block_ends_with_call_p (basic_block bb
)
7348 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7349 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7353 /* Return true if BB ends with a conditional branch. Return false,
7357 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7359 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7360 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7364 /* Return true if we need to add fake edge to exit at statement T.
7365 Helper function for gimple_flow_call_edges_add. */
7368 need_fake_edge_p (gimple t
)
7370 tree fndecl
= NULL_TREE
;
7373 /* NORETURN and LONGJMP calls already have an edge to exit.
7374 CONST and PURE calls do not need one.
7375 We don't currently check for CONST and PURE here, although
7376 it would be a good idea, because those attributes are
7377 figured out from the RTL in mark_constant_function, and
7378 the counter incrementation code from -fprofile-arcs
7379 leads to different results from -fbranch-probabilities. */
7380 if (is_gimple_call (t
))
7382 fndecl
= gimple_call_fndecl (t
);
7383 call_flags
= gimple_call_flags (t
);
7386 if (is_gimple_call (t
)
7388 && DECL_BUILT_IN (fndecl
)
7389 && (call_flags
& ECF_NOTHROW
)
7390 && !(call_flags
& ECF_RETURNS_TWICE
)
7391 /* fork() doesn't really return twice, but the effect of
7392 wrapping it in __gcov_fork() which calls __gcov_flush()
7393 and clears the counters before forking has the same
7394 effect as returning twice. Force a fake edge. */
7395 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7396 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7399 if (is_gimple_call (t
))
7405 if (!(call_flags
& ECF_NORETURN
))
7409 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7410 if ((e
->flags
& EDGE_FAKE
) == 0)
7414 if (gimple_code (t
) == GIMPLE_ASM
7415 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7422 /* Add fake edges to the function exit for any non constant and non
7423 noreturn calls (or noreturn calls with EH/abnormal edges),
7424 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7425 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7428 The goal is to expose cases in which entering a basic block does
7429 not imply that all subsequent instructions must be executed. */
7432 gimple_flow_call_edges_add (sbitmap blocks
)
7435 int blocks_split
= 0;
7436 int last_bb
= last_basic_block_for_fn (cfun
);
7437 bool check_last_block
= false;
7439 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7443 check_last_block
= true;
7445 check_last_block
= bitmap_bit_p (blocks
,
7446 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7448 /* In the last basic block, before epilogue generation, there will be
7449 a fallthru edge to EXIT. Special care is required if the last insn
7450 of the last basic block is a call because make_edge folds duplicate
7451 edges, which would result in the fallthru edge also being marked
7452 fake, which would result in the fallthru edge being removed by
7453 remove_fake_edges, which would result in an invalid CFG.
7455 Moreover, we can't elide the outgoing fake edge, since the block
7456 profiler needs to take this into account in order to solve the minimal
7457 spanning tree in the case that the call doesn't return.
7459 Handle this by adding a dummy instruction in a new last basic block. */
7460 if (check_last_block
)
7462 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7463 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7466 if (!gsi_end_p (gsi
))
7469 if (t
&& need_fake_edge_p (t
))
7473 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7476 gsi_insert_on_edge (e
, gimple_build_nop ());
7477 gsi_commit_edge_inserts ();
7482 /* Now add fake edges to the function exit for any non constant
7483 calls since there is no way that we can determine if they will
7485 for (i
= 0; i
< last_bb
; i
++)
7487 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7488 gimple_stmt_iterator gsi
;
7489 gimple stmt
, last_stmt
;
7494 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7497 gsi
= gsi_last_nondebug_bb (bb
);
7498 if (!gsi_end_p (gsi
))
7500 last_stmt
= gsi_stmt (gsi
);
7503 stmt
= gsi_stmt (gsi
);
7504 if (need_fake_edge_p (stmt
))
7508 /* The handling above of the final block before the
7509 epilogue should be enough to verify that there is
7510 no edge to the exit block in CFG already.
7511 Calling make_edge in such case would cause us to
7512 mark that edge as fake and remove it later. */
7513 #ifdef ENABLE_CHECKING
7514 if (stmt
== last_stmt
)
7516 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7517 gcc_assert (e
== NULL
);
7521 /* Note that the following may create a new basic block
7522 and renumber the existing basic blocks. */
7523 if (stmt
!= last_stmt
)
7525 e
= split_block (bb
, stmt
);
7529 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7533 while (!gsi_end_p (gsi
));
7538 verify_flow_info ();
7540 return blocks_split
;
7543 /* Removes edge E and all the blocks dominated by it, and updates dominance
7544 information. The IL in E->src needs to be updated separately.
7545 If dominance info is not available, only the edge E is removed.*/
7548 remove_edge_and_dominated_blocks (edge e
)
7550 vec
<basic_block
> bbs_to_remove
= vNULL
;
7551 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7555 bool none_removed
= false;
7557 basic_block bb
, dbb
;
7560 if (!dom_info_available_p (CDI_DOMINATORS
))
7566 /* No updating is needed for edges to exit. */
7567 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7569 if (cfgcleanup_altered_bbs
)
7570 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7575 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7576 that is not dominated by E->dest, then this set is empty. Otherwise,
7577 all the basic blocks dominated by E->dest are removed.
7579 Also, to DF_IDOM we store the immediate dominators of the blocks in
7580 the dominance frontier of E (i.e., of the successors of the
7581 removed blocks, if there are any, and of E->dest otherwise). */
7582 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7587 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7589 none_removed
= true;
7594 df
= BITMAP_ALLOC (NULL
);
7595 df_idom
= BITMAP_ALLOC (NULL
);
7598 bitmap_set_bit (df_idom
,
7599 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7602 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7603 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7605 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7607 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7608 bitmap_set_bit (df
, f
->dest
->index
);
7611 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7612 bitmap_clear_bit (df
, bb
->index
);
7614 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7616 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7617 bitmap_set_bit (df_idom
,
7618 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7622 if (cfgcleanup_altered_bbs
)
7624 /* Record the set of the altered basic blocks. */
7625 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7626 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7629 /* Remove E and the cancelled blocks. */
7634 /* Walk backwards so as to get a chance to substitute all
7635 released DEFs into debug stmts. See
7636 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7638 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7639 delete_basic_block (bbs_to_remove
[i
]);
7642 /* Update the dominance information. The immediate dominator may change only
7643 for blocks whose immediate dominator belongs to DF_IDOM:
7645 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7646 removal. Let Z the arbitrary block such that idom(Z) = Y and
7647 Z dominates X after the removal. Before removal, there exists a path P
7648 from Y to X that avoids Z. Let F be the last edge on P that is
7649 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7650 dominates W, and because of P, Z does not dominate W), and W belongs to
7651 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7652 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7654 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7655 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7657 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7658 bbs_to_fix_dom
.safe_push (dbb
);
7661 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7664 BITMAP_FREE (df_idom
);
7665 bbs_to_remove
.release ();
7666 bbs_to_fix_dom
.release ();
7669 /* Purge dead EH edges from basic block BB. */
7672 gimple_purge_dead_eh_edges (basic_block bb
)
7674 bool changed
= false;
7677 gimple stmt
= last_stmt (bb
);
7679 if (stmt
&& stmt_can_throw_internal (stmt
))
7682 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7684 if (e
->flags
& EDGE_EH
)
7686 remove_edge_and_dominated_blocks (e
);
7696 /* Purge dead EH edges from basic block listed in BLOCKS. */
7699 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7701 bool changed
= false;
7705 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7707 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7709 /* Earlier gimple_purge_dead_eh_edges could have removed
7710 this basic block already. */
7711 gcc_assert (bb
|| changed
);
7713 changed
|= gimple_purge_dead_eh_edges (bb
);
7719 /* Purge dead abnormal call edges from basic block BB. */
7722 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7724 bool changed
= false;
7727 gimple stmt
= last_stmt (bb
);
7729 if (!cfun
->has_nonlocal_label
7730 && !cfun
->calls_setjmp
)
7733 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7736 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7738 if (e
->flags
& EDGE_ABNORMAL
)
7740 if (e
->flags
& EDGE_FALLTHRU
)
7741 e
->flags
&= ~EDGE_ABNORMAL
;
7743 remove_edge_and_dominated_blocks (e
);
7753 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7756 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7758 bool changed
= false;
7762 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7764 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7766 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7767 this basic block already. */
7768 gcc_assert (bb
|| changed
);
7770 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7776 /* This function is called whenever a new edge is created or
7780 gimple_execute_on_growing_pred (edge e
)
7782 basic_block bb
= e
->dest
;
7784 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7785 reserve_phi_args_for_new_edge (bb
);
7788 /* This function is called immediately before edge E is removed from
7789 the edge vector E->dest->preds. */
7792 gimple_execute_on_shrinking_pred (edge e
)
7794 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7795 remove_phi_args (e
);
7798 /*---------------------------------------------------------------------------
7799 Helper functions for Loop versioning
7800 ---------------------------------------------------------------------------*/
7802 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7803 of 'first'. Both of them are dominated by 'new_head' basic block. When
7804 'new_head' was created by 'second's incoming edge it received phi arguments
7805 on the edge by split_edge(). Later, additional edge 'e' was created to
7806 connect 'new_head' and 'first'. Now this routine adds phi args on this
7807 additional edge 'e' that new_head to second edge received as part of edge
7811 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7812 basic_block new_head
, edge e
)
7815 gimple_stmt_iterator psi1
, psi2
;
7817 edge e2
= find_edge (new_head
, second
);
7819 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7820 edge, we should always have an edge from NEW_HEAD to SECOND. */
7821 gcc_assert (e2
!= NULL
);
7823 /* Browse all 'second' basic block phi nodes and add phi args to
7824 edge 'e' for 'first' head. PHI args are always in correct order. */
7826 for (psi2
= gsi_start_phis (second
),
7827 psi1
= gsi_start_phis (first
);
7828 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7829 gsi_next (&psi2
), gsi_next (&psi1
))
7831 phi1
= gsi_stmt (psi1
);
7832 phi2
= gsi_stmt (psi2
);
7833 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7834 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7839 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7840 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7841 the destination of the ELSE part. */
7844 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7845 basic_block second_head ATTRIBUTE_UNUSED
,
7846 basic_block cond_bb
, void *cond_e
)
7848 gimple_stmt_iterator gsi
;
7849 gimple new_cond_expr
;
7850 tree cond_expr
= (tree
) cond_e
;
7853 /* Build new conditional expr */
7854 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7855 NULL_TREE
, NULL_TREE
);
7857 /* Add new cond in cond_bb. */
7858 gsi
= gsi_last_bb (cond_bb
);
7859 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7861 /* Adjust edges appropriately to connect new head with first head
7862 as well as second head. */
7863 e0
= single_succ_edge (cond_bb
);
7864 e0
->flags
&= ~EDGE_FALLTHRU
;
7865 e0
->flags
|= EDGE_FALSE_VALUE
;
7869 /* Do book-keeping of basic block BB for the profile consistency checker.
7870 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7871 then do post-pass accounting. Store the counting in RECORD. */
7873 gimple_account_profile_record (basic_block bb
, int after_pass
,
7874 struct profile_record
*record
)
7876 gimple_stmt_iterator i
;
7877 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7879 record
->size
[after_pass
]
7880 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7881 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
7882 record
->time
[after_pass
]
7883 += estimate_num_insns (gsi_stmt (i
),
7884 &eni_time_weights
) * bb
->count
;
7885 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
7886 record
->time
[after_pass
]
7887 += estimate_num_insns (gsi_stmt (i
),
7888 &eni_time_weights
) * bb
->frequency
;
7892 struct cfg_hooks gimple_cfg_hooks
= {
7894 gimple_verify_flow_info
,
7895 gimple_dump_bb
, /* dump_bb */
7896 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
7897 create_bb
, /* create_basic_block */
7898 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
7899 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
7900 gimple_can_remove_branch_p
, /* can_remove_branch_p */
7901 remove_bb
, /* delete_basic_block */
7902 gimple_split_block
, /* split_block */
7903 gimple_move_block_after
, /* move_block_after */
7904 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
7905 gimple_merge_blocks
, /* merge_blocks */
7906 gimple_predict_edge
, /* predict_edge */
7907 gimple_predicted_by_p
, /* predicted_by_p */
7908 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
7909 gimple_duplicate_bb
, /* duplicate_block */
7910 gimple_split_edge
, /* split_edge */
7911 gimple_make_forwarder_block
, /* make_forward_block */
7912 NULL
, /* tidy_fallthru_edge */
7913 NULL
, /* force_nonfallthru */
7914 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
7915 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7916 gimple_flow_call_edges_add
, /* flow_call_edges_add */
7917 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
7918 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7919 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7920 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7921 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7922 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7923 flush_pending_stmts
, /* flush_pending_stmts */
7924 gimple_empty_block_p
, /* block_empty_p */
7925 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
7926 gimple_account_profile_record
,
7930 /* Split all critical edges. */
7933 split_critical_edges (void)
7939 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7940 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7941 mappings around the calls to split_edge. */
7942 start_recording_case_labels ();
7943 FOR_ALL_BB_FN (bb
, cfun
)
7945 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7947 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7949 /* PRE inserts statements to edges and expects that
7950 since split_critical_edges was done beforehand, committing edge
7951 insertions will not split more edges. In addition to critical
7952 edges we must split edges that have multiple successors and
7953 end by control flow statements, such as RESX.
7954 Go ahead and split them too. This matches the logic in
7955 gimple_find_edge_insert_loc. */
7956 else if ((!single_pred_p (e
->dest
)
7957 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
7958 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7959 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
7960 && !(e
->flags
& EDGE_ABNORMAL
))
7962 gimple_stmt_iterator gsi
;
7964 gsi
= gsi_last_bb (e
->src
);
7965 if (!gsi_end_p (gsi
)
7966 && stmt_ends_bb_p (gsi_stmt (gsi
))
7967 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
7968 && !gimple_call_builtin_p (gsi_stmt (gsi
),
7974 end_recording_case_labels ();
7980 const pass_data pass_data_split_crit_edges
=
7982 GIMPLE_PASS
, /* type */
7983 "crited", /* name */
7984 OPTGROUP_NONE
, /* optinfo_flags */
7985 false, /* has_gate */
7986 true, /* has_execute */
7987 TV_TREE_SPLIT_EDGES
, /* tv_id */
7988 PROP_cfg
, /* properties_required */
7989 PROP_no_crit_edges
, /* properties_provided */
7990 0, /* properties_destroyed */
7991 0, /* todo_flags_start */
7992 TODO_verify_flow
, /* todo_flags_finish */
7995 class pass_split_crit_edges
: public gimple_opt_pass
7998 pass_split_crit_edges (gcc::context
*ctxt
)
7999 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8002 /* opt_pass methods: */
8003 unsigned int execute () { return split_critical_edges (); }
8005 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8006 }; // class pass_split_crit_edges
8011 make_pass_split_crit_edges (gcc::context
*ctxt
)
8013 return new pass_split_crit_edges (ctxt
);
8017 /* Build a ternary operation and gimplify it. Emit code before GSI.
8018 Return the gimple_val holding the result. */
8021 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8022 tree type
, tree a
, tree b
, tree c
)
8025 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8027 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8030 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8034 /* Build a binary operation and gimplify it. Emit code before GSI.
8035 Return the gimple_val holding the result. */
8038 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8039 tree type
, tree a
, tree b
)
8043 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8046 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8050 /* Build a unary operation and gimplify it. Emit code before GSI.
8051 Return the gimple_val holding the result. */
8054 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8059 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8062 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8068 /* Emit return warnings. */
8071 execute_warn_function_return (void)
8073 source_location location
;
8078 if (!targetm
.warn_func_return (cfun
->decl
))
8081 /* If we have a path to EXIT, then we do return. */
8082 if (TREE_THIS_VOLATILE (cfun
->decl
)
8083 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0)
8085 location
= UNKNOWN_LOCATION
;
8086 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8088 last
= last_stmt (e
->src
);
8089 if ((gimple_code (last
) == GIMPLE_RETURN
8090 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8091 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8094 if (location
== UNKNOWN_LOCATION
)
8095 location
= cfun
->function_end_locus
;
8096 warning_at (location
, 0, "%<noreturn%> function does return");
8099 /* If we see "return;" in some basic block, then we do reach the end
8100 without returning a value. */
8101 else if (warn_return_type
8102 && !TREE_NO_WARNING (cfun
->decl
)
8103 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0
8104 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
8106 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8108 gimple last
= last_stmt (e
->src
);
8109 if (gimple_code (last
) == GIMPLE_RETURN
8110 && gimple_return_retval (last
) == NULL
8111 && !gimple_no_warning_p (last
))
8113 location
= gimple_location (last
);
8114 if (location
== UNKNOWN_LOCATION
)
8115 location
= cfun
->function_end_locus
;
8116 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8117 TREE_NO_WARNING (cfun
->decl
) = 1;
8126 /* Given a basic block B which ends with a conditional and has
8127 precisely two successors, determine which of the edges is taken if
8128 the conditional is true and which is taken if the conditional is
8129 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8132 extract_true_false_edges_from_block (basic_block b
,
8136 edge e
= EDGE_SUCC (b
, 0);
8138 if (e
->flags
& EDGE_TRUE_VALUE
)
8141 *false_edge
= EDGE_SUCC (b
, 1);
8146 *true_edge
= EDGE_SUCC (b
, 1);
8152 const pass_data pass_data_warn_function_return
=
8154 GIMPLE_PASS
, /* type */
8155 "*warn_function_return", /* name */
8156 OPTGROUP_NONE
, /* optinfo_flags */
8157 false, /* has_gate */
8158 true, /* has_execute */
8159 TV_NONE
, /* tv_id */
8160 PROP_cfg
, /* properties_required */
8161 0, /* properties_provided */
8162 0, /* properties_destroyed */
8163 0, /* todo_flags_start */
8164 0, /* todo_flags_finish */
8167 class pass_warn_function_return
: public gimple_opt_pass
8170 pass_warn_function_return (gcc::context
*ctxt
)
8171 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8174 /* opt_pass methods: */
8175 unsigned int execute () { return execute_warn_function_return (); }
8177 }; // class pass_warn_function_return
8182 make_pass_warn_function_return (gcc::context
*ctxt
)
8184 return new pass_warn_function_return (ctxt
);
8187 /* Walk a gimplified function and warn for functions whose return value is
8188 ignored and attribute((warn_unused_result)) is set. This is done before
8189 inlining, so we don't have to worry about that. */
8192 do_warn_unused_result (gimple_seq seq
)
8195 gimple_stmt_iterator i
;
8197 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8199 gimple g
= gsi_stmt (i
);
8201 switch (gimple_code (g
))
8204 do_warn_unused_result (gimple_bind_body (g
));
8207 do_warn_unused_result (gimple_try_eval (g
));
8208 do_warn_unused_result (gimple_try_cleanup (g
));
8211 do_warn_unused_result (gimple_catch_handler (g
));
8213 case GIMPLE_EH_FILTER
:
8214 do_warn_unused_result (gimple_eh_filter_failure (g
));
8218 if (gimple_call_lhs (g
))
8220 if (gimple_call_internal_p (g
))
8223 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8224 LHS. All calls whose value is ignored should be
8225 represented like this. Look for the attribute. */
8226 fdecl
= gimple_call_fndecl (g
);
8227 ftype
= gimple_call_fntype (g
);
8229 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8231 location_t loc
= gimple_location (g
);
8234 warning_at (loc
, OPT_Wunused_result
,
8235 "ignoring return value of %qD, "
8236 "declared with attribute warn_unused_result",
8239 warning_at (loc
, OPT_Wunused_result
,
8240 "ignoring return value of function "
8241 "declared with attribute warn_unused_result");
8246 /* Not a container, not a call, or a call whose value is used. */
8253 run_warn_unused_result (void)
8255 do_warn_unused_result (gimple_body (current_function_decl
));
8260 gate_warn_unused_result (void)
8262 return flag_warn_unused_result
;
8267 const pass_data pass_data_warn_unused_result
=
8269 GIMPLE_PASS
, /* type */
8270 "*warn_unused_result", /* name */
8271 OPTGROUP_NONE
, /* optinfo_flags */
8272 true, /* has_gate */
8273 true, /* has_execute */
8274 TV_NONE
, /* tv_id */
8275 PROP_gimple_any
, /* properties_required */
8276 0, /* properties_provided */
8277 0, /* properties_destroyed */
8278 0, /* todo_flags_start */
8279 0, /* todo_flags_finish */
8282 class pass_warn_unused_result
: public gimple_opt_pass
8285 pass_warn_unused_result (gcc::context
*ctxt
)
8286 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8289 /* opt_pass methods: */
8290 bool gate () { return gate_warn_unused_result (); }
8291 unsigned int execute () { return run_warn_unused_result (); }
8293 }; // class pass_warn_unused_result
8298 make_pass_warn_unused_result (gcc::context
*ctxt
)
8300 return new pass_warn_unused_result (ctxt
);
8303 /* IPA passes, compilation of earlier functions or inlining
8304 might have changed some properties, such as marked functions nothrow,
8305 pure, const or noreturn.
8306 Remove redundant edges and basic blocks, and create new ones if necessary.
8308 This pass can't be executed as stand alone pass from pass manager, because
8309 in between inlining and this fixup the verify_flow_info would fail. */
8312 execute_fixup_cfg (void)
8315 gimple_stmt_iterator gsi
;
8316 int todo
= gimple_in_ssa_p (cfun
) ? TODO_verify_ssa
: 0;
8317 gcov_type count_scale
;
8322 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8323 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8325 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8326 cgraph_get_node (current_function_decl
)->count
;
8327 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8328 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8331 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8332 e
->count
= apply_scale (e
->count
, count_scale
);
8334 FOR_EACH_BB_FN (bb
, cfun
)
8336 bb
->count
= apply_scale (bb
->count
, count_scale
);
8337 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8339 gimple stmt
= gsi_stmt (gsi
);
8340 tree decl
= is_gimple_call (stmt
)
8341 ? gimple_call_fndecl (stmt
)
8345 int flags
= gimple_call_flags (stmt
);
8346 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8348 if (gimple_purge_dead_abnormal_call_edges (bb
))
8349 todo
|= TODO_cleanup_cfg
;
8351 if (gimple_in_ssa_p (cfun
))
8353 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8358 if (flags
& ECF_NORETURN
8359 && fixup_noreturn_call (stmt
))
8360 todo
|= TODO_cleanup_cfg
;
8363 if (maybe_clean_eh_stmt (stmt
)
8364 && gimple_purge_dead_eh_edges (bb
))
8365 todo
|= TODO_cleanup_cfg
;
8368 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8369 e
->count
= apply_scale (e
->count
, count_scale
);
8371 /* If we have a basic block with no successors that does not
8372 end with a control statement or a noreturn call end it with
8373 a call to __builtin_unreachable. This situation can occur
8374 when inlining a noreturn call that does in fact return. */
8375 if (EDGE_COUNT (bb
->succs
) == 0)
8377 gimple stmt
= last_stmt (bb
);
8379 || (!is_ctrl_stmt (stmt
)
8380 && (!is_gimple_call (stmt
)
8381 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8383 stmt
= gimple_build_call
8384 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8385 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8386 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8390 if (count_scale
!= REG_BR_PROB_BASE
)
8391 compute_function_frequency ();
8393 /* We just processed all calls. */
8394 if (cfun
->gimple_df
)
8395 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8397 /* Dump a textual representation of the flowgraph. */
8399 gimple_dump_cfg (dump_file
, dump_flags
);
8402 && (todo
& TODO_cleanup_cfg
))
8403 loops_state_set (LOOPS_NEED_FIXUP
);
8410 const pass_data pass_data_fixup_cfg
=
8412 GIMPLE_PASS
, /* type */
8413 "*free_cfg_annotations", /* name */
8414 OPTGROUP_NONE
, /* optinfo_flags */
8415 false, /* has_gate */
8416 true, /* has_execute */
8417 TV_NONE
, /* tv_id */
8418 PROP_cfg
, /* properties_required */
8419 0, /* properties_provided */
8420 0, /* properties_destroyed */
8421 0, /* todo_flags_start */
8422 0, /* todo_flags_finish */
8425 class pass_fixup_cfg
: public gimple_opt_pass
8428 pass_fixup_cfg (gcc::context
*ctxt
)
8429 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8432 /* opt_pass methods: */
8433 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8434 unsigned int execute () { return execute_fixup_cfg (); }
8436 }; // class pass_fixup_cfg
8441 make_pass_fixup_cfg (gcc::context
*ctxt
)
8443 return new pass_fixup_cfg (ctxt
);
8446 /* Garbage collection support for edge_def. */
8448 extern void gt_ggc_mx (tree
&);
8449 extern void gt_ggc_mx (gimple
&);
8450 extern void gt_ggc_mx (rtx
&);
8451 extern void gt_ggc_mx (basic_block
&);
8454 gt_ggc_mx (edge_def
*e
)
8456 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8458 gt_ggc_mx (e
->dest
);
8459 if (current_ir_type () == IR_GIMPLE
)
8460 gt_ggc_mx (e
->insns
.g
);
8462 gt_ggc_mx (e
->insns
.r
);
8466 /* PCH support for edge_def. */
8468 extern void gt_pch_nx (tree
&);
8469 extern void gt_pch_nx (gimple
&);
8470 extern void gt_pch_nx (rtx
&);
8471 extern void gt_pch_nx (basic_block
&);
8474 gt_pch_nx (edge_def
*e
)
8476 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8478 gt_pch_nx (e
->dest
);
8479 if (current_ir_type () == IR_GIMPLE
)
8480 gt_pch_nx (e
->insns
.g
);
8482 gt_pch_nx (e
->insns
.r
);
8487 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8489 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8490 op (&(e
->src
), cookie
);
8491 op (&(e
->dest
), cookie
);
8492 if (current_ir_type () == IR_GIMPLE
)
8493 op (&(e
->insns
.g
), cookie
);
8495 op (&(e
->insns
.r
), cookie
);
8496 op (&(block
), cookie
);