1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
35 #include "gimple-pretty-print.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 #include "stringpool.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
53 #include "tree-dump.h"
54 #include "tree-pass.h"
55 #include "diagnostic-core.h"
58 #include "tree-ssa-propagate.h"
59 #include "value-prof.h"
60 #include "pointer-set.h"
61 #include "tree-inline.h"
63 #include "tree-ssa-live.h"
65 #include "tree-cfgcleanup.h"
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
75 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
76 which use a particular edge. The CASE_LABEL_EXPRs are chained together
77 via their CASE_CHAIN field, which we clear after we're done with the
78 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
80 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
81 update the case vector in response to edge redirections.
83 Right now this table is set up and torn down at key points in the
84 compilation process. It would be nice if we could make the table
85 more persistent. The key is getting notification of changes to
86 the CFG (particularly edge removal, creation and redirection). */
88 static struct pointer_map_t
*edge_to_cases
;
90 /* If we record edge_to_cases, this bitmap will hold indexes
91 of basic blocks that end in a GIMPLE_SWITCH which we touched
92 due to edge manipulations. */
94 static bitmap touched_switch_bbs
;
99 long num_merged_labels
;
102 static struct cfg_stats_d cfg_stats
;
104 /* Nonzero if we found a computed goto while building basic blocks. */
105 static bool found_computed_goto
;
107 /* Hash table to store last discriminator assigned for each locus. */
108 struct locus_discrim_map
114 /* Hashtable helpers. */
116 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
118 typedef locus_discrim_map value_type
;
119 typedef locus_discrim_map compare_type
;
120 static inline hashval_t
hash (const value_type
*);
121 static inline bool equal (const value_type
*, const compare_type
*);
124 /* Trivial hash function for a location_t. ITEM is a pointer to
125 a hash table entry that maps a location_t to a discriminator. */
128 locus_discrim_hasher::hash (const value_type
*item
)
130 return LOCATION_LINE (item
->locus
);
133 /* Equality function for the locus-to-discriminator map. A and B
134 point to the two hash table entries to compare. */
137 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
139 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
142 static hash_table
<locus_discrim_hasher
> discriminator_per_locus
;
144 /* Basic blocks and flowgraphs. */
145 static void make_blocks (gimple_seq
);
146 static void factor_computed_gotos (void);
149 static void make_edges (void);
150 static void assign_discriminators (void);
151 static void make_cond_expr_edges (basic_block
);
152 static void make_gimple_switch_edges (basic_block
);
153 static void make_goto_expr_edges (basic_block
);
154 static void make_gimple_asm_edges (basic_block
);
155 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
156 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
157 static unsigned int split_critical_edges (void);
159 /* Various helpers. */
160 static inline bool stmt_starts_bb_p (gimple
, gimple
);
161 static int gimple_verify_flow_info (void);
162 static void gimple_make_forwarder_block (edge
);
163 static gimple
first_non_label_stmt (basic_block
);
164 static bool verify_gimple_transaction (gimple
);
166 /* Flowgraph optimization and cleanup. */
167 static void gimple_merge_blocks (basic_block
, basic_block
);
168 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
169 static void remove_bb (basic_block
);
170 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
171 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
172 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
173 static tree
find_case_label_for_value (gimple
, tree
);
176 init_empty_tree_cfg_for_function (struct function
*fn
)
178 /* Initialize the basic block array. */
180 profile_status_for_function (fn
) = PROFILE_ABSENT
;
181 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
182 last_basic_block_for_function (fn
) = NUM_FIXED_BLOCKS
;
183 vec_alloc (basic_block_info_for_function (fn
), initial_cfg_capacity
);
184 vec_safe_grow_cleared (basic_block_info_for_function (fn
),
185 initial_cfg_capacity
);
187 /* Build a mapping of labels to their associated blocks. */
188 vec_alloc (label_to_block_map_for_function (fn
), initial_cfg_capacity
);
189 vec_safe_grow_cleared (label_to_block_map_for_function (fn
),
190 initial_cfg_capacity
);
192 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, ENTRY_BLOCK
,
193 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
));
194 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, EXIT_BLOCK
,
195 EXIT_BLOCK_PTR_FOR_FUNCTION (fn
));
197 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
)->next_bb
198 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn
);
199 EXIT_BLOCK_PTR_FOR_FUNCTION (fn
)->prev_bb
200 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn
);
204 init_empty_tree_cfg (void)
206 init_empty_tree_cfg_for_function (cfun
);
209 /*---------------------------------------------------------------------------
211 ---------------------------------------------------------------------------*/
213 /* Entry point to the CFG builder for trees. SEQ is the sequence of
214 statements to be added to the flowgraph. */
217 build_gimple_cfg (gimple_seq seq
)
219 /* Register specific gimple functions. */
220 gimple_register_cfg_hooks ();
222 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
224 init_empty_tree_cfg ();
226 found_computed_goto
= 0;
229 /* Computed gotos are hell to deal with, especially if there are
230 lots of them with a large number of destinations. So we factor
231 them to a common computed goto location before we build the
232 edge list. After we convert back to normal form, we will un-factor
233 the computed gotos since factoring introduces an unwanted jump. */
234 if (found_computed_goto
)
235 factor_computed_gotos ();
237 /* Make sure there is always at least one block, even if it's empty. */
238 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
239 create_empty_bb (ENTRY_BLOCK_PTR
);
241 /* Adjust the size of the array. */
242 if (basic_block_info
->length () < (size_t) n_basic_blocks_for_fn (cfun
))
243 vec_safe_grow_cleared (basic_block_info
, n_basic_blocks_for_fn (cfun
));
245 /* To speed up statement iterator walks, we first purge dead labels. */
246 cleanup_dead_labels ();
248 /* Group case nodes to reduce the number of edges.
249 We do this after cleaning up dead labels because otherwise we miss
250 a lot of obvious case merging opportunities. */
251 group_case_labels ();
253 /* Create the edges of the flowgraph. */
254 discriminator_per_locus
.create (13);
256 assign_discriminators ();
257 cleanup_dead_labels ();
258 discriminator_per_locus
.dispose ();
262 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
263 it and set loop->safelen to INT_MAX. We assume that the annotation
264 comes immediately before the condition. */
267 replace_loop_annotate ()
271 gimple_stmt_iterator gsi
;
274 FOR_EACH_LOOP (loop
, 0)
276 gsi
= gsi_last_bb (loop
->header
);
277 stmt
= gsi_stmt (gsi
);
278 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
280 gsi_prev_nondebug (&gsi
);
283 stmt
= gsi_stmt (gsi
);
284 if (gimple_code (stmt
) != GIMPLE_CALL
)
286 if (!gimple_call_internal_p (stmt
)
287 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
289 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
290 != annot_expr_ivdep_kind
)
292 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
293 gimple_call_arg (stmt
, 0));
294 gsi_replace (&gsi
, stmt
, true);
295 loop
->safelen
= INT_MAX
;
299 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
302 gsi
= gsi_last_bb (bb
);
303 stmt
= gsi_stmt (gsi
);
304 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
305 gsi_prev_nondebug (&gsi
);
308 stmt
= gsi_stmt (gsi
);
309 if (gimple_code (stmt
) != GIMPLE_CALL
)
311 if (!gimple_call_internal_p (stmt
)
312 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
314 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
315 != annot_expr_ivdep_kind
)
317 warning_at (gimple_location (stmt
), 0, "ignoring %<GCC ivdep%> "
319 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
320 gimple_call_arg (stmt
, 0));
321 gsi_replace (&gsi
, stmt
, true);
327 execute_build_cfg (void)
329 gimple_seq body
= gimple_body (current_function_decl
);
331 build_gimple_cfg (body
);
332 gimple_set_body (current_function_decl
, NULL
);
333 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
335 fprintf (dump_file
, "Scope blocks:\n");
336 dump_scope_blocks (dump_file
, dump_flags
);
339 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
340 replace_loop_annotate ();
346 const pass_data pass_data_build_cfg
=
348 GIMPLE_PASS
, /* type */
350 OPTGROUP_NONE
, /* optinfo_flags */
351 false, /* has_gate */
352 true, /* has_execute */
353 TV_TREE_CFG
, /* tv_id */
354 PROP_gimple_leh
, /* properties_required */
355 ( PROP_cfg
| PROP_loops
), /* properties_provided */
356 0, /* properties_destroyed */
357 0, /* todo_flags_start */
358 TODO_verify_stmts
, /* todo_flags_finish */
361 class pass_build_cfg
: public gimple_opt_pass
364 pass_build_cfg (gcc::context
*ctxt
)
365 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
368 /* opt_pass methods: */
369 unsigned int execute () { return execute_build_cfg (); }
371 }; // class pass_build_cfg
376 make_pass_build_cfg (gcc::context
*ctxt
)
378 return new pass_build_cfg (ctxt
);
382 /* Return true if T is a computed goto. */
385 computed_goto_p (gimple t
)
387 return (gimple_code (t
) == GIMPLE_GOTO
388 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
391 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
392 the other edge points to a bb with just __builtin_unreachable ().
393 I.e. return true for C->M edge in:
401 __builtin_unreachable ();
405 assert_unreachable_fallthru_edge_p (edge e
)
407 basic_block pred_bb
= e
->src
;
408 gimple last
= last_stmt (pred_bb
);
409 if (last
&& gimple_code (last
) == GIMPLE_COND
)
411 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
412 if (other_bb
== e
->dest
)
413 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
414 if (EDGE_COUNT (other_bb
->succs
) == 0)
416 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
421 stmt
= gsi_stmt (gsi
);
422 if (is_gimple_debug (stmt
))
424 gsi_next_nondebug (&gsi
);
427 stmt
= gsi_stmt (gsi
);
429 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
436 /* Search the CFG for any computed gotos. If found, factor them to a
437 common computed goto site. Also record the location of that site so
438 that we can un-factor the gotos after we have converted back to
442 factor_computed_gotos (void)
445 tree factored_label_decl
= NULL
;
447 gimple factored_computed_goto_label
= NULL
;
448 gimple factored_computed_goto
= NULL
;
450 /* We know there are one or more computed gotos in this function.
451 Examine the last statement in each basic block to see if the block
452 ends with a computed goto. */
456 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
462 last
= gsi_stmt (gsi
);
464 /* Ignore the computed goto we create when we factor the original
466 if (last
== factored_computed_goto
)
469 /* If the last statement is a computed goto, factor it. */
470 if (computed_goto_p (last
))
474 /* The first time we find a computed goto we need to create
475 the factored goto block and the variable each original
476 computed goto will use for their goto destination. */
477 if (!factored_computed_goto
)
479 basic_block new_bb
= create_empty_bb (bb
);
480 gimple_stmt_iterator new_gsi
= gsi_start_bb (new_bb
);
482 /* Create the destination of the factored goto. Each original
483 computed goto will put its desired destination into this
484 variable and jump to the label we create immediately
486 var
= create_tmp_var (ptr_type_node
, "gotovar");
488 /* Build a label for the new block which will contain the
489 factored computed goto. */
490 factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION
);
491 factored_computed_goto_label
492 = gimple_build_label (factored_label_decl
);
493 gsi_insert_after (&new_gsi
, factored_computed_goto_label
,
496 /* Build our new computed goto. */
497 factored_computed_goto
= gimple_build_goto (var
);
498 gsi_insert_after (&new_gsi
, factored_computed_goto
, GSI_NEW_STMT
);
501 /* Copy the original computed goto's destination into VAR. */
502 assignment
= gimple_build_assign (var
, gimple_goto_dest (last
));
503 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
505 /* And re-vector the computed goto to the new destination. */
506 gimple_goto_set_dest (last
, factored_label_decl
);
512 /* Build a flowgraph for the sequence of stmts SEQ. */
515 make_blocks (gimple_seq seq
)
517 gimple_stmt_iterator i
= gsi_start (seq
);
519 bool start_new_block
= true;
520 bool first_stmt_of_seq
= true;
521 basic_block bb
= ENTRY_BLOCK_PTR
;
523 while (!gsi_end_p (i
))
530 /* If the statement starts a new basic block or if we have determined
531 in a previous pass that we need to create a new block for STMT, do
533 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
535 if (!first_stmt_of_seq
)
536 gsi_split_seq_before (&i
, &seq
);
537 bb
= create_basic_block (seq
, NULL
, bb
);
538 start_new_block
= false;
541 /* Now add STMT to BB and create the subgraphs for special statement
543 gimple_set_bb (stmt
, bb
);
545 if (computed_goto_p (stmt
))
546 found_computed_goto
= true;
548 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
550 if (stmt_ends_bb_p (stmt
))
552 /* If the stmt can make abnormal goto use a new temporary
553 for the assignment to the LHS. This makes sure the old value
554 of the LHS is available on the abnormal edge. Otherwise
555 we will end up with overlapping life-ranges for abnormal
557 if (gimple_has_lhs (stmt
)
558 && stmt_can_make_abnormal_goto (stmt
)
559 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
561 tree lhs
= gimple_get_lhs (stmt
);
562 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
563 gimple s
= gimple_build_assign (lhs
, tmp
);
564 gimple_set_location (s
, gimple_location (stmt
));
565 gimple_set_block (s
, gimple_block (stmt
));
566 gimple_set_lhs (stmt
, tmp
);
567 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
568 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
569 DECL_GIMPLE_REG_P (tmp
) = 1;
570 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
572 start_new_block
= true;
576 first_stmt_of_seq
= false;
581 /* Create and return a new empty basic block after bb AFTER. */
584 create_bb (void *h
, void *e
, basic_block after
)
590 /* Create and initialize a new basic block. Since alloc_block uses
591 GC allocation that clears memory to allocate a basic block, we do
592 not have to clear the newly allocated basic block here. */
595 bb
->index
= last_basic_block
;
597 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
599 /* Add the new block to the linked list of blocks. */
600 link_block (bb
, after
);
602 /* Grow the basic block array if needed. */
603 if ((size_t) last_basic_block
== basic_block_info
->length ())
605 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
606 vec_safe_grow_cleared (basic_block_info
, new_size
);
609 /* Add the newly created block to the array. */
610 SET_BASIC_BLOCK (last_basic_block
, bb
);
612 n_basic_blocks_for_fn (cfun
)++;
619 /*---------------------------------------------------------------------------
621 ---------------------------------------------------------------------------*/
623 /* Fold COND_EXPR_COND of each COND_EXPR. */
626 fold_cond_expr_cond (void)
632 gimple stmt
= last_stmt (bb
);
634 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
636 location_t loc
= gimple_location (stmt
);
640 fold_defer_overflow_warnings ();
641 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
642 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
645 zerop
= integer_zerop (cond
);
646 onep
= integer_onep (cond
);
649 zerop
= onep
= false;
651 fold_undefer_overflow_warnings (zerop
|| onep
,
653 WARN_STRICT_OVERFLOW_CONDITIONAL
);
655 gimple_cond_make_false (stmt
);
657 gimple_cond_make_true (stmt
);
662 /* Join all the blocks in the flowgraph. */
668 struct omp_region
*cur_region
= NULL
;
670 /* Create an edge from entry to the first block with executable
672 make_edge (ENTRY_BLOCK_PTR
, BASIC_BLOCK (NUM_FIXED_BLOCKS
), EDGE_FALLTHRU
);
674 /* Traverse the basic block array placing edges. */
677 gimple last
= last_stmt (bb
);
682 enum gimple_code code
= gimple_code (last
);
686 make_goto_expr_edges (bb
);
690 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
694 make_cond_expr_edges (bb
);
698 make_gimple_switch_edges (bb
);
702 make_eh_edges (last
);
705 case GIMPLE_EH_DISPATCH
:
706 fallthru
= make_eh_dispatch_edges (last
);
710 /* If this function receives a nonlocal goto, then we need to
711 make edges from this call site to all the nonlocal goto
713 if (stmt_can_make_abnormal_goto (last
))
714 make_abnormal_goto_edges (bb
, true);
716 /* If this statement has reachable exception handlers, then
717 create abnormal edges to them. */
718 make_eh_edges (last
);
720 /* BUILTIN_RETURN is really a return statement. */
721 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
722 make_edge (bb
, EXIT_BLOCK_PTR
, 0), fallthru
= false;
723 /* Some calls are known not to return. */
725 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
729 /* A GIMPLE_ASSIGN may throw internally and thus be considered
731 if (is_ctrl_altering_stmt (last
))
732 make_eh_edges (last
);
737 make_gimple_asm_edges (bb
);
742 fallthru
= make_gimple_omp_edges (bb
, &cur_region
);
745 case GIMPLE_TRANSACTION
:
747 tree abort_label
= gimple_transaction_label (last
);
749 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
755 gcc_assert (!stmt_ends_bb_p (last
));
763 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
768 /* Fold COND_EXPR_COND of each COND_EXPR. */
769 fold_cond_expr_cond ();
772 /* Find the next available discriminator value for LOCUS. The
773 discriminator distinguishes among several basic blocks that
774 share a common locus, allowing for more accurate sample-based
778 next_discriminator_for_locus (location_t locus
)
780 struct locus_discrim_map item
;
781 struct locus_discrim_map
**slot
;
784 item
.discriminator
= 0;
785 slot
= discriminator_per_locus
.find_slot_with_hash (
786 &item
, LOCATION_LINE (locus
), INSERT
);
788 if (*slot
== HTAB_EMPTY_ENTRY
)
790 *slot
= XNEW (struct locus_discrim_map
);
792 (*slot
)->locus
= locus
;
793 (*slot
)->discriminator
= 0;
795 (*slot
)->discriminator
++;
796 return (*slot
)->discriminator
;
799 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
802 same_line_p (location_t locus1
, location_t locus2
)
804 expanded_location from
, to
;
806 if (locus1
== locus2
)
809 from
= expand_location (locus1
);
810 to
= expand_location (locus2
);
812 if (from
.line
!= to
.line
)
814 if (from
.file
== to
.file
)
816 return (from
.file
!= NULL
818 && filename_cmp (from
.file
, to
.file
) == 0);
821 /* Assign discriminators to each basic block. */
824 assign_discriminators (void)
832 gimple last
= last_stmt (bb
);
833 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
835 if (locus
== UNKNOWN_LOCATION
)
838 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
840 gimple first
= first_non_label_stmt (e
->dest
);
841 gimple last
= last_stmt (e
->dest
);
842 if ((first
&& same_line_p (locus
, gimple_location (first
)))
843 || (last
&& same_line_p (locus
, gimple_location (last
))))
845 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
846 bb
->discriminator
= next_discriminator_for_locus (locus
);
848 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
854 /* Create the edges for a GIMPLE_COND starting at block BB. */
857 make_cond_expr_edges (basic_block bb
)
859 gimple entry
= last_stmt (bb
);
860 gimple then_stmt
, else_stmt
;
861 basic_block then_bb
, else_bb
;
862 tree then_label
, else_label
;
866 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
868 /* Entry basic blocks for each component. */
869 then_label
= gimple_cond_true_label (entry
);
870 else_label
= gimple_cond_false_label (entry
);
871 then_bb
= label_to_block (then_label
);
872 else_bb
= label_to_block (else_label
);
873 then_stmt
= first_stmt (then_bb
);
874 else_stmt
= first_stmt (else_bb
);
876 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
877 e
->goto_locus
= gimple_location (then_stmt
);
878 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
880 e
->goto_locus
= gimple_location (else_stmt
);
882 /* We do not need the labels anymore. */
883 gimple_cond_set_true_label (entry
, NULL_TREE
);
884 gimple_cond_set_false_label (entry
, NULL_TREE
);
888 /* Called for each element in the hash table (P) as we delete the
889 edge to cases hash table.
891 Clear all the TREE_CHAINs to prevent problems with copying of
892 SWITCH_EXPRs and structure sharing rules, then free the hash table
896 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
897 void *data ATTRIBUTE_UNUSED
)
901 for (t
= (tree
) *value
; t
; t
= next
)
903 next
= CASE_CHAIN (t
);
904 CASE_CHAIN (t
) = NULL
;
911 /* Start recording information mapping edges to case labels. */
914 start_recording_case_labels (void)
916 gcc_assert (edge_to_cases
== NULL
);
917 edge_to_cases
= pointer_map_create ();
918 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
921 /* Return nonzero if we are recording information for case labels. */
924 recording_case_labels_p (void)
926 return (edge_to_cases
!= NULL
);
929 /* Stop recording information mapping edges to case labels and
930 remove any information we have recorded. */
932 end_recording_case_labels (void)
936 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
937 pointer_map_destroy (edge_to_cases
);
938 edge_to_cases
= NULL
;
939 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
941 basic_block bb
= BASIC_BLOCK (i
);
944 gimple stmt
= last_stmt (bb
);
945 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
946 group_case_labels_stmt (stmt
);
949 BITMAP_FREE (touched_switch_bbs
);
952 /* If we are inside a {start,end}_recording_cases block, then return
953 a chain of CASE_LABEL_EXPRs from T which reference E.
955 Otherwise return NULL. */
958 get_cases_for_edge (edge e
, gimple t
)
963 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
964 chains available. Return NULL so the caller can detect this case. */
965 if (!recording_case_labels_p ())
968 slot
= pointer_map_contains (edge_to_cases
, e
);
972 /* If we did not find E in the hash table, then this must be the first
973 time we have been queried for information about E & T. Add all the
974 elements from T to the hash table then perform the query again. */
976 n
= gimple_switch_num_labels (t
);
977 for (i
= 0; i
< n
; i
++)
979 tree elt
= gimple_switch_label (t
, i
);
980 tree lab
= CASE_LABEL (elt
);
981 basic_block label_bb
= label_to_block (lab
);
982 edge this_edge
= find_edge (e
->src
, label_bb
);
984 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
986 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
987 CASE_CHAIN (elt
) = (tree
) *slot
;
991 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
994 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
997 make_gimple_switch_edges (basic_block bb
)
999 gimple entry
= last_stmt (bb
);
1002 n
= gimple_switch_num_labels (entry
);
1004 for (i
= 0; i
< n
; ++i
)
1006 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1007 basic_block label_bb
= label_to_block (lab
);
1008 make_edge (bb
, label_bb
, 0);
1013 /* Return the basic block holding label DEST. */
1016 label_to_block_fn (struct function
*ifun
, tree dest
)
1018 int uid
= LABEL_DECL_UID (dest
);
1020 /* We would die hard when faced by an undefined label. Emit a label to
1021 the very first basic block. This will hopefully make even the dataflow
1022 and undefined variable warnings quite right. */
1023 if (seen_error () && uid
< 0)
1025 gimple_stmt_iterator gsi
= gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
1028 stmt
= gimple_build_label (dest
);
1029 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1030 uid
= LABEL_DECL_UID (dest
);
1032 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1034 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1037 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1038 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1041 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
1043 basic_block target_bb
;
1044 gimple_stmt_iterator gsi
;
1046 FOR_EACH_BB (target_bb
)
1048 for (gsi
= gsi_start_bb (target_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1050 gimple label_stmt
= gsi_stmt (gsi
);
1053 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
1056 target
= gimple_label_label (label_stmt
);
1058 /* Make an edge to every label block that has been marked as a
1059 potential target for a computed goto or a non-local goto. */
1060 if ((FORCED_LABEL (target
) && !for_call
)
1061 || (DECL_NONLOCAL (target
) && for_call
))
1063 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1067 if (!gsi_end_p (gsi
)
1068 && is_gimple_debug (gsi_stmt (gsi
)))
1069 gsi_next_nondebug (&gsi
);
1070 if (!gsi_end_p (gsi
))
1072 /* Make an edge to every setjmp-like call. */
1073 gimple call_stmt
= gsi_stmt (gsi
);
1074 if (is_gimple_call (call_stmt
)
1075 && (gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
))
1076 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1081 /* Create edges for a goto statement at block BB. */
1084 make_goto_expr_edges (basic_block bb
)
1086 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1087 gimple goto_t
= gsi_stmt (last
);
1089 /* A simple GOTO creates normal edges. */
1090 if (simple_goto_p (goto_t
))
1092 tree dest
= gimple_goto_dest (goto_t
);
1093 basic_block label_bb
= label_to_block (dest
);
1094 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1095 e
->goto_locus
= gimple_location (goto_t
);
1096 gsi_remove (&last
, true);
1100 /* A computed GOTO creates abnormal edges. */
1101 make_abnormal_goto_edges (bb
, false);
1104 /* Create edges for an asm statement with labels at block BB. */
1107 make_gimple_asm_edges (basic_block bb
)
1109 gimple stmt
= last_stmt (bb
);
1110 int i
, n
= gimple_asm_nlabels (stmt
);
1112 for (i
= 0; i
< n
; ++i
)
1114 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1115 basic_block label_bb
= label_to_block (label
);
1116 make_edge (bb
, label_bb
, 0);
1120 /*---------------------------------------------------------------------------
1122 ---------------------------------------------------------------------------*/
1124 /* Cleanup useless labels in basic blocks. This is something we wish
1125 to do early because it allows us to group case labels before creating
1126 the edges for the CFG, and it speeds up block statement iterators in
1127 all passes later on.
1128 We rerun this pass after CFG is created, to get rid of the labels that
1129 are no longer referenced. After then we do not run it any more, since
1130 (almost) no new labels should be created. */
1132 /* A map from basic block index to the leading label of that block. */
1133 static struct label_record
1138 /* True if the label is referenced from somewhere. */
1142 /* Given LABEL return the first label in the same basic block. */
1145 main_block_label (tree label
)
1147 basic_block bb
= label_to_block (label
);
1148 tree main_label
= label_for_bb
[bb
->index
].label
;
1150 /* label_to_block possibly inserted undefined label into the chain. */
1153 label_for_bb
[bb
->index
].label
= label
;
1157 label_for_bb
[bb
->index
].used
= true;
1161 /* Clean up redundant labels within the exception tree. */
1164 cleanup_dead_labels_eh (void)
1171 if (cfun
->eh
== NULL
)
1174 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1175 if (lp
&& lp
->post_landing_pad
)
1177 lab
= main_block_label (lp
->post_landing_pad
);
1178 if (lab
!= lp
->post_landing_pad
)
1180 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1181 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1185 FOR_ALL_EH_REGION (r
)
1189 case ERT_MUST_NOT_THROW
:
1195 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1199 c
->label
= main_block_label (lab
);
1204 case ERT_ALLOWED_EXCEPTIONS
:
1205 lab
= r
->u
.allowed
.label
;
1207 r
->u
.allowed
.label
= main_block_label (lab
);
1213 /* Cleanup redundant labels. This is a three-step process:
1214 1) Find the leading label for each block.
1215 2) Redirect all references to labels to the leading labels.
1216 3) Cleanup all useless labels. */
1219 cleanup_dead_labels (void)
1222 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block
);
1224 /* Find a suitable label for each block. We use the first user-defined
1225 label if there is one, or otherwise just the first label we see. */
1228 gimple_stmt_iterator i
;
1230 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1233 gimple stmt
= gsi_stmt (i
);
1235 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1238 label
= gimple_label_label (stmt
);
1240 /* If we have not yet seen a label for the current block,
1241 remember this one and see if there are more labels. */
1242 if (!label_for_bb
[bb
->index
].label
)
1244 label_for_bb
[bb
->index
].label
= label
;
1248 /* If we did see a label for the current block already, but it
1249 is an artificially created label, replace it if the current
1250 label is a user defined label. */
1251 if (!DECL_ARTIFICIAL (label
)
1252 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1254 label_for_bb
[bb
->index
].label
= label
;
1260 /* Now redirect all jumps/branches to the selected label.
1261 First do so for each block ending in a control statement. */
1264 gimple stmt
= last_stmt (bb
);
1265 tree label
, new_label
;
1270 switch (gimple_code (stmt
))
1273 label
= gimple_cond_true_label (stmt
);
1276 new_label
= main_block_label (label
);
1277 if (new_label
!= label
)
1278 gimple_cond_set_true_label (stmt
, new_label
);
1281 label
= gimple_cond_false_label (stmt
);
1284 new_label
= main_block_label (label
);
1285 if (new_label
!= label
)
1286 gimple_cond_set_false_label (stmt
, new_label
);
1292 size_t i
, n
= gimple_switch_num_labels (stmt
);
1294 /* Replace all destination labels. */
1295 for (i
= 0; i
< n
; ++i
)
1297 tree case_label
= gimple_switch_label (stmt
, i
);
1298 label
= CASE_LABEL (case_label
);
1299 new_label
= main_block_label (label
);
1300 if (new_label
!= label
)
1301 CASE_LABEL (case_label
) = new_label
;
1308 int i
, n
= gimple_asm_nlabels (stmt
);
1310 for (i
= 0; i
< n
; ++i
)
1312 tree cons
= gimple_asm_label_op (stmt
, i
);
1313 tree label
= main_block_label (TREE_VALUE (cons
));
1314 TREE_VALUE (cons
) = label
;
1319 /* We have to handle gotos until they're removed, and we don't
1320 remove them until after we've created the CFG edges. */
1322 if (!computed_goto_p (stmt
))
1324 label
= gimple_goto_dest (stmt
);
1325 new_label
= main_block_label (label
);
1326 if (new_label
!= label
)
1327 gimple_goto_set_dest (stmt
, new_label
);
1331 case GIMPLE_TRANSACTION
:
1333 tree label
= gimple_transaction_label (stmt
);
1336 tree new_label
= main_block_label (label
);
1337 if (new_label
!= label
)
1338 gimple_transaction_set_label (stmt
, new_label
);
1348 /* Do the same for the exception region tree labels. */
1349 cleanup_dead_labels_eh ();
1351 /* Finally, purge dead labels. All user-defined labels and labels that
1352 can be the target of non-local gotos and labels which have their
1353 address taken are preserved. */
1356 gimple_stmt_iterator i
;
1357 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1359 if (!label_for_this_bb
)
1362 /* If the main label of the block is unused, we may still remove it. */
1363 if (!label_for_bb
[bb
->index
].used
)
1364 label_for_this_bb
= NULL
;
1366 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1369 gimple stmt
= gsi_stmt (i
);
1371 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1374 label
= gimple_label_label (stmt
);
1376 if (label
== label_for_this_bb
1377 || !DECL_ARTIFICIAL (label
)
1378 || DECL_NONLOCAL (label
)
1379 || FORCED_LABEL (label
))
1382 gsi_remove (&i
, true);
1386 free (label_for_bb
);
1389 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1390 the ones jumping to the same label.
1391 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1394 group_case_labels_stmt (gimple stmt
)
1396 int old_size
= gimple_switch_num_labels (stmt
);
1397 int i
, j
, new_size
= old_size
;
1398 basic_block default_bb
= NULL
;
1400 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1402 /* Look for possible opportunities to merge cases. */
1404 while (i
< old_size
)
1406 tree base_case
, base_high
;
1407 basic_block base_bb
;
1409 base_case
= gimple_switch_label (stmt
, i
);
1411 gcc_assert (base_case
);
1412 base_bb
= label_to_block (CASE_LABEL (base_case
));
1414 /* Discard cases that have the same destination as the
1416 if (base_bb
== default_bb
)
1418 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1424 base_high
= CASE_HIGH (base_case
)
1425 ? CASE_HIGH (base_case
)
1426 : CASE_LOW (base_case
);
1429 /* Try to merge case labels. Break out when we reach the end
1430 of the label vector or when we cannot merge the next case
1431 label with the current one. */
1432 while (i
< old_size
)
1434 tree merge_case
= gimple_switch_label (stmt
, i
);
1435 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1436 double_int bhp1
= tree_to_double_int (base_high
) + double_int_one
;
1438 /* Merge the cases if they jump to the same place,
1439 and their ranges are consecutive. */
1440 if (merge_bb
== base_bb
1441 && tree_to_double_int (CASE_LOW (merge_case
)) == bhp1
)
1443 base_high
= CASE_HIGH (merge_case
) ?
1444 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1445 CASE_HIGH (base_case
) = base_high
;
1446 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1455 /* Compress the case labels in the label vector, and adjust the
1456 length of the vector. */
1457 for (i
= 0, j
= 0; i
< new_size
; i
++)
1459 while (! gimple_switch_label (stmt
, j
))
1461 gimple_switch_set_label (stmt
, i
,
1462 gimple_switch_label (stmt
, j
++));
1465 gcc_assert (new_size
<= old_size
);
1466 gimple_switch_set_num_labels (stmt
, new_size
);
1469 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1470 and scan the sorted vector of cases. Combine the ones jumping to the
1474 group_case_labels (void)
1480 gimple stmt
= last_stmt (bb
);
1481 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1482 group_case_labels_stmt (stmt
);
1486 /* Checks whether we can merge block B into block A. */
1489 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1492 gimple_stmt_iterator gsi
;
1494 if (!single_succ_p (a
))
1497 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1500 if (single_succ (a
) != b
)
1503 if (!single_pred_p (b
))
1506 if (b
== EXIT_BLOCK_PTR
)
1509 /* If A ends by a statement causing exceptions or something similar, we
1510 cannot merge the blocks. */
1511 stmt
= last_stmt (a
);
1512 if (stmt
&& stmt_ends_bb_p (stmt
))
1515 /* Do not allow a block with only a non-local label to be merged. */
1517 && gimple_code (stmt
) == GIMPLE_LABEL
1518 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1521 /* Examine the labels at the beginning of B. */
1522 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1525 stmt
= gsi_stmt (gsi
);
1526 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1528 lab
= gimple_label_label (stmt
);
1530 /* Do not remove user forced labels or for -O0 any user labels. */
1531 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1535 /* Protect the loop latches. */
1536 if (current_loops
&& b
->loop_father
->latch
== b
)
1539 /* It must be possible to eliminate all phi nodes in B. If ssa form
1540 is not up-to-date and a name-mapping is registered, we cannot eliminate
1541 any phis. Symbols marked for renaming are never a problem though. */
1542 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1544 gimple phi
= gsi_stmt (gsi
);
1545 /* Technically only new names matter. */
1546 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1550 /* When not optimizing, don't merge if we'd lose goto_locus. */
1552 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1554 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1555 gimple_stmt_iterator prev
, next
;
1556 prev
= gsi_last_nondebug_bb (a
);
1557 next
= gsi_after_labels (b
);
1558 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1559 gsi_next_nondebug (&next
);
1560 if ((gsi_end_p (prev
)
1561 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1562 && (gsi_end_p (next
)
1563 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1570 /* Replaces all uses of NAME by VAL. */
1573 replace_uses_by (tree name
, tree val
)
1575 imm_use_iterator imm_iter
;
1580 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1582 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1584 replace_exp (use
, val
);
1586 if (gimple_code (stmt
) == GIMPLE_PHI
)
1588 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1589 if (e
->flags
& EDGE_ABNORMAL
)
1591 /* This can only occur for virtual operands, since
1592 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1593 would prevent replacement. */
1594 gcc_checking_assert (virtual_operand_p (name
));
1595 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1600 if (gimple_code (stmt
) != GIMPLE_PHI
)
1602 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1603 gimple orig_stmt
= stmt
;
1606 /* Mark the block if we changed the last stmt in it. */
1607 if (cfgcleanup_altered_bbs
1608 && stmt_ends_bb_p (stmt
))
1609 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1611 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1612 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1613 only change sth from non-invariant to invariant, and only
1614 when propagating constants. */
1615 if (is_gimple_min_invariant (val
))
1616 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1618 tree op
= gimple_op (stmt
, i
);
1619 /* Operands may be empty here. For example, the labels
1620 of a GIMPLE_COND are nulled out following the creation
1621 of the corresponding CFG edges. */
1622 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1623 recompute_tree_invariant_for_addr_expr (op
);
1626 if (fold_stmt (&gsi
))
1627 stmt
= gsi_stmt (gsi
);
1629 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1630 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1636 gcc_checking_assert (has_zero_uses (name
));
1638 /* Also update the trees stored in loop structures. */
1643 FOR_EACH_LOOP (loop
, 0)
1645 substitute_in_loop_info (loop
, name
, val
);
1650 /* Merge block B into block A. */
1653 gimple_merge_blocks (basic_block a
, basic_block b
)
1655 gimple_stmt_iterator last
, gsi
, psi
;
1658 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1660 /* Remove all single-valued PHI nodes from block B of the form
1661 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1662 gsi
= gsi_last_bb (a
);
1663 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1665 gimple phi
= gsi_stmt (psi
);
1666 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1668 bool may_replace_uses
= (virtual_operand_p (def
)
1669 || may_propagate_copy (def
, use
));
1671 /* In case we maintain loop closed ssa form, do not propagate arguments
1672 of loop exit phi nodes. */
1674 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1675 && !virtual_operand_p (def
)
1676 && TREE_CODE (use
) == SSA_NAME
1677 && a
->loop_father
!= b
->loop_father
)
1678 may_replace_uses
= false;
1680 if (!may_replace_uses
)
1682 gcc_assert (!virtual_operand_p (def
));
1684 /* Note that just emitting the copies is fine -- there is no problem
1685 with ordering of phi nodes. This is because A is the single
1686 predecessor of B, therefore results of the phi nodes cannot
1687 appear as arguments of the phi nodes. */
1688 copy
= gimple_build_assign (def
, use
);
1689 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1690 remove_phi_node (&psi
, false);
1694 /* If we deal with a PHI for virtual operands, we can simply
1695 propagate these without fussing with folding or updating
1697 if (virtual_operand_p (def
))
1699 imm_use_iterator iter
;
1700 use_operand_p use_p
;
1703 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1704 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1705 SET_USE (use_p
, use
);
1707 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1708 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1711 replace_uses_by (def
, use
);
1713 remove_phi_node (&psi
, true);
1717 /* Ensure that B follows A. */
1718 move_block_after (b
, a
);
1720 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1721 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1723 /* Remove labels from B and set gimple_bb to A for other statements. */
1724 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1726 gimple stmt
= gsi_stmt (gsi
);
1727 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1729 tree label
= gimple_label_label (stmt
);
1732 gsi_remove (&gsi
, false);
1734 /* Now that we can thread computed gotos, we might have
1735 a situation where we have a forced label in block B
1736 However, the label at the start of block B might still be
1737 used in other ways (think about the runtime checking for
1738 Fortran assigned gotos). So we can not just delete the
1739 label. Instead we move the label to the start of block A. */
1740 if (FORCED_LABEL (label
))
1742 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1743 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1745 /* Other user labels keep around in a form of a debug stmt. */
1746 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1748 gimple dbg
= gimple_build_debug_bind (label
,
1751 gimple_debug_bind_reset_value (dbg
);
1752 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1755 lp_nr
= EH_LANDING_PAD_NR (label
);
1758 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1759 lp
->post_landing_pad
= NULL
;
1764 gimple_set_bb (stmt
, a
);
1769 /* Merge the sequences. */
1770 last
= gsi_last_bb (a
);
1771 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1772 set_bb_seq (b
, NULL
);
1774 if (cfgcleanup_altered_bbs
)
1775 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1779 /* Return the one of two successors of BB that is not reachable by a
1780 complex edge, if there is one. Else, return BB. We use
1781 this in optimizations that use post-dominators for their heuristics,
1782 to catch the cases in C++ where function calls are involved. */
1785 single_noncomplex_succ (basic_block bb
)
1788 if (EDGE_COUNT (bb
->succs
) != 2)
1791 e0
= EDGE_SUCC (bb
, 0);
1792 e1
= EDGE_SUCC (bb
, 1);
1793 if (e0
->flags
& EDGE_COMPLEX
)
1795 if (e1
->flags
& EDGE_COMPLEX
)
1801 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1804 notice_special_calls (gimple call
)
1806 int flags
= gimple_call_flags (call
);
1808 if (flags
& ECF_MAY_BE_ALLOCA
)
1809 cfun
->calls_alloca
= true;
1810 if (flags
& ECF_RETURNS_TWICE
)
1811 cfun
->calls_setjmp
= true;
1815 /* Clear flags set by notice_special_calls. Used by dead code removal
1816 to update the flags. */
1819 clear_special_calls (void)
1821 cfun
->calls_alloca
= false;
1822 cfun
->calls_setjmp
= false;
1825 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1828 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1830 /* Since this block is no longer reachable, we can just delete all
1831 of its PHI nodes. */
1832 remove_phi_nodes (bb
);
1834 /* Remove edges to BB's successors. */
1835 while (EDGE_COUNT (bb
->succs
) > 0)
1836 remove_edge (EDGE_SUCC (bb
, 0));
1840 /* Remove statements of basic block BB. */
1843 remove_bb (basic_block bb
)
1845 gimple_stmt_iterator i
;
1849 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1850 if (dump_flags
& TDF_DETAILS
)
1852 dump_bb (dump_file
, bb
, 0, dump_flags
);
1853 fprintf (dump_file
, "\n");
1859 struct loop
*loop
= bb
->loop_father
;
1861 /* If a loop gets removed, clean up the information associated
1863 if (loop
->latch
== bb
1864 || loop
->header
== bb
)
1865 free_numbers_of_iterations_estimates_loop (loop
);
1868 /* Remove all the instructions in the block. */
1869 if (bb_seq (bb
) != NULL
)
1871 /* Walk backwards so as to get a chance to substitute all
1872 released DEFs into debug stmts. See
1873 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1875 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
1877 gimple stmt
= gsi_stmt (i
);
1878 if (gimple_code (stmt
) == GIMPLE_LABEL
1879 && (FORCED_LABEL (gimple_label_label (stmt
))
1880 || DECL_NONLOCAL (gimple_label_label (stmt
))))
1883 gimple_stmt_iterator new_gsi
;
1885 /* A non-reachable non-local label may still be referenced.
1886 But it no longer needs to carry the extra semantics of
1888 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
1890 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
1891 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
1894 new_bb
= bb
->prev_bb
;
1895 new_gsi
= gsi_start_bb (new_bb
);
1896 gsi_remove (&i
, false);
1897 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
1901 /* Release SSA definitions if we are in SSA. Note that we
1902 may be called when not in SSA. For example,
1903 final_cleanup calls this function via
1904 cleanup_tree_cfg. */
1905 if (gimple_in_ssa_p (cfun
))
1906 release_defs (stmt
);
1908 gsi_remove (&i
, true);
1912 i
= gsi_last_bb (bb
);
1918 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
1919 bb
->il
.gimple
.seq
= NULL
;
1920 bb
->il
.gimple
.phi_nodes
= NULL
;
1924 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1925 predicate VAL, return the edge that will be taken out of the block.
1926 If VAL does not match a unique edge, NULL is returned. */
1929 find_taken_edge (basic_block bb
, tree val
)
1933 stmt
= last_stmt (bb
);
1936 gcc_assert (is_ctrl_stmt (stmt
));
1941 if (!is_gimple_min_invariant (val
))
1944 if (gimple_code (stmt
) == GIMPLE_COND
)
1945 return find_taken_edge_cond_expr (bb
, val
);
1947 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
1948 return find_taken_edge_switch_expr (bb
, val
);
1950 if (computed_goto_p (stmt
))
1952 /* Only optimize if the argument is a label, if the argument is
1953 not a label then we can not construct a proper CFG.
1955 It may be the case that we only need to allow the LABEL_REF to
1956 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1957 appear inside a LABEL_EXPR just to be safe. */
1958 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
1959 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
1960 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
1967 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1968 statement, determine which of the outgoing edges will be taken out of the
1969 block. Return NULL if either edge may be taken. */
1972 find_taken_edge_computed_goto (basic_block bb
, tree val
)
1977 dest
= label_to_block (val
);
1980 e
= find_edge (bb
, dest
);
1981 gcc_assert (e
!= NULL
);
1987 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1988 statement, determine which of the two edges will be taken out of the
1989 block. Return NULL if either edge may be taken. */
1992 find_taken_edge_cond_expr (basic_block bb
, tree val
)
1994 edge true_edge
, false_edge
;
1996 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
1998 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
1999 return (integer_zerop (val
) ? false_edge
: true_edge
);
2002 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2003 statement, determine which edge will be taken out of the block. Return
2004 NULL if any edge may be taken. */
2007 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2009 basic_block dest_bb
;
2014 switch_stmt
= last_stmt (bb
);
2015 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2016 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2018 e
= find_edge (bb
, dest_bb
);
2024 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2025 We can make optimal use here of the fact that the case labels are
2026 sorted: We can do a binary search for a case matching VAL. */
2029 find_case_label_for_value (gimple switch_stmt
, tree val
)
2031 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2032 tree default_case
= gimple_switch_default_label (switch_stmt
);
2034 for (low
= 0, high
= n
; high
- low
> 1; )
2036 size_t i
= (high
+ low
) / 2;
2037 tree t
= gimple_switch_label (switch_stmt
, i
);
2040 /* Cache the result of comparing CASE_LOW and val. */
2041 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2048 if (CASE_HIGH (t
) == NULL
)
2050 /* A singe-valued case label. */
2056 /* A case range. We can only handle integer ranges. */
2057 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2062 return default_case
;
2066 /* Dump a basic block on stderr. */
2069 gimple_debug_bb (basic_block bb
)
2071 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2075 /* Dump basic block with index N on stderr. */
2078 gimple_debug_bb_n (int n
)
2080 gimple_debug_bb (BASIC_BLOCK (n
));
2081 return BASIC_BLOCK (n
);
2085 /* Dump the CFG on stderr.
2087 FLAGS are the same used by the tree dumping functions
2088 (see TDF_* in dumpfile.h). */
2091 gimple_debug_cfg (int flags
)
2093 gimple_dump_cfg (stderr
, flags
);
2097 /* Dump the program showing basic block boundaries on the given FILE.
2099 FLAGS are the same used by the tree dumping functions (see TDF_* in
2103 gimple_dump_cfg (FILE *file
, int flags
)
2105 if (flags
& TDF_DETAILS
)
2107 dump_function_header (file
, current_function_decl
, flags
);
2108 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2109 n_basic_blocks_for_fn (cfun
), n_edges
, last_basic_block
);
2111 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2112 fprintf (file
, "\n");
2115 if (flags
& TDF_STATS
)
2116 dump_cfg_stats (file
);
2118 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2122 /* Dump CFG statistics on FILE. */
2125 dump_cfg_stats (FILE *file
)
2127 static long max_num_merged_labels
= 0;
2128 unsigned long size
, total
= 0;
2131 const char * const fmt_str
= "%-30s%-13s%12s\n";
2132 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2133 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2134 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2135 const char *funcname
= current_function_name ();
2137 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2139 fprintf (file
, "---------------------------------------------------------\n");
2140 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2141 fprintf (file
, fmt_str
, "", " instances ", "used ");
2142 fprintf (file
, "---------------------------------------------------------\n");
2144 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2146 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2147 SCALE (size
), LABEL (size
));
2151 num_edges
+= EDGE_COUNT (bb
->succs
);
2152 size
= num_edges
* sizeof (struct edge_def
);
2154 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2156 fprintf (file
, "---------------------------------------------------------\n");
2157 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2159 fprintf (file
, "---------------------------------------------------------\n");
2160 fprintf (file
, "\n");
2162 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2163 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2165 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2166 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2168 fprintf (file
, "\n");
2172 /* Dump CFG statistics on stderr. Keep extern so that it's always
2173 linked in the final executable. */
2176 debug_cfg_stats (void)
2178 dump_cfg_stats (stderr
);
2181 /*---------------------------------------------------------------------------
2182 Miscellaneous helpers
2183 ---------------------------------------------------------------------------*/
2185 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2186 flow. Transfers of control flow associated with EH are excluded. */
2189 call_can_make_abnormal_goto (gimple t
)
2191 /* If the function has no non-local labels, then a call cannot make an
2192 abnormal transfer of control. */
2193 if (!cfun
->has_nonlocal_label
2194 && !cfun
->calls_setjmp
)
2197 /* Likewise if the call has no side effects. */
2198 if (!gimple_has_side_effects (t
))
2201 /* Likewise if the called function is leaf. */
2202 if (gimple_call_flags (t
) & ECF_LEAF
)
2209 /* Return true if T can make an abnormal transfer of control flow.
2210 Transfers of control flow associated with EH are excluded. */
2213 stmt_can_make_abnormal_goto (gimple t
)
2215 if (computed_goto_p (t
))
2217 if (is_gimple_call (t
))
2218 return call_can_make_abnormal_goto (t
);
2223 /* Return true if T represents a stmt that always transfers control. */
2226 is_ctrl_stmt (gimple t
)
2228 switch (gimple_code (t
))
2242 /* Return true if T is a statement that may alter the flow of control
2243 (e.g., a call to a non-returning function). */
2246 is_ctrl_altering_stmt (gimple t
)
2250 switch (gimple_code (t
))
2254 int flags
= gimple_call_flags (t
);
2256 /* A call alters control flow if it can make an abnormal goto. */
2257 if (call_can_make_abnormal_goto (t
))
2260 /* A call also alters control flow if it does not return. */
2261 if (flags
& ECF_NORETURN
)
2264 /* TM ending statements have backedges out of the transaction.
2265 Return true so we split the basic block containing them.
2266 Note that the TM_BUILTIN test is merely an optimization. */
2267 if ((flags
& ECF_TM_BUILTIN
)
2268 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2271 /* BUILT_IN_RETURN call is same as return statement. */
2272 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2277 case GIMPLE_EH_DISPATCH
:
2278 /* EH_DISPATCH branches to the individual catch handlers at
2279 this level of a try or allowed-exceptions region. It can
2280 fallthru to the next statement as well. */
2284 if (gimple_asm_nlabels (t
) > 0)
2289 /* OpenMP directives alter control flow. */
2292 case GIMPLE_TRANSACTION
:
2293 /* A transaction start alters control flow. */
2300 /* If a statement can throw, it alters control flow. */
2301 return stmt_can_throw_internal (t
);
2305 /* Return true if T is a simple local goto. */
2308 simple_goto_p (gimple t
)
2310 return (gimple_code (t
) == GIMPLE_GOTO
2311 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2315 /* Return true if STMT should start a new basic block. PREV_STMT is
2316 the statement preceding STMT. It is used when STMT is a label or a
2317 case label. Labels should only start a new basic block if their
2318 previous statement wasn't a label. Otherwise, sequence of labels
2319 would generate unnecessary basic blocks that only contain a single
2323 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2328 /* Labels start a new basic block only if the preceding statement
2329 wasn't a label of the same type. This prevents the creation of
2330 consecutive blocks that have nothing but a single label. */
2331 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2333 /* Nonlocal and computed GOTO targets always start a new block. */
2334 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2335 || FORCED_LABEL (gimple_label_label (stmt
)))
2338 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2340 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2343 cfg_stats
.num_merged_labels
++;
2349 else if (gimple_code (stmt
) == GIMPLE_CALL
2350 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2351 /* setjmp acts similar to a nonlocal GOTO target and thus should
2352 start a new block. */
2359 /* Return true if T should end a basic block. */
2362 stmt_ends_bb_p (gimple t
)
2364 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2367 /* Remove block annotations and other data structures. */
2370 delete_tree_cfg_annotations (void)
2372 vec_free (label_to_block_map
);
2376 /* Return the first statement in basic block BB. */
2379 first_stmt (basic_block bb
)
2381 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2384 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2392 /* Return the first non-label statement in basic block BB. */
2395 first_non_label_stmt (basic_block bb
)
2397 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2398 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2400 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2403 /* Return the last statement in basic block BB. */
2406 last_stmt (basic_block bb
)
2408 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2411 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2419 /* Return the last statement of an otherwise empty block. Return NULL
2420 if the block is totally empty, or if it contains more than one
2424 last_and_only_stmt (basic_block bb
)
2426 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2432 last
= gsi_stmt (i
);
2433 gsi_prev_nondebug (&i
);
2437 /* Empty statements should no longer appear in the instruction stream.
2438 Everything that might have appeared before should be deleted by
2439 remove_useless_stmts, and the optimizers should just gsi_remove
2440 instead of smashing with build_empty_stmt.
2442 Thus the only thing that should appear here in a block containing
2443 one executable statement is a label. */
2444 prev
= gsi_stmt (i
);
2445 if (gimple_code (prev
) == GIMPLE_LABEL
)
2451 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2454 reinstall_phi_args (edge new_edge
, edge old_edge
)
2456 edge_var_map_vector
*v
;
2459 gimple_stmt_iterator phis
;
2461 v
= redirect_edge_var_map_vector (old_edge
);
2465 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2466 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2467 i
++, gsi_next (&phis
))
2469 gimple phi
= gsi_stmt (phis
);
2470 tree result
= redirect_edge_var_map_result (vm
);
2471 tree arg
= redirect_edge_var_map_def (vm
);
2473 gcc_assert (result
== gimple_phi_result (phi
));
2475 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2478 redirect_edge_var_map_clear (old_edge
);
2481 /* Returns the basic block after which the new basic block created
2482 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2483 near its "logical" location. This is of most help to humans looking
2484 at debugging dumps. */
2487 split_edge_bb_loc (edge edge_in
)
2489 basic_block dest
= edge_in
->dest
;
2490 basic_block dest_prev
= dest
->prev_bb
;
2494 edge e
= find_edge (dest_prev
, dest
);
2495 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2496 return edge_in
->src
;
2501 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2502 Abort on abnormal edges. */
2505 gimple_split_edge (edge edge_in
)
2507 basic_block new_bb
, after_bb
, dest
;
2510 /* Abnormal edges cannot be split. */
2511 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2513 dest
= edge_in
->dest
;
2515 after_bb
= split_edge_bb_loc (edge_in
);
2517 new_bb
= create_empty_bb (after_bb
);
2518 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2519 new_bb
->count
= edge_in
->count
;
2520 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2521 new_edge
->probability
= REG_BR_PROB_BASE
;
2522 new_edge
->count
= edge_in
->count
;
2524 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2525 gcc_assert (e
== edge_in
);
2526 reinstall_phi_args (new_edge
, e
);
2532 /* Verify properties of the address expression T with base object BASE. */
2535 verify_address (tree t
, tree base
)
2538 bool old_side_effects
;
2540 bool new_side_effects
;
2542 old_constant
= TREE_CONSTANT (t
);
2543 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2545 recompute_tree_invariant_for_addr_expr (t
);
2546 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2547 new_constant
= TREE_CONSTANT (t
);
2549 if (old_constant
!= new_constant
)
2551 error ("constant not recomputed when ADDR_EXPR changed");
2554 if (old_side_effects
!= new_side_effects
)
2556 error ("side effects not recomputed when ADDR_EXPR changed");
2560 if (!(TREE_CODE (base
) == VAR_DECL
2561 || TREE_CODE (base
) == PARM_DECL
2562 || TREE_CODE (base
) == RESULT_DECL
))
2565 if (DECL_GIMPLE_REG_P (base
))
2567 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2574 /* Callback for walk_tree, check that all elements with address taken are
2575 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2576 inside a PHI node. */
2579 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2586 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2587 #define CHECK_OP(N, MSG) \
2588 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2589 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2591 switch (TREE_CODE (t
))
2594 if (SSA_NAME_IN_FREE_LIST (t
))
2596 error ("SSA name in freelist but still referenced");
2602 error ("INDIRECT_REF in gimple IL");
2606 x
= TREE_OPERAND (t
, 0);
2607 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2608 || !is_gimple_mem_ref_addr (x
))
2610 error ("invalid first operand of MEM_REF");
2613 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2614 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2616 error ("invalid offset operand of MEM_REF");
2617 return TREE_OPERAND (t
, 1);
2619 if (TREE_CODE (x
) == ADDR_EXPR
2620 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2626 x
= fold (ASSERT_EXPR_COND (t
));
2627 if (x
== boolean_false_node
)
2629 error ("ASSERT_EXPR with an always-false condition");
2635 error ("MODIFY_EXPR not expected while having tuples");
2642 gcc_assert (is_gimple_address (t
));
2644 /* Skip any references (they will be checked when we recurse down the
2645 tree) and ensure that any variable used as a prefix is marked
2647 for (x
= TREE_OPERAND (t
, 0);
2648 handled_component_p (x
);
2649 x
= TREE_OPERAND (x
, 0))
2652 if ((tem
= verify_address (t
, x
)))
2655 if (!(TREE_CODE (x
) == VAR_DECL
2656 || TREE_CODE (x
) == PARM_DECL
2657 || TREE_CODE (x
) == RESULT_DECL
))
2660 if (!TREE_ADDRESSABLE (x
))
2662 error ("address taken, but ADDRESSABLE bit not set");
2670 x
= COND_EXPR_COND (t
);
2671 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2673 error ("non-integral used in condition");
2676 if (!is_gimple_condexpr (x
))
2678 error ("invalid conditional operand");
2683 case NON_LVALUE_EXPR
:
2684 case TRUTH_NOT_EXPR
:
2688 case FIX_TRUNC_EXPR
:
2693 CHECK_OP (0, "invalid operand to unary operator");
2699 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2701 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2705 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2707 if (!tree_fits_uhwi_p (TREE_OPERAND (t
, 1))
2708 || !tree_fits_uhwi_p (TREE_OPERAND (t
, 2)))
2710 error ("invalid position or size operand to BIT_FIELD_REF");
2713 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2714 && (TYPE_PRECISION (TREE_TYPE (t
))
2715 != TREE_INT_CST_LOW (TREE_OPERAND (t
, 1))))
2717 error ("integral result type precision does not match "
2718 "field size of BIT_FIELD_REF");
2721 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2722 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2723 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2724 != TREE_INT_CST_LOW (TREE_OPERAND (t
, 1))))
2726 error ("mode precision of non-integral result does not "
2727 "match field size of BIT_FIELD_REF");
2731 t
= TREE_OPERAND (t
, 0);
2736 case ARRAY_RANGE_REF
:
2737 case VIEW_CONVERT_EXPR
:
2738 /* We have a nest of references. Verify that each of the operands
2739 that determine where to reference is either a constant or a variable,
2740 verify that the base is valid, and then show we've already checked
2742 while (handled_component_p (t
))
2744 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2745 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2746 else if (TREE_CODE (t
) == ARRAY_REF
2747 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2749 CHECK_OP (1, "invalid array index");
2750 if (TREE_OPERAND (t
, 2))
2751 CHECK_OP (2, "invalid array lower bound");
2752 if (TREE_OPERAND (t
, 3))
2753 CHECK_OP (3, "invalid array stride");
2755 else if (TREE_CODE (t
) == BIT_FIELD_REF
2756 || TREE_CODE (t
) == REALPART_EXPR
2757 || TREE_CODE (t
) == IMAGPART_EXPR
)
2759 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2764 t
= TREE_OPERAND (t
, 0);
2767 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2769 error ("invalid reference prefix");
2776 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2777 POINTER_PLUS_EXPR. */
2778 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2780 error ("invalid operand to plus/minus, type is a pointer");
2783 CHECK_OP (0, "invalid operand to binary operator");
2784 CHECK_OP (1, "invalid operand to binary operator");
2787 case POINTER_PLUS_EXPR
:
2788 /* Check to make sure the first operand is a pointer or reference type. */
2789 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2791 error ("invalid operand to pointer plus, first operand is not a pointer");
2794 /* Check to make sure the second operand is a ptrofftype. */
2795 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2797 error ("invalid operand to pointer plus, second operand is not an "
2798 "integer type of appropriate width");
2808 case UNORDERED_EXPR
:
2817 case TRUNC_DIV_EXPR
:
2819 case FLOOR_DIV_EXPR
:
2820 case ROUND_DIV_EXPR
:
2821 case TRUNC_MOD_EXPR
:
2823 case FLOOR_MOD_EXPR
:
2824 case ROUND_MOD_EXPR
:
2826 case EXACT_DIV_EXPR
:
2836 CHECK_OP (0, "invalid operand to binary operator");
2837 CHECK_OP (1, "invalid operand to binary operator");
2841 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2845 case CASE_LABEL_EXPR
:
2848 error ("invalid CASE_CHAIN");
2862 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2863 Returns true if there is an error, otherwise false. */
2866 verify_types_in_gimple_min_lval (tree expr
)
2870 if (is_gimple_id (expr
))
2873 if (TREE_CODE (expr
) != TARGET_MEM_REF
2874 && TREE_CODE (expr
) != MEM_REF
)
2876 error ("invalid expression for min lvalue");
2880 /* TARGET_MEM_REFs are strange beasts. */
2881 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
2884 op
= TREE_OPERAND (expr
, 0);
2885 if (!is_gimple_val (op
))
2887 error ("invalid operand in indirect reference");
2888 debug_generic_stmt (op
);
2891 /* Memory references now generally can involve a value conversion. */
2896 /* Verify if EXPR is a valid GIMPLE reference expression. If
2897 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2898 if there is an error, otherwise false. */
2901 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
2903 while (handled_component_p (expr
))
2905 tree op
= TREE_OPERAND (expr
, 0);
2907 if (TREE_CODE (expr
) == ARRAY_REF
2908 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
2910 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
2911 || (TREE_OPERAND (expr
, 2)
2912 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
2913 || (TREE_OPERAND (expr
, 3)
2914 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
2916 error ("invalid operands to array reference");
2917 debug_generic_stmt (expr
);
2922 /* Verify if the reference array element types are compatible. */
2923 if (TREE_CODE (expr
) == ARRAY_REF
2924 && !useless_type_conversion_p (TREE_TYPE (expr
),
2925 TREE_TYPE (TREE_TYPE (op
))))
2927 error ("type mismatch in array reference");
2928 debug_generic_stmt (TREE_TYPE (expr
));
2929 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2932 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
2933 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
2934 TREE_TYPE (TREE_TYPE (op
))))
2936 error ("type mismatch in array range reference");
2937 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
2938 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2942 if ((TREE_CODE (expr
) == REALPART_EXPR
2943 || TREE_CODE (expr
) == IMAGPART_EXPR
)
2944 && !useless_type_conversion_p (TREE_TYPE (expr
),
2945 TREE_TYPE (TREE_TYPE (op
))))
2947 error ("type mismatch in real/imagpart reference");
2948 debug_generic_stmt (TREE_TYPE (expr
));
2949 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2953 if (TREE_CODE (expr
) == COMPONENT_REF
2954 && !useless_type_conversion_p (TREE_TYPE (expr
),
2955 TREE_TYPE (TREE_OPERAND (expr
, 1))))
2957 error ("type mismatch in component reference");
2958 debug_generic_stmt (TREE_TYPE (expr
));
2959 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
2963 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
2965 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2966 that their operand is not an SSA name or an invariant when
2967 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2968 bug). Otherwise there is nothing to verify, gross mismatches at
2969 most invoke undefined behavior. */
2971 && (TREE_CODE (op
) == SSA_NAME
2972 || is_gimple_min_invariant (op
)))
2974 error ("conversion of an SSA_NAME on the left hand side");
2975 debug_generic_stmt (expr
);
2978 else if (TREE_CODE (op
) == SSA_NAME
2979 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
2981 error ("conversion of register to a different size");
2982 debug_generic_stmt (expr
);
2985 else if (!handled_component_p (op
))
2992 if (TREE_CODE (expr
) == MEM_REF
)
2994 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
2996 error ("invalid address operand in MEM_REF");
2997 debug_generic_stmt (expr
);
3000 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3001 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3003 error ("invalid offset operand in MEM_REF");
3004 debug_generic_stmt (expr
);
3008 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3010 if (!TMR_BASE (expr
)
3011 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3013 error ("invalid address operand in TARGET_MEM_REF");
3016 if (!TMR_OFFSET (expr
)
3017 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3018 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3020 error ("invalid offset operand in TARGET_MEM_REF");
3021 debug_generic_stmt (expr
);
3026 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3027 && verify_types_in_gimple_min_lval (expr
));
3030 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3031 list of pointer-to types that is trivially convertible to DEST. */
3034 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3038 if (!TYPE_POINTER_TO (src_obj
))
3041 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3042 if (useless_type_conversion_p (dest
, src
))
3048 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3049 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3052 valid_fixed_convert_types_p (tree type1
, tree type2
)
3054 return (FIXED_POINT_TYPE_P (type1
)
3055 && (INTEGRAL_TYPE_P (type2
)
3056 || SCALAR_FLOAT_TYPE_P (type2
)
3057 || FIXED_POINT_TYPE_P (type2
)));
3060 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3061 is a problem, otherwise false. */
3064 verify_gimple_call (gimple stmt
)
3066 tree fn
= gimple_call_fn (stmt
);
3067 tree fntype
, fndecl
;
3070 if (gimple_call_internal_p (stmt
))
3074 error ("gimple call has two targets");
3075 debug_generic_stmt (fn
);
3083 error ("gimple call has no target");
3088 if (fn
&& !is_gimple_call_addr (fn
))
3090 error ("invalid function in gimple call");
3091 debug_generic_stmt (fn
);
3096 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3097 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3098 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3100 error ("non-function in gimple call");
3104 fndecl
= gimple_call_fndecl (stmt
);
3106 && TREE_CODE (fndecl
) == FUNCTION_DECL
3107 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3108 && !DECL_PURE_P (fndecl
)
3109 && !TREE_READONLY (fndecl
))
3111 error ("invalid pure const state for function");
3115 if (gimple_call_lhs (stmt
)
3116 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3117 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3119 error ("invalid LHS in gimple call");
3123 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3125 error ("LHS in noreturn call");
3129 fntype
= gimple_call_fntype (stmt
);
3131 && gimple_call_lhs (stmt
)
3132 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3134 /* ??? At least C++ misses conversions at assignments from
3135 void * call results.
3136 ??? Java is completely off. Especially with functions
3137 returning java.lang.Object.
3138 For now simply allow arbitrary pointer type conversions. */
3139 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3140 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3142 error ("invalid conversion in gimple call");
3143 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3144 debug_generic_stmt (TREE_TYPE (fntype
));
3148 if (gimple_call_chain (stmt
)
3149 && !is_gimple_val (gimple_call_chain (stmt
)))
3151 error ("invalid static chain in gimple call");
3152 debug_generic_stmt (gimple_call_chain (stmt
));
3156 /* If there is a static chain argument, this should not be an indirect
3157 call, and the decl should have DECL_STATIC_CHAIN set. */
3158 if (gimple_call_chain (stmt
))
3160 if (!gimple_call_fndecl (stmt
))
3162 error ("static chain in indirect gimple call");
3165 fn
= TREE_OPERAND (fn
, 0);
3167 if (!DECL_STATIC_CHAIN (fn
))
3169 error ("static chain with function that doesn%'t use one");
3174 /* ??? The C frontend passes unpromoted arguments in case it
3175 didn't see a function declaration before the call. So for now
3176 leave the call arguments mostly unverified. Once we gimplify
3177 unit-at-a-time we have a chance to fix this. */
3179 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3181 tree arg
= gimple_call_arg (stmt
, i
);
3182 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3183 && !is_gimple_val (arg
))
3184 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3185 && !is_gimple_lvalue (arg
)))
3187 error ("invalid argument to gimple call");
3188 debug_generic_expr (arg
);
3196 /* Verifies the gimple comparison with the result type TYPE and
3197 the operands OP0 and OP1. */
3200 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3202 tree op0_type
= TREE_TYPE (op0
);
3203 tree op1_type
= TREE_TYPE (op1
);
3205 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3207 error ("invalid operands in gimple comparison");
3211 /* For comparisons we do not have the operations type as the
3212 effective type the comparison is carried out in. Instead
3213 we require that either the first operand is trivially
3214 convertible into the second, or the other way around.
3215 Because we special-case pointers to void we allow
3216 comparisons of pointers with the same mode as well. */
3217 if (!useless_type_conversion_p (op0_type
, op1_type
)
3218 && !useless_type_conversion_p (op1_type
, op0_type
)
3219 && (!POINTER_TYPE_P (op0_type
)
3220 || !POINTER_TYPE_P (op1_type
)
3221 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3223 error ("mismatching comparison operand types");
3224 debug_generic_expr (op0_type
);
3225 debug_generic_expr (op1_type
);
3229 /* The resulting type of a comparison may be an effective boolean type. */
3230 if (INTEGRAL_TYPE_P (type
)
3231 && (TREE_CODE (type
) == BOOLEAN_TYPE
3232 || TYPE_PRECISION (type
) == 1))
3234 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3235 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3237 error ("vector comparison returning a boolean");
3238 debug_generic_expr (op0_type
);
3239 debug_generic_expr (op1_type
);
3243 /* Or an integer vector type with the same size and element count
3244 as the comparison operand types. */
3245 else if (TREE_CODE (type
) == VECTOR_TYPE
3246 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3248 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3249 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3251 error ("non-vector operands in vector comparison");
3252 debug_generic_expr (op0_type
);
3253 debug_generic_expr (op1_type
);
3257 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3258 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3259 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3260 /* The result of a vector comparison is of signed
3262 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3264 error ("invalid vector comparison resulting type");
3265 debug_generic_expr (type
);
3271 error ("bogus comparison result type");
3272 debug_generic_expr (type
);
3279 /* Verify a gimple assignment statement STMT with an unary rhs.
3280 Returns true if anything is wrong. */
3283 verify_gimple_assign_unary (gimple stmt
)
3285 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3286 tree lhs
= gimple_assign_lhs (stmt
);
3287 tree lhs_type
= TREE_TYPE (lhs
);
3288 tree rhs1
= gimple_assign_rhs1 (stmt
);
3289 tree rhs1_type
= TREE_TYPE (rhs1
);
3291 if (!is_gimple_reg (lhs
))
3293 error ("non-register as LHS of unary operation");
3297 if (!is_gimple_val (rhs1
))
3299 error ("invalid operand in unary operation");
3303 /* First handle conversions. */
3308 /* Allow conversions from pointer type to integral type only if
3309 there is no sign or zero extension involved.
3310 For targets were the precision of ptrofftype doesn't match that
3311 of pointers we need to allow arbitrary conversions to ptrofftype. */
3312 if ((POINTER_TYPE_P (lhs_type
)
3313 && INTEGRAL_TYPE_P (rhs1_type
))
3314 || (POINTER_TYPE_P (rhs1_type
)
3315 && INTEGRAL_TYPE_P (lhs_type
)
3316 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3317 || ptrofftype_p (sizetype
))))
3320 /* Allow conversion from integral to offset type and vice versa. */
3321 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3322 && INTEGRAL_TYPE_P (rhs1_type
))
3323 || (INTEGRAL_TYPE_P (lhs_type
)
3324 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3327 /* Otherwise assert we are converting between types of the
3329 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3331 error ("invalid types in nop conversion");
3332 debug_generic_expr (lhs_type
);
3333 debug_generic_expr (rhs1_type
);
3340 case ADDR_SPACE_CONVERT_EXPR
:
3342 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3343 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3344 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3346 error ("invalid types in address space conversion");
3347 debug_generic_expr (lhs_type
);
3348 debug_generic_expr (rhs1_type
);
3355 case FIXED_CONVERT_EXPR
:
3357 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3358 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3360 error ("invalid types in fixed-point conversion");
3361 debug_generic_expr (lhs_type
);
3362 debug_generic_expr (rhs1_type
);
3371 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3372 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3373 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3375 error ("invalid types in conversion to floating point");
3376 debug_generic_expr (lhs_type
);
3377 debug_generic_expr (rhs1_type
);
3384 case FIX_TRUNC_EXPR
:
3386 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3387 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3388 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3390 error ("invalid types in conversion to integer");
3391 debug_generic_expr (lhs_type
);
3392 debug_generic_expr (rhs1_type
);
3399 case VEC_UNPACK_HI_EXPR
:
3400 case VEC_UNPACK_LO_EXPR
:
3401 case REDUC_MAX_EXPR
:
3402 case REDUC_MIN_EXPR
:
3403 case REDUC_PLUS_EXPR
:
3404 case VEC_UNPACK_FLOAT_HI_EXPR
:
3405 case VEC_UNPACK_FLOAT_LO_EXPR
:
3413 case NON_LVALUE_EXPR
:
3421 /* For the remaining codes assert there is no conversion involved. */
3422 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3424 error ("non-trivial conversion in unary operation");
3425 debug_generic_expr (lhs_type
);
3426 debug_generic_expr (rhs1_type
);
3433 /* Verify a gimple assignment statement STMT with a binary rhs.
3434 Returns true if anything is wrong. */
3437 verify_gimple_assign_binary (gimple stmt
)
3439 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3440 tree lhs
= gimple_assign_lhs (stmt
);
3441 tree lhs_type
= TREE_TYPE (lhs
);
3442 tree rhs1
= gimple_assign_rhs1 (stmt
);
3443 tree rhs1_type
= TREE_TYPE (rhs1
);
3444 tree rhs2
= gimple_assign_rhs2 (stmt
);
3445 tree rhs2_type
= TREE_TYPE (rhs2
);
3447 if (!is_gimple_reg (lhs
))
3449 error ("non-register as LHS of binary operation");
3453 if (!is_gimple_val (rhs1
)
3454 || !is_gimple_val (rhs2
))
3456 error ("invalid operands in binary operation");
3460 /* First handle operations that involve different types. */
3465 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3466 || !(INTEGRAL_TYPE_P (rhs1_type
)
3467 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3468 || !(INTEGRAL_TYPE_P (rhs2_type
)
3469 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3471 error ("type mismatch in complex expression");
3472 debug_generic_expr (lhs_type
);
3473 debug_generic_expr (rhs1_type
);
3474 debug_generic_expr (rhs2_type
);
3486 /* Shifts and rotates are ok on integral types, fixed point
3487 types and integer vector types. */
3488 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3489 && !FIXED_POINT_TYPE_P (rhs1_type
)
3490 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3491 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3492 || (!INTEGRAL_TYPE_P (rhs2_type
)
3493 /* Vector shifts of vectors are also ok. */
3494 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3495 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3496 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3497 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3498 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3500 error ("type mismatch in shift expression");
3501 debug_generic_expr (lhs_type
);
3502 debug_generic_expr (rhs1_type
);
3503 debug_generic_expr (rhs2_type
);
3510 case VEC_LSHIFT_EXPR
:
3511 case VEC_RSHIFT_EXPR
:
3513 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3514 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3515 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3516 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3517 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3518 || (!INTEGRAL_TYPE_P (rhs2_type
)
3519 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3520 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3521 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3523 error ("type mismatch in vector shift expression");
3524 debug_generic_expr (lhs_type
);
3525 debug_generic_expr (rhs1_type
);
3526 debug_generic_expr (rhs2_type
);
3529 /* For shifting a vector of non-integral components we
3530 only allow shifting by a constant multiple of the element size. */
3531 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3532 && (TREE_CODE (rhs2
) != INTEGER_CST
3533 || !div_if_zero_remainder (EXACT_DIV_EXPR
, rhs2
,
3534 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3536 error ("non-element sized vector shift of floating point vector");
3543 case WIDEN_LSHIFT_EXPR
:
3545 if (!INTEGRAL_TYPE_P (lhs_type
)
3546 || !INTEGRAL_TYPE_P (rhs1_type
)
3547 || TREE_CODE (rhs2
) != INTEGER_CST
3548 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3550 error ("type mismatch in widening vector shift expression");
3551 debug_generic_expr (lhs_type
);
3552 debug_generic_expr (rhs1_type
);
3553 debug_generic_expr (rhs2_type
);
3560 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3561 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3563 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3564 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3565 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3566 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3567 || TREE_CODE (rhs2
) != INTEGER_CST
3568 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3569 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3571 error ("type mismatch in widening vector shift expression");
3572 debug_generic_expr (lhs_type
);
3573 debug_generic_expr (rhs1_type
);
3574 debug_generic_expr (rhs2_type
);
3584 tree lhs_etype
= lhs_type
;
3585 tree rhs1_etype
= rhs1_type
;
3586 tree rhs2_etype
= rhs2_type
;
3587 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3589 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3590 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3592 error ("invalid non-vector operands to vector valued plus");
3595 lhs_etype
= TREE_TYPE (lhs_type
);
3596 rhs1_etype
= TREE_TYPE (rhs1_type
);
3597 rhs2_etype
= TREE_TYPE (rhs2_type
);
3599 if (POINTER_TYPE_P (lhs_etype
)
3600 || POINTER_TYPE_P (rhs1_etype
)
3601 || POINTER_TYPE_P (rhs2_etype
))
3603 error ("invalid (pointer) operands to plus/minus");
3607 /* Continue with generic binary expression handling. */
3611 case POINTER_PLUS_EXPR
:
3613 if (!POINTER_TYPE_P (rhs1_type
)
3614 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3615 || !ptrofftype_p (rhs2_type
))
3617 error ("type mismatch in pointer plus expression");
3618 debug_generic_stmt (lhs_type
);
3619 debug_generic_stmt (rhs1_type
);
3620 debug_generic_stmt (rhs2_type
);
3627 case TRUTH_ANDIF_EXPR
:
3628 case TRUTH_ORIF_EXPR
:
3629 case TRUTH_AND_EXPR
:
3631 case TRUTH_XOR_EXPR
:
3641 case UNORDERED_EXPR
:
3649 /* Comparisons are also binary, but the result type is not
3650 connected to the operand types. */
3651 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3653 case WIDEN_MULT_EXPR
:
3654 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3656 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3657 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3659 case WIDEN_SUM_EXPR
:
3660 case VEC_WIDEN_MULT_HI_EXPR
:
3661 case VEC_WIDEN_MULT_LO_EXPR
:
3662 case VEC_WIDEN_MULT_EVEN_EXPR
:
3663 case VEC_WIDEN_MULT_ODD_EXPR
:
3664 case VEC_PACK_TRUNC_EXPR
:
3665 case VEC_PACK_SAT_EXPR
:
3666 case VEC_PACK_FIX_TRUNC_EXPR
:
3671 case MULT_HIGHPART_EXPR
:
3672 case TRUNC_DIV_EXPR
:
3674 case FLOOR_DIV_EXPR
:
3675 case ROUND_DIV_EXPR
:
3676 case TRUNC_MOD_EXPR
:
3678 case FLOOR_MOD_EXPR
:
3679 case ROUND_MOD_EXPR
:
3681 case EXACT_DIV_EXPR
:
3687 /* Continue with generic binary expression handling. */
3694 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3695 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3697 error ("type mismatch in binary expression");
3698 debug_generic_stmt (lhs_type
);
3699 debug_generic_stmt (rhs1_type
);
3700 debug_generic_stmt (rhs2_type
);
3707 /* Verify a gimple assignment statement STMT with a ternary rhs.
3708 Returns true if anything is wrong. */
3711 verify_gimple_assign_ternary (gimple stmt
)
3713 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3714 tree lhs
= gimple_assign_lhs (stmt
);
3715 tree lhs_type
= TREE_TYPE (lhs
);
3716 tree rhs1
= gimple_assign_rhs1 (stmt
);
3717 tree rhs1_type
= TREE_TYPE (rhs1
);
3718 tree rhs2
= gimple_assign_rhs2 (stmt
);
3719 tree rhs2_type
= TREE_TYPE (rhs2
);
3720 tree rhs3
= gimple_assign_rhs3 (stmt
);
3721 tree rhs3_type
= TREE_TYPE (rhs3
);
3723 if (!is_gimple_reg (lhs
))
3725 error ("non-register as LHS of ternary operation");
3729 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3730 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3731 || !is_gimple_val (rhs2
)
3732 || !is_gimple_val (rhs3
))
3734 error ("invalid operands in ternary operation");
3738 /* First handle operations that involve different types. */
3741 case WIDEN_MULT_PLUS_EXPR
:
3742 case WIDEN_MULT_MINUS_EXPR
:
3743 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3744 && !FIXED_POINT_TYPE_P (rhs1_type
))
3745 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3746 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3747 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3748 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3750 error ("type mismatch in widening multiply-accumulate expression");
3751 debug_generic_expr (lhs_type
);
3752 debug_generic_expr (rhs1_type
);
3753 debug_generic_expr (rhs2_type
);
3754 debug_generic_expr (rhs3_type
);
3760 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3761 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3762 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3764 error ("type mismatch in fused multiply-add expression");
3765 debug_generic_expr (lhs_type
);
3766 debug_generic_expr (rhs1_type
);
3767 debug_generic_expr (rhs2_type
);
3768 debug_generic_expr (rhs3_type
);
3775 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3776 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3778 error ("type mismatch in conditional expression");
3779 debug_generic_expr (lhs_type
);
3780 debug_generic_expr (rhs2_type
);
3781 debug_generic_expr (rhs3_type
);
3787 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3788 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3790 error ("type mismatch in vector permute expression");
3791 debug_generic_expr (lhs_type
);
3792 debug_generic_expr (rhs1_type
);
3793 debug_generic_expr (rhs2_type
);
3794 debug_generic_expr (rhs3_type
);
3798 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3799 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3800 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3802 error ("vector types expected in vector permute expression");
3803 debug_generic_expr (lhs_type
);
3804 debug_generic_expr (rhs1_type
);
3805 debug_generic_expr (rhs2_type
);
3806 debug_generic_expr (rhs3_type
);
3810 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3811 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3812 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3813 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3814 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3816 error ("vectors with different element number found "
3817 "in vector permute expression");
3818 debug_generic_expr (lhs_type
);
3819 debug_generic_expr (rhs1_type
);
3820 debug_generic_expr (rhs2_type
);
3821 debug_generic_expr (rhs3_type
);
3825 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3826 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3827 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3829 error ("invalid mask type in vector permute expression");
3830 debug_generic_expr (lhs_type
);
3831 debug_generic_expr (rhs1_type
);
3832 debug_generic_expr (rhs2_type
);
3833 debug_generic_expr (rhs3_type
);
3840 case REALIGN_LOAD_EXPR
:
3850 /* Verify a gimple assignment statement STMT with a single rhs.
3851 Returns true if anything is wrong. */
3854 verify_gimple_assign_single (gimple stmt
)
3856 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3857 tree lhs
= gimple_assign_lhs (stmt
);
3858 tree lhs_type
= TREE_TYPE (lhs
);
3859 tree rhs1
= gimple_assign_rhs1 (stmt
);
3860 tree rhs1_type
= TREE_TYPE (rhs1
);
3863 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3865 error ("non-trivial conversion at assignment");
3866 debug_generic_expr (lhs_type
);
3867 debug_generic_expr (rhs1_type
);
3871 if (gimple_clobber_p (stmt
)
3872 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
3874 error ("non-decl/MEM_REF LHS in clobber statement");
3875 debug_generic_expr (lhs
);
3879 if (handled_component_p (lhs
))
3880 res
|= verify_types_in_gimple_reference (lhs
, true);
3882 /* Special codes we cannot handle via their class. */
3887 tree op
= TREE_OPERAND (rhs1
, 0);
3888 if (!is_gimple_addressable (op
))
3890 error ("invalid operand in unary expression");
3894 /* Technically there is no longer a need for matching types, but
3895 gimple hygiene asks for this check. In LTO we can end up
3896 combining incompatible units and thus end up with addresses
3897 of globals that change their type to a common one. */
3899 && !types_compatible_p (TREE_TYPE (op
),
3900 TREE_TYPE (TREE_TYPE (rhs1
)))
3901 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
3904 error ("type mismatch in address expression");
3905 debug_generic_stmt (TREE_TYPE (rhs1
));
3906 debug_generic_stmt (TREE_TYPE (op
));
3910 return verify_types_in_gimple_reference (op
, true);
3915 error ("INDIRECT_REF in gimple IL");
3921 case ARRAY_RANGE_REF
:
3922 case VIEW_CONVERT_EXPR
:
3925 case TARGET_MEM_REF
:
3927 if (!is_gimple_reg (lhs
)
3928 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3930 error ("invalid rhs for gimple memory store");
3931 debug_generic_stmt (lhs
);
3932 debug_generic_stmt (rhs1
);
3935 return res
|| verify_types_in_gimple_reference (rhs1
, false);
3947 /* tcc_declaration */
3952 if (!is_gimple_reg (lhs
)
3953 && !is_gimple_reg (rhs1
)
3954 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3956 error ("invalid rhs for gimple memory store");
3957 debug_generic_stmt (lhs
);
3958 debug_generic_stmt (rhs1
);
3964 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
3967 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
3969 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
3971 /* For vector CONSTRUCTORs we require that either it is empty
3972 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3973 (then the element count must be correct to cover the whole
3974 outer vector and index must be NULL on all elements, or it is
3975 a CONSTRUCTOR of scalar elements, where we as an exception allow
3976 smaller number of elements (assuming zero filling) and
3977 consecutive indexes as compared to NULL indexes (such
3978 CONSTRUCTORs can appear in the IL from FEs). */
3979 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
3981 if (elt_t
== NULL_TREE
)
3983 elt_t
= TREE_TYPE (elt_v
);
3984 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
3986 tree elt_t
= TREE_TYPE (elt_v
);
3987 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
3990 error ("incorrect type of vector CONSTRUCTOR"
3992 debug_generic_stmt (rhs1
);
3995 else if (CONSTRUCTOR_NELTS (rhs1
)
3996 * TYPE_VECTOR_SUBPARTS (elt_t
)
3997 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
3999 error ("incorrect number of vector CONSTRUCTOR"
4001 debug_generic_stmt (rhs1
);
4005 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4008 error ("incorrect type of vector CONSTRUCTOR elements");
4009 debug_generic_stmt (rhs1
);
4012 else if (CONSTRUCTOR_NELTS (rhs1
)
4013 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4015 error ("incorrect number of vector CONSTRUCTOR elements");
4016 debug_generic_stmt (rhs1
);
4020 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4022 error ("incorrect type of vector CONSTRUCTOR elements");
4023 debug_generic_stmt (rhs1
);
4026 if (elt_i
!= NULL_TREE
4027 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4028 || TREE_CODE (elt_i
) != INTEGER_CST
4029 || compare_tree_int (elt_i
, i
) != 0))
4031 error ("vector CONSTRUCTOR with non-NULL element index");
4032 debug_generic_stmt (rhs1
);
4040 case WITH_SIZE_EXPR
:
4050 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4051 is a problem, otherwise false. */
4054 verify_gimple_assign (gimple stmt
)
4056 switch (gimple_assign_rhs_class (stmt
))
4058 case GIMPLE_SINGLE_RHS
:
4059 return verify_gimple_assign_single (stmt
);
4061 case GIMPLE_UNARY_RHS
:
4062 return verify_gimple_assign_unary (stmt
);
4064 case GIMPLE_BINARY_RHS
:
4065 return verify_gimple_assign_binary (stmt
);
4067 case GIMPLE_TERNARY_RHS
:
4068 return verify_gimple_assign_ternary (stmt
);
4075 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4076 is a problem, otherwise false. */
4079 verify_gimple_return (gimple stmt
)
4081 tree op
= gimple_return_retval (stmt
);
4082 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4084 /* We cannot test for present return values as we do not fix up missing
4085 return values from the original source. */
4089 if (!is_gimple_val (op
)
4090 && TREE_CODE (op
) != RESULT_DECL
)
4092 error ("invalid operand in return statement");
4093 debug_generic_stmt (op
);
4097 if ((TREE_CODE (op
) == RESULT_DECL
4098 && DECL_BY_REFERENCE (op
))
4099 || (TREE_CODE (op
) == SSA_NAME
4100 && SSA_NAME_VAR (op
)
4101 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4102 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4103 op
= TREE_TYPE (op
);
4105 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4107 error ("invalid conversion in return statement");
4108 debug_generic_stmt (restype
);
4109 debug_generic_stmt (TREE_TYPE (op
));
4117 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4118 is a problem, otherwise false. */
4121 verify_gimple_goto (gimple stmt
)
4123 tree dest
= gimple_goto_dest (stmt
);
4125 /* ??? We have two canonical forms of direct goto destinations, a
4126 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4127 if (TREE_CODE (dest
) != LABEL_DECL
4128 && (!is_gimple_val (dest
)
4129 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4131 error ("goto destination is neither a label nor a pointer");
4138 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4139 is a problem, otherwise false. */
4142 verify_gimple_switch (gimple stmt
)
4145 tree elt
, prev_upper_bound
= NULL_TREE
;
4146 tree index_type
, elt_type
= NULL_TREE
;
4148 if (!is_gimple_val (gimple_switch_index (stmt
)))
4150 error ("invalid operand to switch statement");
4151 debug_generic_stmt (gimple_switch_index (stmt
));
4155 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4156 if (! INTEGRAL_TYPE_P (index_type
))
4158 error ("non-integral type switch statement");
4159 debug_generic_expr (index_type
);
4163 elt
= gimple_switch_label (stmt
, 0);
4164 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4166 error ("invalid default case label in switch statement");
4167 debug_generic_expr (elt
);
4171 n
= gimple_switch_num_labels (stmt
);
4172 for (i
= 1; i
< n
; i
++)
4174 elt
= gimple_switch_label (stmt
, i
);
4176 if (! CASE_LOW (elt
))
4178 error ("invalid case label in switch statement");
4179 debug_generic_expr (elt
);
4183 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4185 error ("invalid case range in switch statement");
4186 debug_generic_expr (elt
);
4192 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4193 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4195 error ("type mismatch for case label in switch statement");
4196 debug_generic_expr (elt
);
4202 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4203 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4205 error ("type precision mismatch in switch statement");
4210 if (prev_upper_bound
)
4212 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4214 error ("case labels not sorted in switch statement");
4219 prev_upper_bound
= CASE_HIGH (elt
);
4220 if (! prev_upper_bound
)
4221 prev_upper_bound
= CASE_LOW (elt
);
4227 /* Verify a gimple debug statement STMT.
4228 Returns true if anything is wrong. */
4231 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4233 /* There isn't much that could be wrong in a gimple debug stmt. A
4234 gimple debug bind stmt, for example, maps a tree, that's usually
4235 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4236 component or member of an aggregate type, to another tree, that
4237 can be an arbitrary expression. These stmts expand into debug
4238 insns, and are converted to debug notes by var-tracking.c. */
4242 /* Verify a gimple label statement STMT.
4243 Returns true if anything is wrong. */
4246 verify_gimple_label (gimple stmt
)
4248 tree decl
= gimple_label_label (stmt
);
4252 if (TREE_CODE (decl
) != LABEL_DECL
)
4254 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4255 && DECL_CONTEXT (decl
) != current_function_decl
)
4257 error ("label's context is not the current function decl");
4261 uid
= LABEL_DECL_UID (decl
);
4263 && (uid
== -1 || (*label_to_block_map
)[uid
] != gimple_bb (stmt
)))
4265 error ("incorrect entry in label_to_block_map");
4269 uid
= EH_LANDING_PAD_NR (decl
);
4272 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4273 if (decl
!= lp
->post_landing_pad
)
4275 error ("incorrect setting of landing pad number");
4283 /* Verify the GIMPLE statement STMT. Returns true if there is an
4284 error, otherwise false. */
4287 verify_gimple_stmt (gimple stmt
)
4289 switch (gimple_code (stmt
))
4292 return verify_gimple_assign (stmt
);
4295 return verify_gimple_label (stmt
);
4298 return verify_gimple_call (stmt
);
4301 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4303 error ("invalid comparison code in gimple cond");
4306 if (!(!gimple_cond_true_label (stmt
)
4307 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4308 || !(!gimple_cond_false_label (stmt
)
4309 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4311 error ("invalid labels in gimple cond");
4315 return verify_gimple_comparison (boolean_type_node
,
4316 gimple_cond_lhs (stmt
),
4317 gimple_cond_rhs (stmt
));
4320 return verify_gimple_goto (stmt
);
4323 return verify_gimple_switch (stmt
);
4326 return verify_gimple_return (stmt
);
4331 case GIMPLE_TRANSACTION
:
4332 return verify_gimple_transaction (stmt
);
4334 /* Tuples that do not have tree operands. */
4336 case GIMPLE_PREDICT
:
4338 case GIMPLE_EH_DISPATCH
:
4339 case GIMPLE_EH_MUST_NOT_THROW
:
4343 /* OpenMP directives are validated by the FE and never operated
4344 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4345 non-gimple expressions when the main index variable has had
4346 its address taken. This does not affect the loop itself
4347 because the header of an GIMPLE_OMP_FOR is merely used to determine
4348 how to setup the parallel iteration. */
4352 return verify_gimple_debug (stmt
);
4359 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4360 and false otherwise. */
4363 verify_gimple_phi (gimple phi
)
4367 tree phi_result
= gimple_phi_result (phi
);
4372 error ("invalid PHI result");
4376 virtual_p
= virtual_operand_p (phi_result
);
4377 if (TREE_CODE (phi_result
) != SSA_NAME
4379 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4381 error ("invalid PHI result");
4385 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4387 tree t
= gimple_phi_arg_def (phi
, i
);
4391 error ("missing PHI def");
4395 /* Addressable variables do have SSA_NAMEs but they
4396 are not considered gimple values. */
4397 else if ((TREE_CODE (t
) == SSA_NAME
4398 && virtual_p
!= virtual_operand_p (t
))
4400 && (TREE_CODE (t
) != SSA_NAME
4401 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4403 && !is_gimple_val (t
)))
4405 error ("invalid PHI argument");
4406 debug_generic_expr (t
);
4409 #ifdef ENABLE_TYPES_CHECKING
4410 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4412 error ("incompatible types in PHI argument %u", i
);
4413 debug_generic_stmt (TREE_TYPE (phi_result
));
4414 debug_generic_stmt (TREE_TYPE (t
));
4423 /* Verify the GIMPLE statements inside the sequence STMTS. */
4426 verify_gimple_in_seq_2 (gimple_seq stmts
)
4428 gimple_stmt_iterator ittr
;
4431 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4433 gimple stmt
= gsi_stmt (ittr
);
4435 switch (gimple_code (stmt
))
4438 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4442 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4443 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4446 case GIMPLE_EH_FILTER
:
4447 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4450 case GIMPLE_EH_ELSE
:
4451 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4452 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4456 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4459 case GIMPLE_TRANSACTION
:
4460 err
|= verify_gimple_transaction (stmt
);
4465 bool err2
= verify_gimple_stmt (stmt
);
4467 debug_gimple_stmt (stmt
);
4476 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4477 is a problem, otherwise false. */
4480 verify_gimple_transaction (gimple stmt
)
4482 tree lab
= gimple_transaction_label (stmt
);
4483 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4485 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4489 /* Verify the GIMPLE statements inside the statement list STMTS. */
4492 verify_gimple_in_seq (gimple_seq stmts
)
4494 timevar_push (TV_TREE_STMT_VERIFY
);
4495 if (verify_gimple_in_seq_2 (stmts
))
4496 internal_error ("verify_gimple failed");
4497 timevar_pop (TV_TREE_STMT_VERIFY
);
4500 /* Return true when the T can be shared. */
4503 tree_node_can_be_shared (tree t
)
4505 if (IS_TYPE_OR_DECL_P (t
)
4506 || is_gimple_min_invariant (t
)
4507 || TREE_CODE (t
) == SSA_NAME
4508 || t
== error_mark_node
4509 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4512 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4521 /* Called via walk_tree. Verify tree sharing. */
4524 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4526 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4528 if (tree_node_can_be_shared (*tp
))
4530 *walk_subtrees
= false;
4534 if (pointer_set_insert (visited
, *tp
))
4540 /* Called via walk_gimple_stmt. Verify tree sharing. */
4543 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4545 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4546 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4549 static bool eh_error_found
;
4551 verify_eh_throw_stmt_node (void **slot
, void *data
)
4553 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4554 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4556 if (!pointer_set_contains (visited
, node
->stmt
))
4558 error ("dead STMT in EH table");
4559 debug_gimple_stmt (node
->stmt
);
4560 eh_error_found
= true;
4565 /* Verify if the location LOCs block is in BLOCKS. */
4568 verify_location (pointer_set_t
*blocks
, location_t loc
)
4570 tree block
= LOCATION_BLOCK (loc
);
4571 if (block
!= NULL_TREE
4572 && !pointer_set_contains (blocks
, block
))
4574 error ("location references block not in block tree");
4577 if (block
!= NULL_TREE
)
4578 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4582 /* Called via walk_tree. Verify that expressions have no blocks. */
4585 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4589 *walk_subtrees
= false;
4593 location_t loc
= EXPR_LOCATION (*tp
);
4594 if (LOCATION_BLOCK (loc
) != NULL
)
4600 /* Called via walk_tree. Verify locations of expressions. */
4603 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4605 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4607 if (TREE_CODE (*tp
) == VAR_DECL
4608 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4610 tree t
= DECL_DEBUG_EXPR (*tp
);
4611 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4615 if ((TREE_CODE (*tp
) == VAR_DECL
4616 || TREE_CODE (*tp
) == PARM_DECL
4617 || TREE_CODE (*tp
) == RESULT_DECL
)
4618 && DECL_HAS_VALUE_EXPR_P (*tp
))
4620 tree t
= DECL_VALUE_EXPR (*tp
);
4621 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4628 *walk_subtrees
= false;
4632 location_t loc
= EXPR_LOCATION (*tp
);
4633 if (verify_location (blocks
, loc
))
4639 /* Called via walk_gimple_op. Verify locations of expressions. */
4642 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4644 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4645 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4648 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4651 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4654 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4656 pointer_set_insert (blocks
, t
);
4657 collect_subblocks (blocks
, t
);
4661 /* Verify the GIMPLE statements in the CFG of FN. */
4664 verify_gimple_in_cfg (struct function
*fn
)
4668 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4670 timevar_push (TV_TREE_STMT_VERIFY
);
4671 visited
= pointer_set_create ();
4672 visited_stmts
= pointer_set_create ();
4674 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4675 blocks
= pointer_set_create ();
4676 if (DECL_INITIAL (fn
->decl
))
4678 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4679 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4682 FOR_EACH_BB_FN (bb
, fn
)
4684 gimple_stmt_iterator gsi
;
4686 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4688 gimple phi
= gsi_stmt (gsi
);
4692 pointer_set_insert (visited_stmts
, phi
);
4694 if (gimple_bb (phi
) != bb
)
4696 error ("gimple_bb (phi) is set to a wrong basic block");
4700 err2
|= verify_gimple_phi (phi
);
4702 /* Only PHI arguments have locations. */
4703 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4705 error ("PHI node with location");
4709 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4711 tree arg
= gimple_phi_arg_def (phi
, i
);
4712 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4716 error ("incorrect sharing of tree nodes");
4717 debug_generic_expr (addr
);
4720 location_t loc
= gimple_phi_arg_location (phi
, i
);
4721 if (virtual_operand_p (gimple_phi_result (phi
))
4722 && loc
!= UNKNOWN_LOCATION
)
4724 error ("virtual PHI with argument locations");
4727 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4730 debug_generic_expr (addr
);
4733 err2
|= verify_location (blocks
, loc
);
4737 debug_gimple_stmt (phi
);
4741 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4743 gimple stmt
= gsi_stmt (gsi
);
4745 struct walk_stmt_info wi
;
4749 pointer_set_insert (visited_stmts
, stmt
);
4751 if (gimple_bb (stmt
) != bb
)
4753 error ("gimple_bb (stmt) is set to a wrong basic block");
4757 err2
|= verify_gimple_stmt (stmt
);
4758 err2
|= verify_location (blocks
, gimple_location (stmt
));
4760 memset (&wi
, 0, sizeof (wi
));
4761 wi
.info
= (void *) visited
;
4762 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4765 error ("incorrect sharing of tree nodes");
4766 debug_generic_expr (addr
);
4770 memset (&wi
, 0, sizeof (wi
));
4771 wi
.info
= (void *) blocks
;
4772 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4775 debug_generic_expr (addr
);
4779 /* ??? Instead of not checking these stmts at all the walker
4780 should know its context via wi. */
4781 if (!is_gimple_debug (stmt
)
4782 && !is_gimple_omp (stmt
))
4784 memset (&wi
, 0, sizeof (wi
));
4785 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4788 debug_generic_expr (addr
);
4789 inform (gimple_location (stmt
), "in statement");
4794 /* If the statement is marked as part of an EH region, then it is
4795 expected that the statement could throw. Verify that when we
4796 have optimizations that simplify statements such that we prove
4797 that they cannot throw, that we update other data structures
4799 lp_nr
= lookup_stmt_eh_lp (stmt
);
4802 if (!stmt_could_throw_p (stmt
))
4804 error ("statement marked for throw, but doesn%'t");
4808 && !gsi_one_before_end_p (gsi
)
4809 && stmt_can_throw_internal (stmt
))
4811 error ("statement marked for throw in middle of block");
4817 debug_gimple_stmt (stmt
);
4822 eh_error_found
= false;
4823 if (get_eh_throw_stmt_table (cfun
))
4824 htab_traverse (get_eh_throw_stmt_table (cfun
),
4825 verify_eh_throw_stmt_node
,
4828 if (err
|| eh_error_found
)
4829 internal_error ("verify_gimple failed");
4831 pointer_set_destroy (visited
);
4832 pointer_set_destroy (visited_stmts
);
4833 pointer_set_destroy (blocks
);
4834 verify_histograms ();
4835 timevar_pop (TV_TREE_STMT_VERIFY
);
4839 /* Verifies that the flow information is OK. */
4842 gimple_verify_flow_info (void)
4846 gimple_stmt_iterator gsi
;
4851 if (ENTRY_BLOCK_PTR
->il
.gimple
.seq
|| ENTRY_BLOCK_PTR
->il
.gimple
.phi_nodes
)
4853 error ("ENTRY_BLOCK has IL associated with it");
4857 if (EXIT_BLOCK_PTR
->il
.gimple
.seq
|| EXIT_BLOCK_PTR
->il
.gimple
.phi_nodes
)
4859 error ("EXIT_BLOCK has IL associated with it");
4863 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
4864 if (e
->flags
& EDGE_FALLTHRU
)
4866 error ("fallthru to exit from bb %d", e
->src
->index
);
4872 bool found_ctrl_stmt
= false;
4876 /* Skip labels on the start of basic block. */
4877 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4880 gimple prev_stmt
= stmt
;
4882 stmt
= gsi_stmt (gsi
);
4884 if (gimple_code (stmt
) != GIMPLE_LABEL
)
4887 label
= gimple_label_label (stmt
);
4888 if (prev_stmt
&& DECL_NONLOCAL (label
))
4890 error ("nonlocal label ");
4891 print_generic_expr (stderr
, label
, 0);
4892 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4897 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
4899 error ("EH landing pad label ");
4900 print_generic_expr (stderr
, label
, 0);
4901 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4906 if (label_to_block (label
) != bb
)
4909 print_generic_expr (stderr
, label
, 0);
4910 fprintf (stderr
, " to block does not match in bb %d",
4915 if (decl_function_context (label
) != current_function_decl
)
4918 print_generic_expr (stderr
, label
, 0);
4919 fprintf (stderr
, " has incorrect context in bb %d",
4925 /* Verify that body of basic block BB is free of control flow. */
4926 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
4928 gimple stmt
= gsi_stmt (gsi
);
4930 if (found_ctrl_stmt
)
4932 error ("control flow in the middle of basic block %d",
4937 if (stmt_ends_bb_p (stmt
))
4938 found_ctrl_stmt
= true;
4940 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4943 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
4944 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4949 gsi
= gsi_last_bb (bb
);
4950 if (gsi_end_p (gsi
))
4953 stmt
= gsi_stmt (gsi
);
4955 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4958 err
|= verify_eh_edges (stmt
);
4960 if (is_ctrl_stmt (stmt
))
4962 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4963 if (e
->flags
& EDGE_FALLTHRU
)
4965 error ("fallthru edge after a control statement in bb %d",
4971 if (gimple_code (stmt
) != GIMPLE_COND
)
4973 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4974 after anything else but if statement. */
4975 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4976 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
4978 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4984 switch (gimple_code (stmt
))
4991 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
4995 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
4996 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
4997 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
4998 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
4999 || EDGE_COUNT (bb
->succs
) >= 3)
5001 error ("wrong outgoing edge flags at end of bb %d",
5009 if (simple_goto_p (stmt
))
5011 error ("explicit goto at end of bb %d", bb
->index
);
5016 /* FIXME. We should double check that the labels in the
5017 destination blocks have their address taken. */
5018 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5019 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5020 | EDGE_FALSE_VALUE
))
5021 || !(e
->flags
& EDGE_ABNORMAL
))
5023 error ("wrong outgoing edge flags at end of bb %d",
5031 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5033 /* ... fallthru ... */
5035 if (!single_succ_p (bb
)
5036 || (single_succ_edge (bb
)->flags
5037 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5038 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5040 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5043 if (single_succ (bb
) != EXIT_BLOCK_PTR
)
5045 error ("return edge does not point to exit in bb %d",
5057 n
= gimple_switch_num_labels (stmt
);
5059 /* Mark all the destination basic blocks. */
5060 for (i
= 0; i
< n
; ++i
)
5062 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5063 basic_block label_bb
= label_to_block (lab
);
5064 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5065 label_bb
->aux
= (void *)1;
5068 /* Verify that the case labels are sorted. */
5069 prev
= gimple_switch_label (stmt
, 0);
5070 for (i
= 1; i
< n
; ++i
)
5072 tree c
= gimple_switch_label (stmt
, i
);
5075 error ("found default case not at the start of "
5081 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5083 error ("case labels not sorted: ");
5084 print_generic_expr (stderr
, prev
, 0);
5085 fprintf (stderr
," is greater than ");
5086 print_generic_expr (stderr
, c
, 0);
5087 fprintf (stderr
," but comes before it.\n");
5092 /* VRP will remove the default case if it can prove it will
5093 never be executed. So do not verify there always exists
5094 a default case here. */
5096 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5100 error ("extra outgoing edge %d->%d",
5101 bb
->index
, e
->dest
->index
);
5105 e
->dest
->aux
= (void *)2;
5106 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5107 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5109 error ("wrong outgoing edge flags at end of bb %d",
5115 /* Check that we have all of them. */
5116 for (i
= 0; i
< n
; ++i
)
5118 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5119 basic_block label_bb
= label_to_block (lab
);
5121 if (label_bb
->aux
!= (void *)2)
5123 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5128 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5129 e
->dest
->aux
= (void *)0;
5133 case GIMPLE_EH_DISPATCH
:
5134 err
|= verify_eh_dispatch_edge (stmt
);
5142 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5143 verify_dominators (CDI_DOMINATORS
);
5149 /* Updates phi nodes after creating a forwarder block joined
5150 by edge FALLTHRU. */
5153 gimple_make_forwarder_block (edge fallthru
)
5157 basic_block dummy
, bb
;
5159 gimple_stmt_iterator gsi
;
5161 dummy
= fallthru
->src
;
5162 bb
= fallthru
->dest
;
5164 if (single_pred_p (bb
))
5167 /* If we redirected a branch we must create new PHI nodes at the
5169 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5171 gimple phi
, new_phi
;
5173 phi
= gsi_stmt (gsi
);
5174 var
= gimple_phi_result (phi
);
5175 new_phi
= create_phi_node (var
, bb
);
5176 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5177 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5181 /* Add the arguments we have stored on edges. */
5182 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5187 flush_pending_stmts (e
);
5192 /* Return a non-special label in the head of basic block BLOCK.
5193 Create one if it doesn't exist. */
5196 gimple_block_label (basic_block bb
)
5198 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5203 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5205 stmt
= gsi_stmt (i
);
5206 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5208 label
= gimple_label_label (stmt
);
5209 if (!DECL_NONLOCAL (label
))
5212 gsi_move_before (&i
, &s
);
5217 label
= create_artificial_label (UNKNOWN_LOCATION
);
5218 stmt
= gimple_build_label (label
);
5219 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5224 /* Attempt to perform edge redirection by replacing a possibly complex
5225 jump instruction by a goto or by removing the jump completely.
5226 This can apply only if all edges now point to the same block. The
5227 parameters and return values are equivalent to
5228 redirect_edge_and_branch. */
5231 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5233 basic_block src
= e
->src
;
5234 gimple_stmt_iterator i
;
5237 /* We can replace or remove a complex jump only when we have exactly
5239 if (EDGE_COUNT (src
->succs
) != 2
5240 /* Verify that all targets will be TARGET. Specifically, the
5241 edge that is not E must also go to TARGET. */
5242 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5245 i
= gsi_last_bb (src
);
5249 stmt
= gsi_stmt (i
);
5251 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5253 gsi_remove (&i
, true);
5254 e
= ssa_redirect_edge (e
, target
);
5255 e
->flags
= EDGE_FALLTHRU
;
5263 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5264 edge representing the redirected branch. */
5267 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5269 basic_block bb
= e
->src
;
5270 gimple_stmt_iterator gsi
;
5274 if (e
->flags
& EDGE_ABNORMAL
)
5277 if (e
->dest
== dest
)
5280 if (e
->flags
& EDGE_EH
)
5281 return redirect_eh_edge (e
, dest
);
5283 if (e
->src
!= ENTRY_BLOCK_PTR
)
5285 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5290 gsi
= gsi_last_bb (bb
);
5291 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5293 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5296 /* For COND_EXPR, we only need to redirect the edge. */
5300 /* No non-abnormal edges should lead from a non-simple goto, and
5301 simple ones should be represented implicitly. */
5306 tree label
= gimple_block_label (dest
);
5307 tree cases
= get_cases_for_edge (e
, stmt
);
5309 /* If we have a list of cases associated with E, then use it
5310 as it's a lot faster than walking the entire case vector. */
5313 edge e2
= find_edge (e
->src
, dest
);
5320 CASE_LABEL (cases
) = label
;
5321 cases
= CASE_CHAIN (cases
);
5324 /* If there was already an edge in the CFG, then we need
5325 to move all the cases associated with E to E2. */
5328 tree cases2
= get_cases_for_edge (e2
, stmt
);
5330 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5331 CASE_CHAIN (cases2
) = first
;
5333 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5337 size_t i
, n
= gimple_switch_num_labels (stmt
);
5339 for (i
= 0; i
< n
; i
++)
5341 tree elt
= gimple_switch_label (stmt
, i
);
5342 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5343 CASE_LABEL (elt
) = label
;
5351 int i
, n
= gimple_asm_nlabels (stmt
);
5354 for (i
= 0; i
< n
; ++i
)
5356 tree cons
= gimple_asm_label_op (stmt
, i
);
5357 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5360 label
= gimple_block_label (dest
);
5361 TREE_VALUE (cons
) = label
;
5365 /* If we didn't find any label matching the former edge in the
5366 asm labels, we must be redirecting the fallthrough
5368 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5373 gsi_remove (&gsi
, true);
5374 e
->flags
|= EDGE_FALLTHRU
;
5377 case GIMPLE_OMP_RETURN
:
5378 case GIMPLE_OMP_CONTINUE
:
5379 case GIMPLE_OMP_SECTIONS_SWITCH
:
5380 case GIMPLE_OMP_FOR
:
5381 /* The edges from OMP constructs can be simply redirected. */
5384 case GIMPLE_EH_DISPATCH
:
5385 if (!(e
->flags
& EDGE_FALLTHRU
))
5386 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5389 case GIMPLE_TRANSACTION
:
5390 /* The ABORT edge has a stored label associated with it, otherwise
5391 the edges are simply redirectable. */
5393 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5397 /* Otherwise it must be a fallthru edge, and we don't need to
5398 do anything besides redirecting it. */
5399 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5403 /* Update/insert PHI nodes as necessary. */
5405 /* Now update the edges in the CFG. */
5406 e
= ssa_redirect_edge (e
, dest
);
5411 /* Returns true if it is possible to remove edge E by redirecting
5412 it to the destination of the other edge from E->src. */
5415 gimple_can_remove_branch_p (const_edge e
)
5417 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5423 /* Simple wrapper, as we can always redirect fallthru edges. */
5426 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5428 e
= gimple_redirect_edge_and_branch (e
, dest
);
5435 /* Splits basic block BB after statement STMT (but at least after the
5436 labels). If STMT is NULL, BB is split just after the labels. */
5439 gimple_split_block (basic_block bb
, void *stmt
)
5441 gimple_stmt_iterator gsi
;
5442 gimple_stmt_iterator gsi_tgt
;
5449 new_bb
= create_empty_bb (bb
);
5451 /* Redirect the outgoing edges. */
5452 new_bb
->succs
= bb
->succs
;
5454 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5457 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5460 /* Move everything from GSI to the new basic block. */
5461 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5463 act
= gsi_stmt (gsi
);
5464 if (gimple_code (act
) == GIMPLE_LABEL
)
5477 if (gsi_end_p (gsi
))
5480 /* Split the statement list - avoid re-creating new containers as this
5481 brings ugly quadratic memory consumption in the inliner.
5482 (We are still quadratic since we need to update stmt BB pointers,
5484 gsi_split_seq_before (&gsi
, &list
);
5485 set_bb_seq (new_bb
, list
);
5486 for (gsi_tgt
= gsi_start (list
);
5487 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5488 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5494 /* Moves basic block BB after block AFTER. */
5497 gimple_move_block_after (basic_block bb
, basic_block after
)
5499 if (bb
->prev_bb
== after
)
5503 link_block (bb
, after
);
5509 /* Return TRUE if block BB has no executable statements, otherwise return
5513 gimple_empty_block_p (basic_block bb
)
5515 /* BB must have no executable statements. */
5516 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5519 if (gsi_end_p (gsi
))
5521 if (is_gimple_debug (gsi_stmt (gsi
)))
5522 gsi_next_nondebug (&gsi
);
5523 return gsi_end_p (gsi
);
5527 /* Split a basic block if it ends with a conditional branch and if the
5528 other part of the block is not empty. */
5531 gimple_split_block_before_cond_jump (basic_block bb
)
5533 gimple last
, split_point
;
5534 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5535 if (gsi_end_p (gsi
))
5537 last
= gsi_stmt (gsi
);
5538 if (gimple_code (last
) != GIMPLE_COND
5539 && gimple_code (last
) != GIMPLE_SWITCH
)
5541 gsi_prev_nondebug (&gsi
);
5542 split_point
= gsi_stmt (gsi
);
5543 return split_block (bb
, split_point
)->dest
;
5547 /* Return true if basic_block can be duplicated. */
5550 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5555 /* Create a duplicate of the basic block BB. NOTE: This does not
5556 preserve SSA form. */
5559 gimple_duplicate_bb (basic_block bb
)
5562 gimple_stmt_iterator gsi
, gsi_tgt
;
5563 gimple_seq phis
= phi_nodes (bb
);
5564 gimple phi
, stmt
, copy
;
5566 new_bb
= create_empty_bb (EXIT_BLOCK_PTR
->prev_bb
);
5568 /* Copy the PHI nodes. We ignore PHI node arguments here because
5569 the incoming edges have not been setup yet. */
5570 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5572 phi
= gsi_stmt (gsi
);
5573 copy
= create_phi_node (NULL_TREE
, new_bb
);
5574 create_new_def_for (gimple_phi_result (phi
), copy
,
5575 gimple_phi_result_ptr (copy
));
5576 gimple_set_uid (copy
, gimple_uid (phi
));
5579 gsi_tgt
= gsi_start_bb (new_bb
);
5580 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5582 def_operand_p def_p
;
5583 ssa_op_iter op_iter
;
5586 stmt
= gsi_stmt (gsi
);
5587 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5590 /* Don't duplicate label debug stmts. */
5591 if (gimple_debug_bind_p (stmt
)
5592 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5596 /* Create a new copy of STMT and duplicate STMT's virtual
5598 copy
= gimple_copy (stmt
);
5599 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5601 maybe_duplicate_eh_stmt (copy
, stmt
);
5602 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5604 /* When copying around a stmt writing into a local non-user
5605 aggregate, make sure it won't share stack slot with other
5607 lhs
= gimple_get_lhs (stmt
);
5608 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5610 tree base
= get_base_address (lhs
);
5612 && (TREE_CODE (base
) == VAR_DECL
5613 || TREE_CODE (base
) == RESULT_DECL
)
5614 && DECL_IGNORED_P (base
)
5615 && !TREE_STATIC (base
)
5616 && !DECL_EXTERNAL (base
)
5617 && (TREE_CODE (base
) != VAR_DECL
5618 || !DECL_HAS_VALUE_EXPR_P (base
)))
5619 DECL_NONSHAREABLE (base
) = 1;
5622 /* Create new names for all the definitions created by COPY and
5623 add replacement mappings for each new name. */
5624 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5625 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5631 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5634 add_phi_args_after_copy_edge (edge e_copy
)
5636 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5639 gimple phi
, phi_copy
;
5641 gimple_stmt_iterator psi
, psi_copy
;
5643 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5646 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5648 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5649 dest
= get_bb_original (e_copy
->dest
);
5651 dest
= e_copy
->dest
;
5653 e
= find_edge (bb
, dest
);
5656 /* During loop unrolling the target of the latch edge is copied.
5657 In this case we are not looking for edge to dest, but to
5658 duplicated block whose original was dest. */
5659 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5661 if ((e
->dest
->flags
& BB_DUPLICATED
)
5662 && get_bb_original (e
->dest
) == dest
)
5666 gcc_assert (e
!= NULL
);
5669 for (psi
= gsi_start_phis (e
->dest
),
5670 psi_copy
= gsi_start_phis (e_copy
->dest
);
5672 gsi_next (&psi
), gsi_next (&psi_copy
))
5674 phi
= gsi_stmt (psi
);
5675 phi_copy
= gsi_stmt (psi_copy
);
5676 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5677 add_phi_arg (phi_copy
, def
, e_copy
,
5678 gimple_phi_arg_location_from_edge (phi
, e
));
5683 /* Basic block BB_COPY was created by code duplication. Add phi node
5684 arguments for edges going out of BB_COPY. The blocks that were
5685 duplicated have BB_DUPLICATED set. */
5688 add_phi_args_after_copy_bb (basic_block bb_copy
)
5693 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5695 add_phi_args_after_copy_edge (e_copy
);
5699 /* Blocks in REGION_COPY array of length N_REGION were created by
5700 duplication of basic blocks. Add phi node arguments for edges
5701 going from these blocks. If E_COPY is not NULL, also add
5702 phi node arguments for its destination.*/
5705 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5710 for (i
= 0; i
< n_region
; i
++)
5711 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5713 for (i
= 0; i
< n_region
; i
++)
5714 add_phi_args_after_copy_bb (region_copy
[i
]);
5716 add_phi_args_after_copy_edge (e_copy
);
5718 for (i
= 0; i
< n_region
; i
++)
5719 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5722 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5723 important exit edge EXIT. By important we mean that no SSA name defined
5724 inside region is live over the other exit edges of the region. All entry
5725 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5726 to the duplicate of the region. Dominance and loop information is
5727 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5728 UPDATE_DOMINANCE is false then we assume that the caller will update the
5729 dominance information after calling this function. The new basic
5730 blocks are stored to REGION_COPY in the same order as they had in REGION,
5731 provided that REGION_COPY is not NULL.
5732 The function returns false if it is unable to copy the region,
5736 gimple_duplicate_sese_region (edge entry
, edge exit
,
5737 basic_block
*region
, unsigned n_region
,
5738 basic_block
*region_copy
,
5739 bool update_dominance
)
5742 bool free_region_copy
= false, copying_header
= false;
5743 struct loop
*loop
= entry
->dest
->loop_father
;
5745 vec
<basic_block
> doms
;
5747 int total_freq
= 0, entry_freq
= 0;
5748 gcov_type total_count
= 0, entry_count
= 0;
5750 if (!can_copy_bbs_p (region
, n_region
))
5753 /* Some sanity checking. Note that we do not check for all possible
5754 missuses of the functions. I.e. if you ask to copy something weird,
5755 it will work, but the state of structures probably will not be
5757 for (i
= 0; i
< n_region
; i
++)
5759 /* We do not handle subloops, i.e. all the blocks must belong to the
5761 if (region
[i
]->loop_father
!= loop
)
5764 if (region
[i
] != entry
->dest
5765 && region
[i
] == loop
->header
)
5769 set_loop_copy (loop
, loop
);
5771 /* In case the function is used for loop header copying (which is the primary
5772 use), ensure that EXIT and its copy will be new latch and entry edges. */
5773 if (loop
->header
== entry
->dest
)
5775 copying_header
= true;
5776 set_loop_copy (loop
, loop_outer (loop
));
5778 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5781 for (i
= 0; i
< n_region
; i
++)
5782 if (region
[i
] != exit
->src
5783 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5789 region_copy
= XNEWVEC (basic_block
, n_region
);
5790 free_region_copy
= true;
5793 initialize_original_copy_tables ();
5795 /* Record blocks outside the region that are dominated by something
5797 if (update_dominance
)
5800 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5803 if (entry
->dest
->count
)
5805 total_count
= entry
->dest
->count
;
5806 entry_count
= entry
->count
;
5807 /* Fix up corner cases, to avoid division by zero or creation of negative
5809 if (entry_count
> total_count
)
5810 entry_count
= total_count
;
5814 total_freq
= entry
->dest
->frequency
;
5815 entry_freq
= EDGE_FREQUENCY (entry
);
5816 /* Fix up corner cases, to avoid division by zero or creation of negative
5818 if (total_freq
== 0)
5820 else if (entry_freq
> total_freq
)
5821 entry_freq
= total_freq
;
5824 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5825 split_edge_bb_loc (entry
), update_dominance
);
5828 scale_bbs_frequencies_gcov_type (region
, n_region
,
5829 total_count
- entry_count
,
5831 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5836 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5838 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
5843 loop
->header
= exit
->dest
;
5844 loop
->latch
= exit
->src
;
5847 /* Redirect the entry and add the phi node arguments. */
5848 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5849 gcc_assert (redirected
!= NULL
);
5850 flush_pending_stmts (entry
);
5852 /* Concerning updating of dominators: We must recount dominators
5853 for entry block and its copy. Anything that is outside of the
5854 region, but was dominated by something inside needs recounting as
5856 if (update_dominance
)
5858 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5859 doms
.safe_push (get_bb_original (entry
->dest
));
5860 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5864 /* Add the other PHI node arguments. */
5865 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5867 if (free_region_copy
)
5870 free_original_copy_tables ();
5874 /* Checks if BB is part of the region defined by N_REGION BBS. */
5876 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
5880 for (n
= 0; n
< n_region
; n
++)
5888 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5889 are stored to REGION_COPY in the same order in that they appear
5890 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5891 the region, EXIT an exit from it. The condition guarding EXIT
5892 is moved to ENTRY. Returns true if duplication succeeds, false
5918 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
5919 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
5920 basic_block
*region_copy ATTRIBUTE_UNUSED
)
5923 bool free_region_copy
= false;
5924 struct loop
*loop
= exit
->dest
->loop_father
;
5925 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5926 basic_block switch_bb
, entry_bb
, nentry_bb
;
5927 vec
<basic_block
> doms
;
5928 int total_freq
= 0, exit_freq
= 0;
5929 gcov_type total_count
= 0, exit_count
= 0;
5930 edge exits
[2], nexits
[2], e
;
5931 gimple_stmt_iterator gsi
;
5934 basic_block exit_bb
;
5935 gimple_stmt_iterator psi
;
5938 struct loop
*target
, *aloop
, *cloop
;
5940 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5942 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5944 if (!can_copy_bbs_p (region
, n_region
))
5947 initialize_original_copy_tables ();
5948 set_loop_copy (orig_loop
, loop
);
5951 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
5953 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
5955 cloop
= duplicate_loop (aloop
, target
);
5956 duplicate_subloops (aloop
, cloop
);
5962 region_copy
= XNEWVEC (basic_block
, n_region
);
5963 free_region_copy
= true;
5966 gcc_assert (!need_ssa_update_p (cfun
));
5968 /* Record blocks outside the region that are dominated by something
5970 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5972 if (exit
->src
->count
)
5974 total_count
= exit
->src
->count
;
5975 exit_count
= exit
->count
;
5976 /* Fix up corner cases, to avoid division by zero or creation of negative
5978 if (exit_count
> total_count
)
5979 exit_count
= total_count
;
5983 total_freq
= exit
->src
->frequency
;
5984 exit_freq
= EDGE_FREQUENCY (exit
);
5985 /* Fix up corner cases, to avoid division by zero or creation of negative
5987 if (total_freq
== 0)
5989 if (exit_freq
> total_freq
)
5990 exit_freq
= total_freq
;
5993 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
5994 split_edge_bb_loc (exit
), true);
5997 scale_bbs_frequencies_gcov_type (region
, n_region
,
5998 total_count
- exit_count
,
6000 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6005 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6007 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6010 /* Create the switch block, and put the exit condition to it. */
6011 entry_bb
= entry
->dest
;
6012 nentry_bb
= get_bb_copy (entry_bb
);
6013 if (!last_stmt (entry
->src
)
6014 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6015 switch_bb
= entry
->src
;
6017 switch_bb
= split_edge (entry
);
6018 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6020 gsi
= gsi_last_bb (switch_bb
);
6021 cond_stmt
= last_stmt (exit
->src
);
6022 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6023 cond_stmt
= gimple_copy (cond_stmt
);
6025 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6027 sorig
= single_succ_edge (switch_bb
);
6028 sorig
->flags
= exits
[1]->flags
;
6029 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6031 /* Register the new edge from SWITCH_BB in loop exit lists. */
6032 rescan_loop_exit (snew
, true, false);
6034 /* Add the PHI node arguments. */
6035 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6037 /* Get rid of now superfluous conditions and associated edges (and phi node
6039 exit_bb
= exit
->dest
;
6041 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6042 PENDING_STMT (e
) = NULL
;
6044 /* The latch of ORIG_LOOP was copied, and so was the backedge
6045 to the original header. We redirect this backedge to EXIT_BB. */
6046 for (i
= 0; i
< n_region
; i
++)
6047 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6049 gcc_assert (single_succ_edge (region_copy
[i
]));
6050 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6051 PENDING_STMT (e
) = NULL
;
6052 for (psi
= gsi_start_phis (exit_bb
);
6056 phi
= gsi_stmt (psi
);
6057 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6058 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6061 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6062 PENDING_STMT (e
) = NULL
;
6064 /* Anything that is outside of the region, but was dominated by something
6065 inside needs to update dominance info. */
6066 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6068 /* Update the SSA web. */
6069 update_ssa (TODO_update_ssa
);
6071 if (free_region_copy
)
6074 free_original_copy_tables ();
6078 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6079 adding blocks when the dominator traversal reaches EXIT. This
6080 function silently assumes that ENTRY strictly dominates EXIT. */
6083 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6084 vec
<basic_block
> *bbs_p
)
6088 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6090 son
= next_dom_son (CDI_DOMINATORS
, son
))
6092 bbs_p
->safe_push (son
);
6094 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6098 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6099 The duplicates are recorded in VARS_MAP. */
6102 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6105 tree t
= *tp
, new_t
;
6106 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6109 if (DECL_CONTEXT (t
) == to_context
)
6112 loc
= pointer_map_contains (vars_map
, t
);
6116 loc
= pointer_map_insert (vars_map
, t
);
6120 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6121 add_local_decl (f
, new_t
);
6125 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6126 new_t
= copy_node (t
);
6128 DECL_CONTEXT (new_t
) = to_context
;
6133 new_t
= (tree
) *loc
;
6139 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6140 VARS_MAP maps old ssa names and var_decls to the new ones. */
6143 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
6149 gcc_assert (!virtual_operand_p (name
));
6151 loc
= pointer_map_contains (vars_map
, name
);
6155 tree decl
= SSA_NAME_VAR (name
);
6158 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6159 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6160 decl
, SSA_NAME_DEF_STMT (name
));
6161 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6162 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6166 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6167 name
, SSA_NAME_DEF_STMT (name
));
6169 loc
= pointer_map_insert (vars_map
, name
);
6173 new_name
= (tree
) *loc
;
6184 struct pointer_map_t
*vars_map
;
6185 htab_t new_label_map
;
6186 struct pointer_map_t
*eh_map
;
6190 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6191 contained in *TP if it has been ORIG_BLOCK previously and change the
6192 DECL_CONTEXT of every local variable referenced in *TP. */
6195 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6197 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6198 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6203 tree block
= TREE_BLOCK (t
);
6204 if (block
== p
->orig_block
6205 || (p
->orig_block
== NULL_TREE
6206 && block
!= NULL_TREE
))
6207 TREE_SET_BLOCK (t
, p
->new_block
);
6208 #ifdef ENABLE_CHECKING
6209 else if (block
!= NULL_TREE
)
6211 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6212 block
= BLOCK_SUPERCONTEXT (block
);
6213 gcc_assert (block
== p
->orig_block
);
6217 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6219 if (TREE_CODE (t
) == SSA_NAME
)
6220 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6221 else if (TREE_CODE (t
) == LABEL_DECL
)
6223 if (p
->new_label_map
)
6225 struct tree_map in
, *out
;
6227 out
= (struct tree_map
*)
6228 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6233 DECL_CONTEXT (t
) = p
->to_context
;
6235 else if (p
->remap_decls_p
)
6237 /* Replace T with its duplicate. T should no longer appear in the
6238 parent function, so this looks wasteful; however, it may appear
6239 in referenced_vars, and more importantly, as virtual operands of
6240 statements, and in alias lists of other variables. It would be
6241 quite difficult to expunge it from all those places. ??? It might
6242 suffice to do this for addressable variables. */
6243 if ((TREE_CODE (t
) == VAR_DECL
6244 && !is_global_var (t
))
6245 || TREE_CODE (t
) == CONST_DECL
)
6246 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6250 else if (TYPE_P (t
))
6256 /* Helper for move_stmt_r. Given an EH region number for the source
6257 function, map that to the duplicate EH regio number in the dest. */
6260 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6262 eh_region old_r
, new_r
;
6265 old_r
= get_eh_region_from_number (old_nr
);
6266 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6267 new_r
= (eh_region
) *slot
;
6269 return new_r
->index
;
6272 /* Similar, but operate on INTEGER_CSTs. */
6275 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6279 old_nr
= tree_to_shwi (old_t_nr
);
6280 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6282 return build_int_cst (integer_type_node
, new_nr
);
6285 /* Like move_stmt_op, but for gimple statements.
6287 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6288 contained in the current statement in *GSI_P and change the
6289 DECL_CONTEXT of every local variable referenced in the current
6293 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6294 struct walk_stmt_info
*wi
)
6296 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6297 gimple stmt
= gsi_stmt (*gsi_p
);
6298 tree block
= gimple_block (stmt
);
6300 if (block
== p
->orig_block
6301 || (p
->orig_block
== NULL_TREE
6302 && block
!= NULL_TREE
))
6303 gimple_set_block (stmt
, p
->new_block
);
6305 switch (gimple_code (stmt
))
6308 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6310 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6311 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6312 switch (DECL_FUNCTION_CODE (fndecl
))
6314 case BUILT_IN_EH_COPY_VALUES
:
6315 r
= gimple_call_arg (stmt
, 1);
6316 r
= move_stmt_eh_region_tree_nr (r
, p
);
6317 gimple_call_set_arg (stmt
, 1, r
);
6320 case BUILT_IN_EH_POINTER
:
6321 case BUILT_IN_EH_FILTER
:
6322 r
= gimple_call_arg (stmt
, 0);
6323 r
= move_stmt_eh_region_tree_nr (r
, p
);
6324 gimple_call_set_arg (stmt
, 0, r
);
6335 int r
= gimple_resx_region (stmt
);
6336 r
= move_stmt_eh_region_nr (r
, p
);
6337 gimple_resx_set_region (stmt
, r
);
6341 case GIMPLE_EH_DISPATCH
:
6343 int r
= gimple_eh_dispatch_region (stmt
);
6344 r
= move_stmt_eh_region_nr (r
, p
);
6345 gimple_eh_dispatch_set_region (stmt
, r
);
6349 case GIMPLE_OMP_RETURN
:
6350 case GIMPLE_OMP_CONTINUE
:
6353 if (is_gimple_omp (stmt
))
6355 /* Do not remap variables inside OMP directives. Variables
6356 referenced in clauses and directive header belong to the
6357 parent function and should not be moved into the child
6359 bool save_remap_decls_p
= p
->remap_decls_p
;
6360 p
->remap_decls_p
= false;
6361 *handled_ops_p
= true;
6363 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6366 p
->remap_decls_p
= save_remap_decls_p
;
6374 /* Move basic block BB from function CFUN to function DEST_FN. The
6375 block is moved out of the original linked list and placed after
6376 block AFTER in the new list. Also, the block is removed from the
6377 original array of blocks and placed in DEST_FN's array of blocks.
6378 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6379 updated to reflect the moved edges.
6381 The local variables are remapped to new instances, VARS_MAP is used
6382 to record the mapping. */
6385 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6386 basic_block after
, bool update_edge_count_p
,
6387 struct move_stmt_d
*d
)
6389 struct control_flow_graph
*cfg
;
6392 gimple_stmt_iterator si
;
6393 unsigned old_len
, new_len
;
6395 /* Remove BB from dominance structures. */
6396 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6398 /* Move BB from its current loop to the copy in the new function. */
6401 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6403 bb
->loop_father
= new_loop
;
6406 /* Link BB to the new linked list. */
6407 move_block_after (bb
, after
);
6409 /* Update the edge count in the corresponding flowgraphs. */
6410 if (update_edge_count_p
)
6411 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6413 cfun
->cfg
->x_n_edges
--;
6414 dest_cfun
->cfg
->x_n_edges
++;
6417 /* Remove BB from the original basic block array. */
6418 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6419 cfun
->cfg
->x_n_basic_blocks
--;
6421 /* Grow DEST_CFUN's basic block array if needed. */
6422 cfg
= dest_cfun
->cfg
;
6423 cfg
->x_n_basic_blocks
++;
6424 if (bb
->index
>= cfg
->x_last_basic_block
)
6425 cfg
->x_last_basic_block
= bb
->index
+ 1;
6427 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6428 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6430 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6431 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6434 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6436 /* Remap the variables in phi nodes. */
6437 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6439 gimple phi
= gsi_stmt (si
);
6441 tree op
= PHI_RESULT (phi
);
6445 if (virtual_operand_p (op
))
6447 /* Remove the phi nodes for virtual operands (alias analysis will be
6448 run for the new function, anyway). */
6449 remove_phi_node (&si
, true);
6453 SET_PHI_RESULT (phi
,
6454 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6455 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6457 op
= USE_FROM_PTR (use
);
6458 if (TREE_CODE (op
) == SSA_NAME
)
6459 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6462 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6464 location_t locus
= gimple_phi_arg_location (phi
, i
);
6465 tree block
= LOCATION_BLOCK (locus
);
6467 if (locus
== UNKNOWN_LOCATION
)
6469 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6471 if (d
->new_block
== NULL_TREE
)
6472 locus
= LOCATION_LOCUS (locus
);
6474 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6475 gimple_phi_arg_set_location (phi
, i
, locus
);
6482 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6484 gimple stmt
= gsi_stmt (si
);
6485 struct walk_stmt_info wi
;
6487 memset (&wi
, 0, sizeof (wi
));
6489 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6491 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6493 tree label
= gimple_label_label (stmt
);
6494 int uid
= LABEL_DECL_UID (label
);
6496 gcc_assert (uid
> -1);
6498 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6499 if (old_len
<= (unsigned) uid
)
6501 new_len
= 3 * uid
/ 2 + 1;
6502 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6505 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6506 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6508 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6510 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6511 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6514 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6515 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6517 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6518 gimple_remove_stmt_histograms (cfun
, stmt
);
6520 /* We cannot leave any operands allocated from the operand caches of
6521 the current function. */
6522 free_stmt_operands (stmt
);
6523 push_cfun (dest_cfun
);
6528 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6529 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6531 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6532 if (d
->orig_block
== NULL_TREE
6533 || block
== d
->orig_block
)
6534 e
->goto_locus
= d
->new_block
?
6535 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6536 LOCATION_LOCUS (e
->goto_locus
);
6540 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6541 the outermost EH region. Use REGION as the incoming base EH region. */
6544 find_outermost_region_in_block (struct function
*src_cfun
,
6545 basic_block bb
, eh_region region
)
6547 gimple_stmt_iterator si
;
6549 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6551 gimple stmt
= gsi_stmt (si
);
6552 eh_region stmt_region
;
6555 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6556 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6560 region
= stmt_region
;
6561 else if (stmt_region
!= region
)
6563 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6564 gcc_assert (region
!= NULL
);
6573 new_label_mapper (tree decl
, void *data
)
6575 htab_t hash
= (htab_t
) data
;
6579 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6581 m
= XNEW (struct tree_map
);
6582 m
->hash
= DECL_UID (decl
);
6583 m
->base
.from
= decl
;
6584 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6585 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6586 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6587 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6589 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6590 gcc_assert (*slot
== NULL
);
6597 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6601 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6606 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6609 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6611 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6614 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6616 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6617 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6619 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6624 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6625 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6628 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6632 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6635 /* Discard it from the old loop array. */
6636 (*get_loops (fn1
))[loop
->num
] = NULL
;
6638 /* Place it in the new loop array, assigning it a new number. */
6639 loop
->num
= number_of_loops (fn2
);
6640 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6642 /* Recurse to children. */
6643 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6644 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6647 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6648 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6649 single basic block in the original CFG and the new basic block is
6650 returned. DEST_CFUN must not have a CFG yet.
6652 Note that the region need not be a pure SESE region. Blocks inside
6653 the region may contain calls to abort/exit. The only restriction
6654 is that ENTRY_BB should be the only entry point and it must
6657 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6658 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6659 to the new function.
6661 All local variables referenced in the region are assumed to be in
6662 the corresponding BLOCK_VARS and unexpanded variable lists
6663 associated with DEST_CFUN. */
6666 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6667 basic_block exit_bb
, tree orig_block
)
6669 vec
<basic_block
> bbs
, dom_bbs
;
6670 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6671 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6672 struct function
*saved_cfun
= cfun
;
6673 int *entry_flag
, *exit_flag
;
6674 unsigned *entry_prob
, *exit_prob
;
6675 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6678 htab_t new_label_map
;
6679 struct pointer_map_t
*vars_map
, *eh_map
;
6680 struct loop
*loop
= entry_bb
->loop_father
;
6681 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6682 struct move_stmt_d d
;
6684 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6686 gcc_assert (entry_bb
!= exit_bb
6688 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6690 /* Collect all the blocks in the region. Manually add ENTRY_BB
6691 because it won't be added by dfs_enumerate_from. */
6693 bbs
.safe_push (entry_bb
);
6694 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6696 /* The blocks that used to be dominated by something in BBS will now be
6697 dominated by the new block. */
6698 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6702 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6703 the predecessor edges to ENTRY_BB and the successor edges to
6704 EXIT_BB so that we can re-attach them to the new basic block that
6705 will replace the region. */
6706 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6707 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6708 entry_flag
= XNEWVEC (int, num_entry_edges
);
6709 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6711 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6713 entry_prob
[i
] = e
->probability
;
6714 entry_flag
[i
] = e
->flags
;
6715 entry_pred
[i
++] = e
->src
;
6721 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6722 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6723 exit_flag
= XNEWVEC (int, num_exit_edges
);
6724 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6726 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6728 exit_prob
[i
] = e
->probability
;
6729 exit_flag
[i
] = e
->flags
;
6730 exit_succ
[i
++] = e
->dest
;
6742 /* Switch context to the child function to initialize DEST_FN's CFG. */
6743 gcc_assert (dest_cfun
->cfg
== NULL
);
6744 push_cfun (dest_cfun
);
6746 init_empty_tree_cfg ();
6748 /* Initialize EH information for the new function. */
6750 new_label_map
= NULL
;
6753 eh_region region
= NULL
;
6755 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6756 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6758 init_eh_for_function ();
6761 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6762 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6763 new_label_mapper
, new_label_map
);
6767 /* Initialize an empty loop tree. */
6768 struct loops
*loops
= ggc_alloc_cleared_loops ();
6769 init_loops_structure (dest_cfun
, loops
, 1);
6770 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6771 set_loops_for_fn (dest_cfun
, loops
);
6773 /* Move the outlined loop tree part. */
6774 num_nodes
= bbs
.length ();
6775 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6777 if (bb
->loop_father
->header
== bb
)
6779 struct loop
*this_loop
= bb
->loop_father
;
6780 struct loop
*outer
= loop_outer (this_loop
);
6782 /* If the SESE region contains some bbs ending with
6783 a noreturn call, those are considered to belong
6784 to the outermost loop in saved_cfun, rather than
6785 the entry_bb's loop_father. */
6789 num_nodes
-= this_loop
->num_nodes
;
6790 flow_loop_tree_node_remove (bb
->loop_father
);
6791 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6792 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6795 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6798 /* Remove loop exits from the outlined region. */
6799 if (loops_for_fn (saved_cfun
)->exits
)
6800 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6802 void **slot
= htab_find_slot_with_hash
6803 (loops_for_fn (saved_cfun
)->exits
, e
,
6804 htab_hash_pointer (e
), NO_INSERT
);
6806 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6811 /* Adjust the number of blocks in the tree root of the outlined part. */
6812 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6814 /* Setup a mapping to be used by move_block_to_fn. */
6815 loop
->aux
= current_loops
->tree_root
;
6816 loop0
->aux
= current_loops
->tree_root
;
6820 /* Move blocks from BBS into DEST_CFUN. */
6821 gcc_assert (bbs
.length () >= 2);
6822 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6823 vars_map
= pointer_map_create ();
6825 memset (&d
, 0, sizeof (d
));
6826 d
.orig_block
= orig_block
;
6827 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6828 d
.from_context
= cfun
->decl
;
6829 d
.to_context
= dest_cfun
->decl
;
6830 d
.vars_map
= vars_map
;
6831 d
.new_label_map
= new_label_map
;
6833 d
.remap_decls_p
= true;
6835 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6837 /* No need to update edge counts on the last block. It has
6838 already been updated earlier when we detached the region from
6839 the original CFG. */
6840 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6846 /* Loop sizes are no longer correct, fix them up. */
6847 loop
->num_nodes
-= num_nodes
;
6848 for (struct loop
*outer
= loop_outer (loop
);
6849 outer
; outer
= loop_outer (outer
))
6850 outer
->num_nodes
-= num_nodes
;
6851 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6853 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vect_loops
)
6856 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
6861 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
6863 dest_cfun
->has_simduid_loops
= true;
6865 if (aloop
->force_vect
)
6866 dest_cfun
->has_force_vect_loops
= true;
6870 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6874 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6876 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6877 = BLOCK_SUBBLOCKS (orig_block
);
6878 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6879 block
; block
= BLOCK_CHAIN (block
))
6880 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6881 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6884 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6885 vars_map
, dest_cfun
->decl
);
6888 htab_delete (new_label_map
);
6890 pointer_map_destroy (eh_map
);
6891 pointer_map_destroy (vars_map
);
6893 /* Rewire the entry and exit blocks. The successor to the entry
6894 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6895 the child function. Similarly, the predecessor of DEST_FN's
6896 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6897 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6898 various CFG manipulation function get to the right CFG.
6900 FIXME, this is silly. The CFG ought to become a parameter to
6902 push_cfun (dest_cfun
);
6903 make_edge (ENTRY_BLOCK_PTR
, entry_bb
, EDGE_FALLTHRU
);
6905 make_edge (exit_bb
, EXIT_BLOCK_PTR
, 0);
6908 /* Back in the original function, the SESE region has disappeared,
6909 create a new basic block in its place. */
6910 bb
= create_empty_bb (entry_pred
[0]);
6912 add_bb_to_loop (bb
, loop
);
6913 for (i
= 0; i
< num_entry_edges
; i
++)
6915 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6916 e
->probability
= entry_prob
[i
];
6919 for (i
= 0; i
< num_exit_edges
; i
++)
6921 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6922 e
->probability
= exit_prob
[i
];
6925 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6926 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
6927 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6945 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6949 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
6951 tree arg
, var
, old_current_fndecl
= current_function_decl
;
6952 struct function
*dsf
;
6953 bool ignore_topmost_bind
= false, any_var
= false;
6956 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
6957 && decl_is_tm_clone (fndecl
));
6958 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
6960 current_function_decl
= fndecl
;
6961 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
6963 arg
= DECL_ARGUMENTS (fndecl
);
6966 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6967 fprintf (file
, " ");
6968 print_generic_expr (file
, arg
, dump_flags
);
6969 if (flags
& TDF_VERBOSE
)
6970 print_node (file
, "", arg
, 4);
6971 if (DECL_CHAIN (arg
))
6972 fprintf (file
, ", ");
6973 arg
= DECL_CHAIN (arg
);
6975 fprintf (file
, ")\n");
6977 if (flags
& TDF_VERBOSE
)
6978 print_node (file
, "", fndecl
, 2);
6980 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
6981 if (dsf
&& (flags
& TDF_EH
))
6982 dump_eh_tree (file
, dsf
);
6984 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
6986 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
6987 current_function_decl
= old_current_fndecl
;
6991 /* When GIMPLE is lowered, the variables are no longer available in
6992 BIND_EXPRs, so display them separately. */
6993 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
6996 ignore_topmost_bind
= true;
6998 fprintf (file
, "{\n");
6999 if (!vec_safe_is_empty (fun
->local_decls
))
7000 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7002 print_generic_decl (file
, var
, flags
);
7003 if (flags
& TDF_VERBOSE
)
7004 print_node (file
, "", var
, 4);
7005 fprintf (file
, "\n");
7009 if (gimple_in_ssa_p (cfun
))
7010 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7012 tree name
= ssa_name (ix
);
7013 if (name
&& !SSA_NAME_VAR (name
))
7015 fprintf (file
, " ");
7016 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7017 fprintf (file
, " ");
7018 print_generic_expr (file
, name
, flags
);
7019 fprintf (file
, ";\n");
7026 if (fun
&& fun
->decl
== fndecl
7028 && basic_block_info_for_function (fun
))
7030 /* If the CFG has been built, emit a CFG-based dump. */
7031 if (!ignore_topmost_bind
)
7032 fprintf (file
, "{\n");
7034 if (any_var
&& n_basic_blocks_for_fn (fun
))
7035 fprintf (file
, "\n");
7037 FOR_EACH_BB_FN (bb
, fun
)
7038 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7040 fprintf (file
, "}\n");
7042 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7044 /* The function is now in GIMPLE form but the CFG has not been
7045 built yet. Emit the single sequence of GIMPLE statements
7046 that make up its body. */
7047 gimple_seq body
= gimple_body (fndecl
);
7049 if (gimple_seq_first_stmt (body
)
7050 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7051 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7052 print_gimple_seq (file
, body
, 0, flags
);
7055 if (!ignore_topmost_bind
)
7056 fprintf (file
, "{\n");
7059 fprintf (file
, "\n");
7061 print_gimple_seq (file
, body
, 2, flags
);
7062 fprintf (file
, "}\n");
7069 /* Make a tree based dump. */
7070 chain
= DECL_SAVED_TREE (fndecl
);
7071 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7073 if (ignore_topmost_bind
)
7075 chain
= BIND_EXPR_BODY (chain
);
7083 if (!ignore_topmost_bind
)
7084 fprintf (file
, "{\n");
7089 fprintf (file
, "\n");
7091 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7092 if (ignore_topmost_bind
)
7093 fprintf (file
, "}\n");
7096 if (flags
& TDF_ENUMERATE_LOCALS
)
7097 dump_enumerated_decls (file
, flags
);
7098 fprintf (file
, "\n\n");
7100 current_function_decl
= old_current_fndecl
;
7103 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7106 debug_function (tree fn
, int flags
)
7108 dump_function_to_file (fn
, stderr
, flags
);
7112 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7115 print_pred_bbs (FILE *file
, basic_block bb
)
7120 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7121 fprintf (file
, "bb_%d ", e
->src
->index
);
7125 /* Print on FILE the indexes for the successors of basic_block BB. */
7128 print_succ_bbs (FILE *file
, basic_block bb
)
7133 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7134 fprintf (file
, "bb_%d ", e
->dest
->index
);
7137 /* Print to FILE the basic block BB following the VERBOSITY level. */
7140 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7142 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7143 memset ((void *) s_indent
, ' ', (size_t) indent
);
7144 s_indent
[indent
] = '\0';
7146 /* Print basic_block's header. */
7149 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7150 print_pred_bbs (file
, bb
);
7151 fprintf (file
, "}, succs = {");
7152 print_succ_bbs (file
, bb
);
7153 fprintf (file
, "})\n");
7156 /* Print basic_block's body. */
7159 fprintf (file
, "%s {\n", s_indent
);
7160 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7161 fprintf (file
, "%s }\n", s_indent
);
7165 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7167 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7168 VERBOSITY level this outputs the contents of the loop, or just its
7172 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7180 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7181 memset ((void *) s_indent
, ' ', (size_t) indent
);
7182 s_indent
[indent
] = '\0';
7184 /* Print loop's header. */
7185 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7187 fprintf (file
, "header = %d", loop
->header
->index
);
7190 fprintf (file
, "deleted)\n");
7194 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7196 fprintf (file
, ", multiple latches");
7197 fprintf (file
, ", niter = ");
7198 print_generic_expr (file
, loop
->nb_iterations
, 0);
7200 if (loop
->any_upper_bound
)
7202 fprintf (file
, ", upper_bound = ");
7203 dump_double_int (file
, loop
->nb_iterations_upper_bound
, true);
7206 if (loop
->any_estimate
)
7208 fprintf (file
, ", estimate = ");
7209 dump_double_int (file
, loop
->nb_iterations_estimate
, true);
7211 fprintf (file
, ")\n");
7213 /* Print loop's body. */
7216 fprintf (file
, "%s{\n", s_indent
);
7218 if (bb
->loop_father
== loop
)
7219 print_loops_bb (file
, bb
, indent
, verbosity
);
7221 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7222 fprintf (file
, "%s}\n", s_indent
);
7226 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7227 spaces. Following VERBOSITY level this outputs the contents of the
7228 loop, or just its structure. */
7231 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7237 print_loop (file
, loop
, indent
, verbosity
);
7238 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7241 /* Follow a CFG edge from the entry point of the program, and on entry
7242 of a loop, pretty print the loop structure on FILE. */
7245 print_loops (FILE *file
, int verbosity
)
7249 bb
= ENTRY_BLOCK_PTR
;
7250 if (bb
&& bb
->loop_father
)
7251 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7257 debug (struct loop
&ref
)
7259 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7263 debug (struct loop
*ptr
)
7268 fprintf (stderr
, "<nil>\n");
7271 /* Dump a loop verbosely. */
7274 debug_verbose (struct loop
&ref
)
7276 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7280 debug_verbose (struct loop
*ptr
)
7285 fprintf (stderr
, "<nil>\n");
7289 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7292 debug_loops (int verbosity
)
7294 print_loops (stderr
, verbosity
);
7297 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7300 debug_loop (struct loop
*loop
, int verbosity
)
7302 print_loop (stderr
, loop
, 0, verbosity
);
7305 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7309 debug_loop_num (unsigned num
, int verbosity
)
7311 debug_loop (get_loop (cfun
, num
), verbosity
);
7314 /* Return true if BB ends with a call, possibly followed by some
7315 instructions that must stay with the call. Return false,
7319 gimple_block_ends_with_call_p (basic_block bb
)
7321 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7322 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7326 /* Return true if BB ends with a conditional branch. Return false,
7330 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7332 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7333 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7337 /* Return true if we need to add fake edge to exit at statement T.
7338 Helper function for gimple_flow_call_edges_add. */
7341 need_fake_edge_p (gimple t
)
7343 tree fndecl
= NULL_TREE
;
7346 /* NORETURN and LONGJMP calls already have an edge to exit.
7347 CONST and PURE calls do not need one.
7348 We don't currently check for CONST and PURE here, although
7349 it would be a good idea, because those attributes are
7350 figured out from the RTL in mark_constant_function, and
7351 the counter incrementation code from -fprofile-arcs
7352 leads to different results from -fbranch-probabilities. */
7353 if (is_gimple_call (t
))
7355 fndecl
= gimple_call_fndecl (t
);
7356 call_flags
= gimple_call_flags (t
);
7359 if (is_gimple_call (t
)
7361 && DECL_BUILT_IN (fndecl
)
7362 && (call_flags
& ECF_NOTHROW
)
7363 && !(call_flags
& ECF_RETURNS_TWICE
)
7364 /* fork() doesn't really return twice, but the effect of
7365 wrapping it in __gcov_fork() which calls __gcov_flush()
7366 and clears the counters before forking has the same
7367 effect as returning twice. Force a fake edge. */
7368 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7369 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7372 if (is_gimple_call (t
))
7378 if (!(call_flags
& ECF_NORETURN
))
7382 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7383 if ((e
->flags
& EDGE_FAKE
) == 0)
7387 if (gimple_code (t
) == GIMPLE_ASM
7388 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7395 /* Add fake edges to the function exit for any non constant and non
7396 noreturn calls (or noreturn calls with EH/abnormal edges),
7397 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7398 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7401 The goal is to expose cases in which entering a basic block does
7402 not imply that all subsequent instructions must be executed. */
7405 gimple_flow_call_edges_add (sbitmap blocks
)
7408 int blocks_split
= 0;
7409 int last_bb
= last_basic_block
;
7410 bool check_last_block
= false;
7412 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7416 check_last_block
= true;
7418 check_last_block
= bitmap_bit_p (blocks
, EXIT_BLOCK_PTR
->prev_bb
->index
);
7420 /* In the last basic block, before epilogue generation, there will be
7421 a fallthru edge to EXIT. Special care is required if the last insn
7422 of the last basic block is a call because make_edge folds duplicate
7423 edges, which would result in the fallthru edge also being marked
7424 fake, which would result in the fallthru edge being removed by
7425 remove_fake_edges, which would result in an invalid CFG.
7427 Moreover, we can't elide the outgoing fake edge, since the block
7428 profiler needs to take this into account in order to solve the minimal
7429 spanning tree in the case that the call doesn't return.
7431 Handle this by adding a dummy instruction in a new last basic block. */
7432 if (check_last_block
)
7434 basic_block bb
= EXIT_BLOCK_PTR
->prev_bb
;
7435 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7438 if (!gsi_end_p (gsi
))
7441 if (t
&& need_fake_edge_p (t
))
7445 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
7448 gsi_insert_on_edge (e
, gimple_build_nop ());
7449 gsi_commit_edge_inserts ();
7454 /* Now add fake edges to the function exit for any non constant
7455 calls since there is no way that we can determine if they will
7457 for (i
= 0; i
< last_bb
; i
++)
7459 basic_block bb
= BASIC_BLOCK (i
);
7460 gimple_stmt_iterator gsi
;
7461 gimple stmt
, last_stmt
;
7466 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7469 gsi
= gsi_last_nondebug_bb (bb
);
7470 if (!gsi_end_p (gsi
))
7472 last_stmt
= gsi_stmt (gsi
);
7475 stmt
= gsi_stmt (gsi
);
7476 if (need_fake_edge_p (stmt
))
7480 /* The handling above of the final block before the
7481 epilogue should be enough to verify that there is
7482 no edge to the exit block in CFG already.
7483 Calling make_edge in such case would cause us to
7484 mark that edge as fake and remove it later. */
7485 #ifdef ENABLE_CHECKING
7486 if (stmt
== last_stmt
)
7488 e
= find_edge (bb
, EXIT_BLOCK_PTR
);
7489 gcc_assert (e
== NULL
);
7493 /* Note that the following may create a new basic block
7494 and renumber the existing basic blocks. */
7495 if (stmt
!= last_stmt
)
7497 e
= split_block (bb
, stmt
);
7501 make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_FAKE
);
7505 while (!gsi_end_p (gsi
));
7510 verify_flow_info ();
7512 return blocks_split
;
7515 /* Removes edge E and all the blocks dominated by it, and updates dominance
7516 information. The IL in E->src needs to be updated separately.
7517 If dominance info is not available, only the edge E is removed.*/
7520 remove_edge_and_dominated_blocks (edge e
)
7522 vec
<basic_block
> bbs_to_remove
= vNULL
;
7523 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7527 bool none_removed
= false;
7529 basic_block bb
, dbb
;
7532 if (!dom_info_available_p (CDI_DOMINATORS
))
7538 /* No updating is needed for edges to exit. */
7539 if (e
->dest
== EXIT_BLOCK_PTR
)
7541 if (cfgcleanup_altered_bbs
)
7542 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7547 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7548 that is not dominated by E->dest, then this set is empty. Otherwise,
7549 all the basic blocks dominated by E->dest are removed.
7551 Also, to DF_IDOM we store the immediate dominators of the blocks in
7552 the dominance frontier of E (i.e., of the successors of the
7553 removed blocks, if there are any, and of E->dest otherwise). */
7554 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7559 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7561 none_removed
= true;
7566 df
= BITMAP_ALLOC (NULL
);
7567 df_idom
= BITMAP_ALLOC (NULL
);
7570 bitmap_set_bit (df_idom
,
7571 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7574 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7575 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7577 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7579 if (f
->dest
!= EXIT_BLOCK_PTR
)
7580 bitmap_set_bit (df
, f
->dest
->index
);
7583 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7584 bitmap_clear_bit (df
, bb
->index
);
7586 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7588 bb
= BASIC_BLOCK (i
);
7589 bitmap_set_bit (df_idom
,
7590 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7594 if (cfgcleanup_altered_bbs
)
7596 /* Record the set of the altered basic blocks. */
7597 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7598 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7601 /* Remove E and the cancelled blocks. */
7606 /* Walk backwards so as to get a chance to substitute all
7607 released DEFs into debug stmts. See
7608 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7610 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7611 delete_basic_block (bbs_to_remove
[i
]);
7614 /* Update the dominance information. The immediate dominator may change only
7615 for blocks whose immediate dominator belongs to DF_IDOM:
7617 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7618 removal. Let Z the arbitrary block such that idom(Z) = Y and
7619 Z dominates X after the removal. Before removal, there exists a path P
7620 from Y to X that avoids Z. Let F be the last edge on P that is
7621 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7622 dominates W, and because of P, Z does not dominate W), and W belongs to
7623 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7624 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7626 bb
= BASIC_BLOCK (i
);
7627 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7629 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7630 bbs_to_fix_dom
.safe_push (dbb
);
7633 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7636 BITMAP_FREE (df_idom
);
7637 bbs_to_remove
.release ();
7638 bbs_to_fix_dom
.release ();
7641 /* Purge dead EH edges from basic block BB. */
7644 gimple_purge_dead_eh_edges (basic_block bb
)
7646 bool changed
= false;
7649 gimple stmt
= last_stmt (bb
);
7651 if (stmt
&& stmt_can_throw_internal (stmt
))
7654 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7656 if (e
->flags
& EDGE_EH
)
7658 remove_edge_and_dominated_blocks (e
);
7668 /* Purge dead EH edges from basic block listed in BLOCKS. */
7671 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7673 bool changed
= false;
7677 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7679 basic_block bb
= BASIC_BLOCK (i
);
7681 /* Earlier gimple_purge_dead_eh_edges could have removed
7682 this basic block already. */
7683 gcc_assert (bb
|| changed
);
7685 changed
|= gimple_purge_dead_eh_edges (bb
);
7691 /* Purge dead abnormal call edges from basic block BB. */
7694 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7696 bool changed
= false;
7699 gimple stmt
= last_stmt (bb
);
7701 if (!cfun
->has_nonlocal_label
7702 && !cfun
->calls_setjmp
)
7705 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7708 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7710 if (e
->flags
& EDGE_ABNORMAL
)
7712 if (e
->flags
& EDGE_FALLTHRU
)
7713 e
->flags
&= ~EDGE_ABNORMAL
;
7715 remove_edge_and_dominated_blocks (e
);
7725 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7728 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7730 bool changed
= false;
7734 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7736 basic_block bb
= BASIC_BLOCK (i
);
7738 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7739 this basic block already. */
7740 gcc_assert (bb
|| changed
);
7742 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7748 /* This function is called whenever a new edge is created or
7752 gimple_execute_on_growing_pred (edge e
)
7754 basic_block bb
= e
->dest
;
7756 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7757 reserve_phi_args_for_new_edge (bb
);
7760 /* This function is called immediately before edge E is removed from
7761 the edge vector E->dest->preds. */
7764 gimple_execute_on_shrinking_pred (edge e
)
7766 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7767 remove_phi_args (e
);
7770 /*---------------------------------------------------------------------------
7771 Helper functions for Loop versioning
7772 ---------------------------------------------------------------------------*/
7774 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7775 of 'first'. Both of them are dominated by 'new_head' basic block. When
7776 'new_head' was created by 'second's incoming edge it received phi arguments
7777 on the edge by split_edge(). Later, additional edge 'e' was created to
7778 connect 'new_head' and 'first'. Now this routine adds phi args on this
7779 additional edge 'e' that new_head to second edge received as part of edge
7783 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7784 basic_block new_head
, edge e
)
7787 gimple_stmt_iterator psi1
, psi2
;
7789 edge e2
= find_edge (new_head
, second
);
7791 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7792 edge, we should always have an edge from NEW_HEAD to SECOND. */
7793 gcc_assert (e2
!= NULL
);
7795 /* Browse all 'second' basic block phi nodes and add phi args to
7796 edge 'e' for 'first' head. PHI args are always in correct order. */
7798 for (psi2
= gsi_start_phis (second
),
7799 psi1
= gsi_start_phis (first
);
7800 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7801 gsi_next (&psi2
), gsi_next (&psi1
))
7803 phi1
= gsi_stmt (psi1
);
7804 phi2
= gsi_stmt (psi2
);
7805 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7806 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7811 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7812 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7813 the destination of the ELSE part. */
7816 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7817 basic_block second_head ATTRIBUTE_UNUSED
,
7818 basic_block cond_bb
, void *cond_e
)
7820 gimple_stmt_iterator gsi
;
7821 gimple new_cond_expr
;
7822 tree cond_expr
= (tree
) cond_e
;
7825 /* Build new conditional expr */
7826 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7827 NULL_TREE
, NULL_TREE
);
7829 /* Add new cond in cond_bb. */
7830 gsi
= gsi_last_bb (cond_bb
);
7831 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7833 /* Adjust edges appropriately to connect new head with first head
7834 as well as second head. */
7835 e0
= single_succ_edge (cond_bb
);
7836 e0
->flags
&= ~EDGE_FALLTHRU
;
7837 e0
->flags
|= EDGE_FALSE_VALUE
;
7841 /* Do book-keeping of basic block BB for the profile consistency checker.
7842 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7843 then do post-pass accounting. Store the counting in RECORD. */
7845 gimple_account_profile_record (basic_block bb
, int after_pass
,
7846 struct profile_record
*record
)
7848 gimple_stmt_iterator i
;
7849 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7851 record
->size
[after_pass
]
7852 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7853 if (profile_status
== PROFILE_READ
)
7854 record
->time
[after_pass
]
7855 += estimate_num_insns (gsi_stmt (i
),
7856 &eni_time_weights
) * bb
->count
;
7857 else if (profile_status
== PROFILE_GUESSED
)
7858 record
->time
[after_pass
]
7859 += estimate_num_insns (gsi_stmt (i
),
7860 &eni_time_weights
) * bb
->frequency
;
7864 struct cfg_hooks gimple_cfg_hooks
= {
7866 gimple_verify_flow_info
,
7867 gimple_dump_bb
, /* dump_bb */
7868 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
7869 create_bb
, /* create_basic_block */
7870 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
7871 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
7872 gimple_can_remove_branch_p
, /* can_remove_branch_p */
7873 remove_bb
, /* delete_basic_block */
7874 gimple_split_block
, /* split_block */
7875 gimple_move_block_after
, /* move_block_after */
7876 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
7877 gimple_merge_blocks
, /* merge_blocks */
7878 gimple_predict_edge
, /* predict_edge */
7879 gimple_predicted_by_p
, /* predicted_by_p */
7880 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
7881 gimple_duplicate_bb
, /* duplicate_block */
7882 gimple_split_edge
, /* split_edge */
7883 gimple_make_forwarder_block
, /* make_forward_block */
7884 NULL
, /* tidy_fallthru_edge */
7885 NULL
, /* force_nonfallthru */
7886 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
7887 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7888 gimple_flow_call_edges_add
, /* flow_call_edges_add */
7889 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
7890 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7891 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7892 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7893 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7894 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7895 flush_pending_stmts
, /* flush_pending_stmts */
7896 gimple_empty_block_p
, /* block_empty_p */
7897 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
7898 gimple_account_profile_record
,
7902 /* Split all critical edges. */
7905 split_critical_edges (void)
7911 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7912 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7913 mappings around the calls to split_edge. */
7914 start_recording_case_labels ();
7917 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7919 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7921 /* PRE inserts statements to edges and expects that
7922 since split_critical_edges was done beforehand, committing edge
7923 insertions will not split more edges. In addition to critical
7924 edges we must split edges that have multiple successors and
7925 end by control flow statements, such as RESX.
7926 Go ahead and split them too. This matches the logic in
7927 gimple_find_edge_insert_loc. */
7928 else if ((!single_pred_p (e
->dest
)
7929 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
7930 || e
->dest
== EXIT_BLOCK_PTR
)
7931 && e
->src
!= ENTRY_BLOCK_PTR
7932 && !(e
->flags
& EDGE_ABNORMAL
))
7934 gimple_stmt_iterator gsi
;
7936 gsi
= gsi_last_bb (e
->src
);
7937 if (!gsi_end_p (gsi
)
7938 && stmt_ends_bb_p (gsi_stmt (gsi
))
7939 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
7940 && !gimple_call_builtin_p (gsi_stmt (gsi
),
7946 end_recording_case_labels ();
7952 const pass_data pass_data_split_crit_edges
=
7954 GIMPLE_PASS
, /* type */
7955 "crited", /* name */
7956 OPTGROUP_NONE
, /* optinfo_flags */
7957 false, /* has_gate */
7958 true, /* has_execute */
7959 TV_TREE_SPLIT_EDGES
, /* tv_id */
7960 PROP_cfg
, /* properties_required */
7961 PROP_no_crit_edges
, /* properties_provided */
7962 0, /* properties_destroyed */
7963 0, /* todo_flags_start */
7964 TODO_verify_flow
, /* todo_flags_finish */
7967 class pass_split_crit_edges
: public gimple_opt_pass
7970 pass_split_crit_edges (gcc::context
*ctxt
)
7971 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
7974 /* opt_pass methods: */
7975 unsigned int execute () { return split_critical_edges (); }
7977 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
7978 }; // class pass_split_crit_edges
7983 make_pass_split_crit_edges (gcc::context
*ctxt
)
7985 return new pass_split_crit_edges (ctxt
);
7989 /* Build a ternary operation and gimplify it. Emit code before GSI.
7990 Return the gimple_val holding the result. */
7993 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
7994 tree type
, tree a
, tree b
, tree c
)
7997 location_t loc
= gimple_location (gsi_stmt (*gsi
));
7999 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8002 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8006 /* Build a binary operation and gimplify it. Emit code before GSI.
8007 Return the gimple_val holding the result. */
8010 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8011 tree type
, tree a
, tree b
)
8015 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8018 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8022 /* Build a unary operation and gimplify it. Emit code before GSI.
8023 Return the gimple_val holding the result. */
8026 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8031 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8034 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8040 /* Emit return warnings. */
8043 execute_warn_function_return (void)
8045 source_location location
;
8050 if (!targetm
.warn_func_return (cfun
->decl
))
8053 /* If we have a path to EXIT, then we do return. */
8054 if (TREE_THIS_VOLATILE (cfun
->decl
)
8055 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0)
8057 location
= UNKNOWN_LOCATION
;
8058 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
8060 last
= last_stmt (e
->src
);
8061 if ((gimple_code (last
) == GIMPLE_RETURN
8062 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8063 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8066 if (location
== UNKNOWN_LOCATION
)
8067 location
= cfun
->function_end_locus
;
8068 warning_at (location
, 0, "%<noreturn%> function does return");
8071 /* If we see "return;" in some basic block, then we do reach the end
8072 without returning a value. */
8073 else if (warn_return_type
8074 && !TREE_NO_WARNING (cfun
->decl
)
8075 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0
8076 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
8078 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR
->preds
)
8080 gimple last
= last_stmt (e
->src
);
8081 if (gimple_code (last
) == GIMPLE_RETURN
8082 && gimple_return_retval (last
) == NULL
8083 && !gimple_no_warning_p (last
))
8085 location
= gimple_location (last
);
8086 if (location
== UNKNOWN_LOCATION
)
8087 location
= cfun
->function_end_locus
;
8088 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8089 TREE_NO_WARNING (cfun
->decl
) = 1;
8098 /* Given a basic block B which ends with a conditional and has
8099 precisely two successors, determine which of the edges is taken if
8100 the conditional is true and which is taken if the conditional is
8101 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8104 extract_true_false_edges_from_block (basic_block b
,
8108 edge e
= EDGE_SUCC (b
, 0);
8110 if (e
->flags
& EDGE_TRUE_VALUE
)
8113 *false_edge
= EDGE_SUCC (b
, 1);
8118 *true_edge
= EDGE_SUCC (b
, 1);
8124 const pass_data pass_data_warn_function_return
=
8126 GIMPLE_PASS
, /* type */
8127 "*warn_function_return", /* name */
8128 OPTGROUP_NONE
, /* optinfo_flags */
8129 false, /* has_gate */
8130 true, /* has_execute */
8131 TV_NONE
, /* tv_id */
8132 PROP_cfg
, /* properties_required */
8133 0, /* properties_provided */
8134 0, /* properties_destroyed */
8135 0, /* todo_flags_start */
8136 0, /* todo_flags_finish */
8139 class pass_warn_function_return
: public gimple_opt_pass
8142 pass_warn_function_return (gcc::context
*ctxt
)
8143 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8146 /* opt_pass methods: */
8147 unsigned int execute () { return execute_warn_function_return (); }
8149 }; // class pass_warn_function_return
8154 make_pass_warn_function_return (gcc::context
*ctxt
)
8156 return new pass_warn_function_return (ctxt
);
8159 /* Walk a gimplified function and warn for functions whose return value is
8160 ignored and attribute((warn_unused_result)) is set. This is done before
8161 inlining, so we don't have to worry about that. */
8164 do_warn_unused_result (gimple_seq seq
)
8167 gimple_stmt_iterator i
;
8169 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8171 gimple g
= gsi_stmt (i
);
8173 switch (gimple_code (g
))
8176 do_warn_unused_result (gimple_bind_body (g
));
8179 do_warn_unused_result (gimple_try_eval (g
));
8180 do_warn_unused_result (gimple_try_cleanup (g
));
8183 do_warn_unused_result (gimple_catch_handler (g
));
8185 case GIMPLE_EH_FILTER
:
8186 do_warn_unused_result (gimple_eh_filter_failure (g
));
8190 if (gimple_call_lhs (g
))
8192 if (gimple_call_internal_p (g
))
8195 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8196 LHS. All calls whose value is ignored should be
8197 represented like this. Look for the attribute. */
8198 fdecl
= gimple_call_fndecl (g
);
8199 ftype
= gimple_call_fntype (g
);
8201 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8203 location_t loc
= gimple_location (g
);
8206 warning_at (loc
, OPT_Wunused_result
,
8207 "ignoring return value of %qD, "
8208 "declared with attribute warn_unused_result",
8211 warning_at (loc
, OPT_Wunused_result
,
8212 "ignoring return value of function "
8213 "declared with attribute warn_unused_result");
8218 /* Not a container, not a call, or a call whose value is used. */
8225 run_warn_unused_result (void)
8227 do_warn_unused_result (gimple_body (current_function_decl
));
8232 gate_warn_unused_result (void)
8234 return flag_warn_unused_result
;
8239 const pass_data pass_data_warn_unused_result
=
8241 GIMPLE_PASS
, /* type */
8242 "*warn_unused_result", /* name */
8243 OPTGROUP_NONE
, /* optinfo_flags */
8244 true, /* has_gate */
8245 true, /* has_execute */
8246 TV_NONE
, /* tv_id */
8247 PROP_gimple_any
, /* properties_required */
8248 0, /* properties_provided */
8249 0, /* properties_destroyed */
8250 0, /* todo_flags_start */
8251 0, /* todo_flags_finish */
8254 class pass_warn_unused_result
: public gimple_opt_pass
8257 pass_warn_unused_result (gcc::context
*ctxt
)
8258 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8261 /* opt_pass methods: */
8262 bool gate () { return gate_warn_unused_result (); }
8263 unsigned int execute () { return run_warn_unused_result (); }
8265 }; // class pass_warn_unused_result
8270 make_pass_warn_unused_result (gcc::context
*ctxt
)
8272 return new pass_warn_unused_result (ctxt
);
8275 /* IPA passes, compilation of earlier functions or inlining
8276 might have changed some properties, such as marked functions nothrow,
8277 pure, const or noreturn.
8278 Remove redundant edges and basic blocks, and create new ones if necessary.
8280 This pass can't be executed as stand alone pass from pass manager, because
8281 in between inlining and this fixup the verify_flow_info would fail. */
8284 execute_fixup_cfg (void)
8287 gimple_stmt_iterator gsi
;
8288 int todo
= gimple_in_ssa_p (cfun
) ? TODO_verify_ssa
: 0;
8289 gcov_type count_scale
;
8294 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8295 ENTRY_BLOCK_PTR
->count
);
8297 ENTRY_BLOCK_PTR
->count
= cgraph_get_node (current_function_decl
)->count
;
8298 EXIT_BLOCK_PTR
->count
= apply_scale (EXIT_BLOCK_PTR
->count
,
8301 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
8302 e
->count
= apply_scale (e
->count
, count_scale
);
8306 bb
->count
= apply_scale (bb
->count
, count_scale
);
8307 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8309 gimple stmt
= gsi_stmt (gsi
);
8310 tree decl
= is_gimple_call (stmt
)
8311 ? gimple_call_fndecl (stmt
)
8315 int flags
= gimple_call_flags (stmt
);
8316 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8318 if (gimple_purge_dead_abnormal_call_edges (bb
))
8319 todo
|= TODO_cleanup_cfg
;
8321 if (gimple_in_ssa_p (cfun
))
8323 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8328 if (flags
& ECF_NORETURN
8329 && fixup_noreturn_call (stmt
))
8330 todo
|= TODO_cleanup_cfg
;
8333 if (maybe_clean_eh_stmt (stmt
)
8334 && gimple_purge_dead_eh_edges (bb
))
8335 todo
|= TODO_cleanup_cfg
;
8338 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8339 e
->count
= apply_scale (e
->count
, count_scale
);
8341 /* If we have a basic block with no successors that does not
8342 end with a control statement or a noreturn call end it with
8343 a call to __builtin_unreachable. This situation can occur
8344 when inlining a noreturn call that does in fact return. */
8345 if (EDGE_COUNT (bb
->succs
) == 0)
8347 gimple stmt
= last_stmt (bb
);
8349 || (!is_ctrl_stmt (stmt
)
8350 && (!is_gimple_call (stmt
)
8351 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8353 stmt
= gimple_build_call
8354 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8355 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8356 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8360 if (count_scale
!= REG_BR_PROB_BASE
)
8361 compute_function_frequency ();
8363 /* We just processed all calls. */
8364 if (cfun
->gimple_df
)
8365 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8367 /* Dump a textual representation of the flowgraph. */
8369 gimple_dump_cfg (dump_file
, dump_flags
);
8372 && (todo
& TODO_cleanup_cfg
))
8373 loops_state_set (LOOPS_NEED_FIXUP
);
8380 const pass_data pass_data_fixup_cfg
=
8382 GIMPLE_PASS
, /* type */
8383 "*free_cfg_annotations", /* name */
8384 OPTGROUP_NONE
, /* optinfo_flags */
8385 false, /* has_gate */
8386 true, /* has_execute */
8387 TV_NONE
, /* tv_id */
8388 PROP_cfg
, /* properties_required */
8389 0, /* properties_provided */
8390 0, /* properties_destroyed */
8391 0, /* todo_flags_start */
8392 0, /* todo_flags_finish */
8395 class pass_fixup_cfg
: public gimple_opt_pass
8398 pass_fixup_cfg (gcc::context
*ctxt
)
8399 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8402 /* opt_pass methods: */
8403 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8404 unsigned int execute () { return execute_fixup_cfg (); }
8406 }; // class pass_fixup_cfg
8411 make_pass_fixup_cfg (gcc::context
*ctxt
)
8413 return new pass_fixup_cfg (ctxt
);
8416 /* Garbage collection support for edge_def. */
8418 extern void gt_ggc_mx (tree
&);
8419 extern void gt_ggc_mx (gimple
&);
8420 extern void gt_ggc_mx (rtx
&);
8421 extern void gt_ggc_mx (basic_block
&);
8424 gt_ggc_mx (edge_def
*e
)
8426 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8428 gt_ggc_mx (e
->dest
);
8429 if (current_ir_type () == IR_GIMPLE
)
8430 gt_ggc_mx (e
->insns
.g
);
8432 gt_ggc_mx (e
->insns
.r
);
8436 /* PCH support for edge_def. */
8438 extern void gt_pch_nx (tree
&);
8439 extern void gt_pch_nx (gimple
&);
8440 extern void gt_pch_nx (rtx
&);
8441 extern void gt_pch_nx (basic_block
&);
8444 gt_pch_nx (edge_def
*e
)
8446 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8448 gt_pch_nx (e
->dest
);
8449 if (current_ir_type () == IR_GIMPLE
)
8450 gt_pch_nx (e
->insns
.g
);
8452 gt_pch_nx (e
->insns
.r
);
8457 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8459 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8460 op (&(e
->src
), cookie
);
8461 op (&(e
->dest
), cookie
);
8462 if (current_ir_type () == IR_GIMPLE
)
8463 op (&(e
->insns
.g
), cookie
);
8465 op (&(e
->insns
.r
), cookie
);
8466 op (&(block
), cookie
);