1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 /* This file contains functions for building the Control Flow Graph (CFG)
73 for a function tree. */
75 /* Local declarations. */
/* Initial capacity for the basic block array.  Chosen small; the vector
   grows on demand as blocks are created.  */
static const int initial_cfg_capacity = 20;
80 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
81 which use a particular edge. The CASE_LABEL_EXPRs are chained together
82 via their CASE_CHAIN field, which we clear after we're done with the
83 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
85 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
86 update the case vector in response to edge redirections.
88 Right now this table is set up and torn down at key points in the
89 compilation process. It would be nice if we could make the table
90 more persistent. The key is getting notification of changes to
91 the CFG (particularly edge removal, creation and redirection). */
93 static struct pointer_map_t
*edge_to_cases
;
95 /* If we record edge_to_cases, this bitmap will hold indexes
96 of basic blocks that end in a GIMPLE_SWITCH which we touched
97 due to edge manipulations. */
99 static bitmap touched_switch_bbs
;
101 /* CFG statistics. */
104 long num_merged_labels
;
107 static struct cfg_stats_d cfg_stats
;
109 /* Nonzero if we found a computed goto while building basic blocks. */
110 static bool found_computed_goto
;
112 /* Hash table to store last discriminator assigned for each locus. */
113 struct locus_discrim_map
119 /* Hashtable helpers. */
121 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
123 typedef locus_discrim_map value_type
;
124 typedef locus_discrim_map compare_type
;
125 static inline hashval_t
hash (const value_type
*);
126 static inline bool equal (const value_type
*, const compare_type
*);
129 /* Trivial hash function for a location_t. ITEM is a pointer to
130 a hash table entry that maps a location_t to a discriminator. */
133 locus_discrim_hasher::hash (const value_type
*item
)
135 return LOCATION_LINE (item
->locus
);
138 /* Equality function for the locus-to-discriminator map. A and B
139 point to the two hash table entries to compare. */
142 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
144 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
147 static hash_table
<locus_discrim_hasher
> discriminator_per_locus
;
149 /* Basic blocks and flowgraphs. */
150 static void make_blocks (gimple_seq
);
151 static void factor_computed_gotos (void);
154 static void make_edges (void);
155 static void assign_discriminators (void);
156 static void make_cond_expr_edges (basic_block
);
157 static void make_gimple_switch_edges (basic_block
);
158 static void make_goto_expr_edges (basic_block
);
159 static void make_gimple_asm_edges (basic_block
);
160 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
161 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
162 static unsigned int split_critical_edges (void);
164 /* Various helpers. */
165 static inline bool stmt_starts_bb_p (gimple
, gimple
);
166 static int gimple_verify_flow_info (void);
167 static void gimple_make_forwarder_block (edge
);
168 static gimple
first_non_label_stmt (basic_block
);
169 static bool verify_gimple_transaction (gimple
);
171 /* Flowgraph optimization and cleanup. */
172 static void gimple_merge_blocks (basic_block
, basic_block
);
173 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
174 static void remove_bb (basic_block
);
175 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
176 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
177 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
178 static tree
find_case_label_for_value (gimple
, tree
);
181 init_empty_tree_cfg_for_function (struct function
*fn
)
183 /* Initialize the basic block array. */
185 profile_status_for_function (fn
) = PROFILE_ABSENT
;
186 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
187 last_basic_block_for_function (fn
) = NUM_FIXED_BLOCKS
;
188 vec_alloc (basic_block_info_for_function (fn
), initial_cfg_capacity
);
189 vec_safe_grow_cleared (basic_block_info_for_function (fn
),
190 initial_cfg_capacity
);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_alloc (label_to_block_map_for_function (fn
), initial_cfg_capacity
);
194 vec_safe_grow_cleared (label_to_block_map_for_function (fn
),
195 initial_cfg_capacity
);
197 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, ENTRY_BLOCK
,
198 ENTRY_BLOCK_PTR_FOR_FN (fn
));
199 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, EXIT_BLOCK
,
200 EXIT_BLOCK_PTR_FOR_FN (fn
));
202 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
203 = EXIT_BLOCK_PTR_FOR_FN (fn
);
204 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
205 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
209 init_empty_tree_cfg (void)
211 init_empty_tree_cfg_for_function (cfun
);
214 /*---------------------------------------------------------------------------
216 ---------------------------------------------------------------------------*/
218 /* Entry point to the CFG builder for trees. SEQ is the sequence of
219 statements to be added to the flowgraph. */
222 build_gimple_cfg (gimple_seq seq
)
224 /* Register specific gimple functions. */
225 gimple_register_cfg_hooks ();
227 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
229 init_empty_tree_cfg ();
231 found_computed_goto
= 0;
234 /* Computed gotos are hell to deal with, especially if there are
235 lots of them with a large number of destinations. So we factor
236 them to a common computed goto location before we build the
237 edge list. After we convert back to normal form, we will un-factor
238 the computed gotos since factoring introduces an unwanted jump. */
239 if (found_computed_goto
)
240 factor_computed_gotos ();
242 /* Make sure there is always at least one block, even if it's empty. */
243 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
244 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
246 /* Adjust the size of the array. */
247 if (basic_block_info
->length () < (size_t) n_basic_blocks_for_fn (cfun
))
248 vec_safe_grow_cleared (basic_block_info
, n_basic_blocks_for_fn (cfun
));
250 /* To speed up statement iterator walks, we first purge dead labels. */
251 cleanup_dead_labels ();
253 /* Group case nodes to reduce the number of edges.
254 We do this after cleaning up dead labels because otherwise we miss
255 a lot of obvious case merging opportunities. */
256 group_case_labels ();
258 /* Create the edges of the flowgraph. */
259 discriminator_per_locus
.create (13);
261 assign_discriminators ();
262 cleanup_dead_labels ();
263 discriminator_per_locus
.dispose ();
267 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
268 it and set loop->safelen to INT_MAX. We assume that the annotation
269 comes immediately before the condition. */
272 replace_loop_annotate ()
276 gimple_stmt_iterator gsi
;
279 FOR_EACH_LOOP (loop
, 0)
281 gsi
= gsi_last_bb (loop
->header
);
282 stmt
= gsi_stmt (gsi
);
283 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
285 gsi_prev_nondebug (&gsi
);
288 stmt
= gsi_stmt (gsi
);
289 if (gimple_code (stmt
) != GIMPLE_CALL
)
291 if (!gimple_call_internal_p (stmt
)
292 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
294 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
295 != annot_expr_ivdep_kind
)
297 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
298 gimple_call_arg (stmt
, 0));
299 gsi_replace (&gsi
, stmt
, true);
300 loop
->safelen
= INT_MAX
;
304 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
307 gsi
= gsi_last_bb (bb
);
308 stmt
= gsi_stmt (gsi
);
309 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
310 gsi_prev_nondebug (&gsi
);
313 stmt
= gsi_stmt (gsi
);
314 if (gimple_code (stmt
) != GIMPLE_CALL
)
316 if (!gimple_call_internal_p (stmt
)
317 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
319 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
320 != annot_expr_ivdep_kind
)
322 warning_at (gimple_location (stmt
), 0, "ignoring %<GCC ivdep%> "
324 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
325 gimple_call_arg (stmt
, 0));
326 gsi_replace (&gsi
, stmt
, true);
332 execute_build_cfg (void)
334 gimple_seq body
= gimple_body (current_function_decl
);
336 build_gimple_cfg (body
);
337 gimple_set_body (current_function_decl
, NULL
);
338 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
340 fprintf (dump_file
, "Scope blocks:\n");
341 dump_scope_blocks (dump_file
, dump_flags
);
344 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
345 replace_loop_annotate ();
351 const pass_data pass_data_build_cfg
=
353 GIMPLE_PASS
, /* type */
355 OPTGROUP_NONE
, /* optinfo_flags */
356 false, /* has_gate */
357 true, /* has_execute */
358 TV_TREE_CFG
, /* tv_id */
359 PROP_gimple_leh
, /* properties_required */
360 ( PROP_cfg
| PROP_loops
), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 TODO_verify_stmts
, /* todo_flags_finish */
366 class pass_build_cfg
: public gimple_opt_pass
369 pass_build_cfg (gcc::context
*ctxt
)
370 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
373 /* opt_pass methods: */
374 unsigned int execute () { return execute_build_cfg (); }
376 }; // class pass_build_cfg
381 make_pass_build_cfg (gcc::context
*ctxt
)
383 return new pass_build_cfg (ctxt
);
387 /* Return true if T is a computed goto. */
390 computed_goto_p (gimple t
)
392 return (gimple_code (t
) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
406 __builtin_unreachable ();
410 assert_unreachable_fallthru_edge_p (edge e
)
412 basic_block pred_bb
= e
->src
;
413 gimple last
= last_stmt (pred_bb
);
414 if (last
&& gimple_code (last
) == GIMPLE_COND
)
416 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
417 if (other_bb
== e
->dest
)
418 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
419 if (EDGE_COUNT (other_bb
->succs
) == 0)
421 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
426 stmt
= gsi_stmt (gsi
);
427 if (is_gimple_debug (stmt
))
429 gsi_next_nondebug (&gsi
);
432 stmt
= gsi_stmt (gsi
);
434 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
441 /* Search the CFG for any computed gotos. If found, factor them to a
442 common computed goto site. Also record the location of that site so
443 that we can un-factor the gotos after we have converted back to
447 factor_computed_gotos (void)
450 tree factored_label_decl
= NULL
;
452 gimple factored_computed_goto_label
= NULL
;
453 gimple factored_computed_goto
= NULL
;
455 /* We know there are one or more computed gotos in this function.
456 Examine the last statement in each basic block to see if the block
457 ends with a computed goto. */
461 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
467 last
= gsi_stmt (gsi
);
469 /* Ignore the computed goto we create when we factor the original
471 if (last
== factored_computed_goto
)
474 /* If the last statement is a computed goto, factor it. */
475 if (computed_goto_p (last
))
479 /* The first time we find a computed goto we need to create
480 the factored goto block and the variable each original
481 computed goto will use for their goto destination. */
482 if (!factored_computed_goto
)
484 basic_block new_bb
= create_empty_bb (bb
);
485 gimple_stmt_iterator new_gsi
= gsi_start_bb (new_bb
);
487 /* Create the destination of the factored goto. Each original
488 computed goto will put its desired destination into this
489 variable and jump to the label we create immediately
491 var
= create_tmp_var (ptr_type_node
, "gotovar");
493 /* Build a label for the new block which will contain the
494 factored computed goto. */
495 factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION
);
496 factored_computed_goto_label
497 = gimple_build_label (factored_label_decl
);
498 gsi_insert_after (&new_gsi
, factored_computed_goto_label
,
501 /* Build our new computed goto. */
502 factored_computed_goto
= gimple_build_goto (var
);
503 gsi_insert_after (&new_gsi
, factored_computed_goto
, GSI_NEW_STMT
);
506 /* Copy the original computed goto's destination into VAR. */
507 assignment
= gimple_build_assign (var
, gimple_goto_dest (last
));
508 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
510 /* And re-vector the computed goto to the new destination. */
511 gimple_goto_set_dest (last
, factored_label_decl
);
517 /* Build a flowgraph for the sequence of stmts SEQ. */
520 make_blocks (gimple_seq seq
)
522 gimple_stmt_iterator i
= gsi_start (seq
);
524 bool start_new_block
= true;
525 bool first_stmt_of_seq
= true;
526 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
528 while (!gsi_end_p (i
))
535 /* If the statement starts a new basic block or if we have determined
536 in a previous pass that we need to create a new block for STMT, do
538 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
540 if (!first_stmt_of_seq
)
541 gsi_split_seq_before (&i
, &seq
);
542 bb
= create_basic_block (seq
, NULL
, bb
);
543 start_new_block
= false;
546 /* Now add STMT to BB and create the subgraphs for special statement
548 gimple_set_bb (stmt
, bb
);
550 if (computed_goto_p (stmt
))
551 found_computed_goto
= true;
553 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
555 if (stmt_ends_bb_p (stmt
))
557 /* If the stmt can make abnormal goto use a new temporary
558 for the assignment to the LHS. This makes sure the old value
559 of the LHS is available on the abnormal edge. Otherwise
560 we will end up with overlapping life-ranges for abnormal
562 if (gimple_has_lhs (stmt
)
563 && stmt_can_make_abnormal_goto (stmt
)
564 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
566 tree lhs
= gimple_get_lhs (stmt
);
567 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
568 gimple s
= gimple_build_assign (lhs
, tmp
);
569 gimple_set_location (s
, gimple_location (stmt
));
570 gimple_set_block (s
, gimple_block (stmt
));
571 gimple_set_lhs (stmt
, tmp
);
572 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
573 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
574 DECL_GIMPLE_REG_P (tmp
) = 1;
575 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
577 start_new_block
= true;
581 first_stmt_of_seq
= false;
586 /* Create and return a new empty basic block after bb AFTER. */
589 create_bb (void *h
, void *e
, basic_block after
)
595 /* Create and initialize a new basic block. Since alloc_block uses
596 GC allocation that clears memory to allocate a basic block, we do
597 not have to clear the newly allocated basic block here. */
600 bb
->index
= last_basic_block
;
602 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
604 /* Add the new block to the linked list of blocks. */
605 link_block (bb
, after
);
607 /* Grow the basic block array if needed. */
608 if ((size_t) last_basic_block
== basic_block_info
->length ())
610 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
611 vec_safe_grow_cleared (basic_block_info
, new_size
);
614 /* Add the newly created block to the array. */
615 SET_BASIC_BLOCK (last_basic_block
, bb
);
617 n_basic_blocks_for_fn (cfun
)++;
624 /*---------------------------------------------------------------------------
626 ---------------------------------------------------------------------------*/
628 /* Fold COND_EXPR_COND of each COND_EXPR. */
631 fold_cond_expr_cond (void)
637 gimple stmt
= last_stmt (bb
);
639 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
641 location_t loc
= gimple_location (stmt
);
645 fold_defer_overflow_warnings ();
646 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
647 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
650 zerop
= integer_zerop (cond
);
651 onep
= integer_onep (cond
);
654 zerop
= onep
= false;
656 fold_undefer_overflow_warnings (zerop
|| onep
,
658 WARN_STRICT_OVERFLOW_CONDITIONAL
);
660 gimple_cond_make_false (stmt
);
662 gimple_cond_make_true (stmt
);
667 /* Join all the blocks in the flowgraph. */
673 struct omp_region
*cur_region
= NULL
;
675 /* Create an edge from entry to the first block with executable
677 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), BASIC_BLOCK (NUM_FIXED_BLOCKS
),
680 /* Traverse the basic block array placing edges. */
683 gimple last
= last_stmt (bb
);
688 enum gimple_code code
= gimple_code (last
);
692 make_goto_expr_edges (bb
);
696 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
700 make_cond_expr_edges (bb
);
704 make_gimple_switch_edges (bb
);
708 make_eh_edges (last
);
711 case GIMPLE_EH_DISPATCH
:
712 fallthru
= make_eh_dispatch_edges (last
);
716 /* If this function receives a nonlocal goto, then we need to
717 make edges from this call site to all the nonlocal goto
719 if (stmt_can_make_abnormal_goto (last
))
720 make_abnormal_goto_edges (bb
, true);
722 /* If this statement has reachable exception handlers, then
723 create abnormal edges to them. */
724 make_eh_edges (last
);
726 /* BUILTIN_RETURN is really a return statement. */
727 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
728 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0), fallthru
=
730 /* Some calls are known not to return. */
732 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
736 /* A GIMPLE_ASSIGN may throw internally and thus be considered
738 if (is_ctrl_altering_stmt (last
))
739 make_eh_edges (last
);
744 make_gimple_asm_edges (bb
);
749 fallthru
= make_gimple_omp_edges (bb
, &cur_region
);
752 case GIMPLE_TRANSACTION
:
754 tree abort_label
= gimple_transaction_label (last
);
756 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
762 gcc_assert (!stmt_ends_bb_p (last
));
770 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
775 /* Fold COND_EXPR_COND of each COND_EXPR. */
776 fold_cond_expr_cond ();
779 /* Find the next available discriminator value for LOCUS. The
780 discriminator distinguishes among several basic blocks that
781 share a common locus, allowing for more accurate sample-based
785 next_discriminator_for_locus (location_t locus
)
787 struct locus_discrim_map item
;
788 struct locus_discrim_map
**slot
;
791 item
.discriminator
= 0;
792 slot
= discriminator_per_locus
.find_slot_with_hash (
793 &item
, LOCATION_LINE (locus
), INSERT
);
795 if (*slot
== HTAB_EMPTY_ENTRY
)
797 *slot
= XNEW (struct locus_discrim_map
);
799 (*slot
)->locus
= locus
;
800 (*slot
)->discriminator
= 0;
802 (*slot
)->discriminator
++;
803 return (*slot
)->discriminator
;
806 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
809 same_line_p (location_t locus1
, location_t locus2
)
811 expanded_location from
, to
;
813 if (locus1
== locus2
)
816 from
= expand_location (locus1
);
817 to
= expand_location (locus2
);
819 if (from
.line
!= to
.line
)
821 if (from
.file
== to
.file
)
823 return (from
.file
!= NULL
825 && filename_cmp (from
.file
, to
.file
) == 0);
828 /* Assign discriminators to each basic block. */
831 assign_discriminators (void)
839 gimple last
= last_stmt (bb
);
840 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
842 if (locus
== UNKNOWN_LOCATION
)
845 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
847 gimple first
= first_non_label_stmt (e
->dest
);
848 gimple last
= last_stmt (e
->dest
);
849 if ((first
&& same_line_p (locus
, gimple_location (first
)))
850 || (last
&& same_line_p (locus
, gimple_location (last
))))
852 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
853 bb
->discriminator
= next_discriminator_for_locus (locus
);
855 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
861 /* Create the edges for a GIMPLE_COND starting at block BB. */
864 make_cond_expr_edges (basic_block bb
)
866 gimple entry
= last_stmt (bb
);
867 gimple then_stmt
, else_stmt
;
868 basic_block then_bb
, else_bb
;
869 tree then_label
, else_label
;
873 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
875 /* Entry basic blocks for each component. */
876 then_label
= gimple_cond_true_label (entry
);
877 else_label
= gimple_cond_false_label (entry
);
878 then_bb
= label_to_block (then_label
);
879 else_bb
= label_to_block (else_label
);
880 then_stmt
= first_stmt (then_bb
);
881 else_stmt
= first_stmt (else_bb
);
883 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
884 e
->goto_locus
= gimple_location (then_stmt
);
885 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
887 e
->goto_locus
= gimple_location (else_stmt
);
889 /* We do not need the labels anymore. */
890 gimple_cond_set_true_label (entry
, NULL_TREE
);
891 gimple_cond_set_false_label (entry
, NULL_TREE
);
895 /* Called for each element in the hash table (P) as we delete the
896 edge to cases hash table.
898 Clear all the TREE_CHAINs to prevent problems with copying of
899 SWITCH_EXPRs and structure sharing rules, then free the hash table
903 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
904 void *data ATTRIBUTE_UNUSED
)
908 for (t
= (tree
) *value
; t
; t
= next
)
910 next
= CASE_CHAIN (t
);
911 CASE_CHAIN (t
) = NULL
;
918 /* Start recording information mapping edges to case labels. */
921 start_recording_case_labels (void)
923 gcc_assert (edge_to_cases
== NULL
);
924 edge_to_cases
= pointer_map_create ();
925 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
928 /* Return nonzero if we are recording information for case labels. */
931 recording_case_labels_p (void)
933 return (edge_to_cases
!= NULL
);
936 /* Stop recording information mapping edges to case labels and
937 remove any information we have recorded. */
939 end_recording_case_labels (void)
943 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
944 pointer_map_destroy (edge_to_cases
);
945 edge_to_cases
= NULL
;
946 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
948 basic_block bb
= BASIC_BLOCK (i
);
951 gimple stmt
= last_stmt (bb
);
952 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
953 group_case_labels_stmt (stmt
);
956 BITMAP_FREE (touched_switch_bbs
);
959 /* If we are inside a {start,end}_recording_cases block, then return
960 a chain of CASE_LABEL_EXPRs from T which reference E.
962 Otherwise return NULL. */
965 get_cases_for_edge (edge e
, gimple t
)
970 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
971 chains available. Return NULL so the caller can detect this case. */
972 if (!recording_case_labels_p ())
975 slot
= pointer_map_contains (edge_to_cases
, e
);
979 /* If we did not find E in the hash table, then this must be the first
980 time we have been queried for information about E & T. Add all the
981 elements from T to the hash table then perform the query again. */
983 n
= gimple_switch_num_labels (t
);
984 for (i
= 0; i
< n
; i
++)
986 tree elt
= gimple_switch_label (t
, i
);
987 tree lab
= CASE_LABEL (elt
);
988 basic_block label_bb
= label_to_block (lab
);
989 edge this_edge
= find_edge (e
->src
, label_bb
);
991 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
993 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
994 CASE_CHAIN (elt
) = (tree
) *slot
;
998 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1001 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1004 make_gimple_switch_edges (basic_block bb
)
1006 gimple entry
= last_stmt (bb
);
1009 n
= gimple_switch_num_labels (entry
);
1011 for (i
= 0; i
< n
; ++i
)
1013 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1014 basic_block label_bb
= label_to_block (lab
);
1015 make_edge (bb
, label_bb
, 0);
1020 /* Return the basic block holding label DEST. */
1023 label_to_block_fn (struct function
*ifun
, tree dest
)
1025 int uid
= LABEL_DECL_UID (dest
);
1027 /* We would die hard when faced by an undefined label. Emit a label to
1028 the very first basic block. This will hopefully make even the dataflow
1029 and undefined variable warnings quite right. */
1030 if (seen_error () && uid
< 0)
1032 gimple_stmt_iterator gsi
= gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
1035 stmt
= gimple_build_label (dest
);
1036 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1037 uid
= LABEL_DECL_UID (dest
);
1039 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1041 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1044 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1045 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1048 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
1050 basic_block target_bb
;
1051 gimple_stmt_iterator gsi
;
1053 FOR_EACH_BB (target_bb
)
1055 for (gsi
= gsi_start_bb (target_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1057 gimple label_stmt
= gsi_stmt (gsi
);
1060 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
1063 target
= gimple_label_label (label_stmt
);
1065 /* Make an edge to every label block that has been marked as a
1066 potential target for a computed goto or a non-local goto. */
1067 if ((FORCED_LABEL (target
) && !for_call
)
1068 || (DECL_NONLOCAL (target
) && for_call
))
1070 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1074 if (!gsi_end_p (gsi
)
1075 && is_gimple_debug (gsi_stmt (gsi
)))
1076 gsi_next_nondebug (&gsi
);
1077 if (!gsi_end_p (gsi
))
1079 /* Make an edge to every setjmp-like call. */
1080 gimple call_stmt
= gsi_stmt (gsi
);
1081 if (is_gimple_call (call_stmt
)
1082 && (gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
))
1083 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1088 /* Create edges for a goto statement at block BB. */
1091 make_goto_expr_edges (basic_block bb
)
1093 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1094 gimple goto_t
= gsi_stmt (last
);
1096 /* A simple GOTO creates normal edges. */
1097 if (simple_goto_p (goto_t
))
1099 tree dest
= gimple_goto_dest (goto_t
);
1100 basic_block label_bb
= label_to_block (dest
);
1101 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1102 e
->goto_locus
= gimple_location (goto_t
);
1103 gsi_remove (&last
, true);
1107 /* A computed GOTO creates abnormal edges. */
1108 make_abnormal_goto_edges (bb
, false);
1111 /* Create edges for an asm statement with labels at block BB. */
1114 make_gimple_asm_edges (basic_block bb
)
1116 gimple stmt
= last_stmt (bb
);
1117 int i
, n
= gimple_asm_nlabels (stmt
);
1119 for (i
= 0; i
< n
; ++i
)
1121 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1122 basic_block label_bb
= label_to_block (label
);
1123 make_edge (bb
, label_bb
, 0);
1127 /*---------------------------------------------------------------------------
1129 ---------------------------------------------------------------------------*/
1131 /* Cleanup useless labels in basic blocks. This is something we wish
1132 to do early because it allows us to group case labels before creating
1133 the edges for the CFG, and it speeds up block statement iterators in
1134 all passes later on.
1135 We rerun this pass after CFG is created, to get rid of the labels that
1136 are no longer referenced. After then we do not run it any more, since
1137 (almost) no new labels should be created. */
1139 /* A map from basic block index to the leading label of that block. */
1140 static struct label_record
1145 /* True if the label is referenced from somewhere. */
1149 /* Given LABEL return the first label in the same basic block. */
1152 main_block_label (tree label
)
1154 basic_block bb
= label_to_block (label
);
1155 tree main_label
= label_for_bb
[bb
->index
].label
;
1157 /* label_to_block possibly inserted undefined label into the chain. */
1160 label_for_bb
[bb
->index
].label
= label
;
1164 label_for_bb
[bb
->index
].used
= true;
1168 /* Clean up redundant labels within the exception tree. */
1171 cleanup_dead_labels_eh (void)
1178 if (cfun
->eh
== NULL
)
1181 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1182 if (lp
&& lp
->post_landing_pad
)
1184 lab
= main_block_label (lp
->post_landing_pad
);
1185 if (lab
!= lp
->post_landing_pad
)
1187 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1188 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1192 FOR_ALL_EH_REGION (r
)
1196 case ERT_MUST_NOT_THROW
:
1202 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1206 c
->label
= main_block_label (lab
);
1211 case ERT_ALLOWED_EXCEPTIONS
:
1212 lab
= r
->u
.allowed
.label
;
1214 r
->u
.allowed
.label
= main_block_label (lab
);
1220 /* Cleanup redundant labels. This is a three-step process:
1221 1) Find the leading label for each block.
1222 2) Redirect all references to labels to the leading labels.
1223 3) Cleanup all useless labels. */
1226 cleanup_dead_labels (void)
1229 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block
);
1231 /* Find a suitable label for each block. We use the first user-defined
1232 label if there is one, or otherwise just the first label we see. */
1235 gimple_stmt_iterator i
;
1237 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1240 gimple stmt
= gsi_stmt (i
);
1242 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1245 label
= gimple_label_label (stmt
);
1247 /* If we have not yet seen a label for the current block,
1248 remember this one and see if there are more labels. */
1249 if (!label_for_bb
[bb
->index
].label
)
1251 label_for_bb
[bb
->index
].label
= label
;
1255 /* If we did see a label for the current block already, but it
1256 is an artificially created label, replace it if the current
1257 label is a user defined label. */
1258 if (!DECL_ARTIFICIAL (label
)
1259 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1261 label_for_bb
[bb
->index
].label
= label
;
1267 /* Now redirect all jumps/branches to the selected label.
1268 First do so for each block ending in a control statement. */
1271 gimple stmt
= last_stmt (bb
);
1272 tree label
, new_label
;
1277 switch (gimple_code (stmt
))
1280 label
= gimple_cond_true_label (stmt
);
1283 new_label
= main_block_label (label
);
1284 if (new_label
!= label
)
1285 gimple_cond_set_true_label (stmt
, new_label
);
1288 label
= gimple_cond_false_label (stmt
);
1291 new_label
= main_block_label (label
);
1292 if (new_label
!= label
)
1293 gimple_cond_set_false_label (stmt
, new_label
);
1299 size_t i
, n
= gimple_switch_num_labels (stmt
);
1301 /* Replace all destination labels. */
1302 for (i
= 0; i
< n
; ++i
)
1304 tree case_label
= gimple_switch_label (stmt
, i
);
1305 label
= CASE_LABEL (case_label
);
1306 new_label
= main_block_label (label
);
1307 if (new_label
!= label
)
1308 CASE_LABEL (case_label
) = new_label
;
1315 int i
, n
= gimple_asm_nlabels (stmt
);
1317 for (i
= 0; i
< n
; ++i
)
1319 tree cons
= gimple_asm_label_op (stmt
, i
);
1320 tree label
= main_block_label (TREE_VALUE (cons
));
1321 TREE_VALUE (cons
) = label
;
1326 /* We have to handle gotos until they're removed, and we don't
1327 remove them until after we've created the CFG edges. */
1329 if (!computed_goto_p (stmt
))
1331 label
= gimple_goto_dest (stmt
);
1332 new_label
= main_block_label (label
);
1333 if (new_label
!= label
)
1334 gimple_goto_set_dest (stmt
, new_label
);
1338 case GIMPLE_TRANSACTION
:
1340 tree label
= gimple_transaction_label (stmt
);
1343 tree new_label
= main_block_label (label
);
1344 if (new_label
!= label
)
1345 gimple_transaction_set_label (stmt
, new_label
);
1355 /* Do the same for the exception region tree labels. */
1356 cleanup_dead_labels_eh ();
1358 /* Finally, purge dead labels. All user-defined labels and labels that
1359 can be the target of non-local gotos and labels which have their
1360 address taken are preserved. */
1363 gimple_stmt_iterator i
;
1364 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1366 if (!label_for_this_bb
)
1369 /* If the main label of the block is unused, we may still remove it. */
1370 if (!label_for_bb
[bb
->index
].used
)
1371 label_for_this_bb
= NULL
;
1373 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1376 gimple stmt
= gsi_stmt (i
);
1378 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1381 label
= gimple_label_label (stmt
);
1383 if (label
== label_for_this_bb
1384 || !DECL_ARTIFICIAL (label
)
1385 || DECL_NONLOCAL (label
)
1386 || FORCED_LABEL (label
))
1389 gsi_remove (&i
, true);
1393 free (label_for_bb
);
1396 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1397 the ones jumping to the same label.
1398 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1401 group_case_labels_stmt (gimple stmt
)
1403 int old_size
= gimple_switch_num_labels (stmt
);
1404 int i
, j
, new_size
= old_size
;
1405 basic_block default_bb
= NULL
;
1407 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1409 /* Look for possible opportunities to merge cases. */
1411 while (i
< old_size
)
1413 tree base_case
, base_high
;
1414 basic_block base_bb
;
1416 base_case
= gimple_switch_label (stmt
, i
);
1418 gcc_assert (base_case
);
1419 base_bb
= label_to_block (CASE_LABEL (base_case
));
1421 /* Discard cases that have the same destination as the
1423 if (base_bb
== default_bb
)
1425 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1431 base_high
= CASE_HIGH (base_case
)
1432 ? CASE_HIGH (base_case
)
1433 : CASE_LOW (base_case
);
1436 /* Try to merge case labels. Break out when we reach the end
1437 of the label vector or when we cannot merge the next case
1438 label with the current one. */
1439 while (i
< old_size
)
1441 tree merge_case
= gimple_switch_label (stmt
, i
);
1442 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1443 double_int bhp1
= tree_to_double_int (base_high
) + double_int_one
;
1445 /* Merge the cases if they jump to the same place,
1446 and their ranges are consecutive. */
1447 if (merge_bb
== base_bb
1448 && tree_to_double_int (CASE_LOW (merge_case
)) == bhp1
)
1450 base_high
= CASE_HIGH (merge_case
) ?
1451 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1452 CASE_HIGH (base_case
) = base_high
;
1453 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1462 /* Compress the case labels in the label vector, and adjust the
1463 length of the vector. */
1464 for (i
= 0, j
= 0; i
< new_size
; i
++)
1466 while (! gimple_switch_label (stmt
, j
))
1468 gimple_switch_set_label (stmt
, i
,
1469 gimple_switch_label (stmt
, j
++));
1472 gcc_assert (new_size
<= old_size
);
1473 gimple_switch_set_num_labels (stmt
, new_size
);
1476 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1477 and scan the sorted vector of cases. Combine the ones jumping to the
1481 group_case_labels (void)
1487 gimple stmt
= last_stmt (bb
);
1488 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1489 group_case_labels_stmt (stmt
);
1493 /* Checks whether we can merge block B into block A. */
1496 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1499 gimple_stmt_iterator gsi
;
1501 if (!single_succ_p (a
))
1504 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1507 if (single_succ (a
) != b
)
1510 if (!single_pred_p (b
))
1513 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1516 /* If A ends by a statement causing exceptions or something similar, we
1517 cannot merge the blocks. */
1518 stmt
= last_stmt (a
);
1519 if (stmt
&& stmt_ends_bb_p (stmt
))
1522 /* Do not allow a block with only a non-local label to be merged. */
1524 && gimple_code (stmt
) == GIMPLE_LABEL
1525 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1528 /* Examine the labels at the beginning of B. */
1529 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1532 stmt
= gsi_stmt (gsi
);
1533 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1535 lab
= gimple_label_label (stmt
);
1537 /* Do not remove user forced labels or for -O0 any user labels. */
1538 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1542 /* Protect the loop latches. */
1543 if (current_loops
&& b
->loop_father
->latch
== b
)
1546 /* It must be possible to eliminate all phi nodes in B. If ssa form
1547 is not up-to-date and a name-mapping is registered, we cannot eliminate
1548 any phis. Symbols marked for renaming are never a problem though. */
1549 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1551 gimple phi
= gsi_stmt (gsi
);
1552 /* Technically only new names matter. */
1553 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1557 /* When not optimizing, don't merge if we'd lose goto_locus. */
1559 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1561 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1562 gimple_stmt_iterator prev
, next
;
1563 prev
= gsi_last_nondebug_bb (a
);
1564 next
= gsi_after_labels (b
);
1565 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1566 gsi_next_nondebug (&next
);
1567 if ((gsi_end_p (prev
)
1568 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1569 && (gsi_end_p (next
)
1570 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1577 /* Replaces all uses of NAME by VAL. */
1580 replace_uses_by (tree name
, tree val
)
1582 imm_use_iterator imm_iter
;
1587 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1589 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1591 replace_exp (use
, val
);
1593 if (gimple_code (stmt
) == GIMPLE_PHI
)
1595 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1596 if (e
->flags
& EDGE_ABNORMAL
)
1598 /* This can only occur for virtual operands, since
1599 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1600 would prevent replacement. */
1601 gcc_checking_assert (virtual_operand_p (name
));
1602 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1607 if (gimple_code (stmt
) != GIMPLE_PHI
)
1609 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1610 gimple orig_stmt
= stmt
;
1613 /* Mark the block if we changed the last stmt in it. */
1614 if (cfgcleanup_altered_bbs
1615 && stmt_ends_bb_p (stmt
))
1616 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1618 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1619 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1620 only change sth from non-invariant to invariant, and only
1621 when propagating constants. */
1622 if (is_gimple_min_invariant (val
))
1623 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1625 tree op
= gimple_op (stmt
, i
);
1626 /* Operands may be empty here. For example, the labels
1627 of a GIMPLE_COND are nulled out following the creation
1628 of the corresponding CFG edges. */
1629 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1630 recompute_tree_invariant_for_addr_expr (op
);
1633 if (fold_stmt (&gsi
))
1634 stmt
= gsi_stmt (gsi
);
1636 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1637 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1643 gcc_checking_assert (has_zero_uses (name
));
1645 /* Also update the trees stored in loop structures. */
1650 FOR_EACH_LOOP (loop
, 0)
1652 substitute_in_loop_info (loop
, name
, val
);
1657 /* Merge block B into block A. */
1660 gimple_merge_blocks (basic_block a
, basic_block b
)
1662 gimple_stmt_iterator last
, gsi
, psi
;
1665 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1667 /* Remove all single-valued PHI nodes from block B of the form
1668 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1669 gsi
= gsi_last_bb (a
);
1670 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1672 gimple phi
= gsi_stmt (psi
);
1673 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1675 bool may_replace_uses
= (virtual_operand_p (def
)
1676 || may_propagate_copy (def
, use
));
1678 /* In case we maintain loop closed ssa form, do not propagate arguments
1679 of loop exit phi nodes. */
1681 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1682 && !virtual_operand_p (def
)
1683 && TREE_CODE (use
) == SSA_NAME
1684 && a
->loop_father
!= b
->loop_father
)
1685 may_replace_uses
= false;
1687 if (!may_replace_uses
)
1689 gcc_assert (!virtual_operand_p (def
));
1691 /* Note that just emitting the copies is fine -- there is no problem
1692 with ordering of phi nodes. This is because A is the single
1693 predecessor of B, therefore results of the phi nodes cannot
1694 appear as arguments of the phi nodes. */
1695 copy
= gimple_build_assign (def
, use
);
1696 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1697 remove_phi_node (&psi
, false);
1701 /* If we deal with a PHI for virtual operands, we can simply
1702 propagate these without fussing with folding or updating
1704 if (virtual_operand_p (def
))
1706 imm_use_iterator iter
;
1707 use_operand_p use_p
;
1710 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1711 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1712 SET_USE (use_p
, use
);
1714 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1715 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1718 replace_uses_by (def
, use
);
1720 remove_phi_node (&psi
, true);
1724 /* Ensure that B follows A. */
1725 move_block_after (b
, a
);
1727 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1728 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1730 /* Remove labels from B and set gimple_bb to A for other statements. */
1731 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1733 gimple stmt
= gsi_stmt (gsi
);
1734 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1736 tree label
= gimple_label_label (stmt
);
1739 gsi_remove (&gsi
, false);
1741 /* Now that we can thread computed gotos, we might have
1742 a situation where we have a forced label in block B
1743 However, the label at the start of block B might still be
1744 used in other ways (think about the runtime checking for
1745 Fortran assigned gotos). So we can not just delete the
1746 label. Instead we move the label to the start of block A. */
1747 if (FORCED_LABEL (label
))
1749 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1750 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1752 /* Other user labels keep around in a form of a debug stmt. */
1753 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1755 gimple dbg
= gimple_build_debug_bind (label
,
1758 gimple_debug_bind_reset_value (dbg
);
1759 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1762 lp_nr
= EH_LANDING_PAD_NR (label
);
1765 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1766 lp
->post_landing_pad
= NULL
;
1771 gimple_set_bb (stmt
, a
);
1776 /* Merge the sequences. */
1777 last
= gsi_last_bb (a
);
1778 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1779 set_bb_seq (b
, NULL
);
1781 if (cfgcleanup_altered_bbs
)
1782 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1786 /* Return the one of two successors of BB that is not reachable by a
1787 complex edge, if there is one. Else, return BB. We use
1788 this in optimizations that use post-dominators for their heuristics,
1789 to catch the cases in C++ where function calls are involved. */
1792 single_noncomplex_succ (basic_block bb
)
1795 if (EDGE_COUNT (bb
->succs
) != 2)
1798 e0
= EDGE_SUCC (bb
, 0);
1799 e1
= EDGE_SUCC (bb
, 1);
1800 if (e0
->flags
& EDGE_COMPLEX
)
1802 if (e1
->flags
& EDGE_COMPLEX
)
1808 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1811 notice_special_calls (gimple call
)
1813 int flags
= gimple_call_flags (call
);
1815 if (flags
& ECF_MAY_BE_ALLOCA
)
1816 cfun
->calls_alloca
= true;
1817 if (flags
& ECF_RETURNS_TWICE
)
1818 cfun
->calls_setjmp
= true;
1822 /* Clear flags set by notice_special_calls. Used by dead code removal
1823 to update the flags. */
1826 clear_special_calls (void)
1828 cfun
->calls_alloca
= false;
1829 cfun
->calls_setjmp
= false;
1832 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1835 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1837 /* Since this block is no longer reachable, we can just delete all
1838 of its PHI nodes. */
1839 remove_phi_nodes (bb
);
1841 /* Remove edges to BB's successors. */
1842 while (EDGE_COUNT (bb
->succs
) > 0)
1843 remove_edge (EDGE_SUCC (bb
, 0));
1847 /* Remove statements of basic block BB. */
1850 remove_bb (basic_block bb
)
1852 gimple_stmt_iterator i
;
1856 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1857 if (dump_flags
& TDF_DETAILS
)
1859 dump_bb (dump_file
, bb
, 0, dump_flags
);
1860 fprintf (dump_file
, "\n");
1866 struct loop
*loop
= bb
->loop_father
;
1868 /* If a loop gets removed, clean up the information associated
1870 if (loop
->latch
== bb
1871 || loop
->header
== bb
)
1872 free_numbers_of_iterations_estimates_loop (loop
);
1875 /* Remove all the instructions in the block. */
1876 if (bb_seq (bb
) != NULL
)
1878 /* Walk backwards so as to get a chance to substitute all
1879 released DEFs into debug stmts. See
1880 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1882 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
1884 gimple stmt
= gsi_stmt (i
);
1885 if (gimple_code (stmt
) == GIMPLE_LABEL
1886 && (FORCED_LABEL (gimple_label_label (stmt
))
1887 || DECL_NONLOCAL (gimple_label_label (stmt
))))
1890 gimple_stmt_iterator new_gsi
;
1892 /* A non-reachable non-local label may still be referenced.
1893 But it no longer needs to carry the extra semantics of
1895 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
1897 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
1898 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
1901 new_bb
= bb
->prev_bb
;
1902 new_gsi
= gsi_start_bb (new_bb
);
1903 gsi_remove (&i
, false);
1904 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
1908 /* Release SSA definitions if we are in SSA. Note that we
1909 may be called when not in SSA. For example,
1910 final_cleanup calls this function via
1911 cleanup_tree_cfg. */
1912 if (gimple_in_ssa_p (cfun
))
1913 release_defs (stmt
);
1915 gsi_remove (&i
, true);
1919 i
= gsi_last_bb (bb
);
1925 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
1926 bb
->il
.gimple
.seq
= NULL
;
1927 bb
->il
.gimple
.phi_nodes
= NULL
;
1931 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1932 predicate VAL, return the edge that will be taken out of the block.
1933 If VAL does not match a unique edge, NULL is returned. */
1936 find_taken_edge (basic_block bb
, tree val
)
1940 stmt
= last_stmt (bb
);
1943 gcc_assert (is_ctrl_stmt (stmt
));
1948 if (!is_gimple_min_invariant (val
))
1951 if (gimple_code (stmt
) == GIMPLE_COND
)
1952 return find_taken_edge_cond_expr (bb
, val
);
1954 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
1955 return find_taken_edge_switch_expr (bb
, val
);
1957 if (computed_goto_p (stmt
))
1959 /* Only optimize if the argument is a label, if the argument is
1960 not a label then we can not construct a proper CFG.
1962 It may be the case that we only need to allow the LABEL_REF to
1963 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1964 appear inside a LABEL_EXPR just to be safe. */
1965 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
1966 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
1967 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
1974 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1975 statement, determine which of the outgoing edges will be taken out of the
1976 block. Return NULL if either edge may be taken. */
1979 find_taken_edge_computed_goto (basic_block bb
, tree val
)
1984 dest
= label_to_block (val
);
1987 e
= find_edge (bb
, dest
);
1988 gcc_assert (e
!= NULL
);
1994 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1995 statement, determine which of the two edges will be taken out of the
1996 block. Return NULL if either edge may be taken. */
1999 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2001 edge true_edge
, false_edge
;
2003 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2005 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2006 return (integer_zerop (val
) ? false_edge
: true_edge
);
2009 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2010 statement, determine which edge will be taken out of the block. Return
2011 NULL if any edge may be taken. */
2014 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2016 basic_block dest_bb
;
2021 switch_stmt
= last_stmt (bb
);
2022 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2023 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2025 e
= find_edge (bb
, dest_bb
);
2031 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2032 We can make optimal use here of the fact that the case labels are
2033 sorted: We can do a binary search for a case matching VAL. */
2036 find_case_label_for_value (gimple switch_stmt
, tree val
)
2038 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2039 tree default_case
= gimple_switch_default_label (switch_stmt
);
2041 for (low
= 0, high
= n
; high
- low
> 1; )
2043 size_t i
= (high
+ low
) / 2;
2044 tree t
= gimple_switch_label (switch_stmt
, i
);
2047 /* Cache the result of comparing CASE_LOW and val. */
2048 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2055 if (CASE_HIGH (t
) == NULL
)
2057 /* A singe-valued case label. */
2063 /* A case range. We can only handle integer ranges. */
2064 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2069 return default_case
;
2073 /* Dump a basic block on stderr. */
2076 gimple_debug_bb (basic_block bb
)
2078 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2082 /* Dump basic block with index N on stderr. */
2085 gimple_debug_bb_n (int n
)
2087 gimple_debug_bb (BASIC_BLOCK (n
));
2088 return BASIC_BLOCK (n
);
2092 /* Dump the CFG on stderr.
2094 FLAGS are the same used by the tree dumping functions
2095 (see TDF_* in dumpfile.h). */
2098 gimple_debug_cfg (int flags
)
2100 gimple_dump_cfg (stderr
, flags
);
2104 /* Dump the program showing basic block boundaries on the given FILE.
2106 FLAGS are the same used by the tree dumping functions (see TDF_* in
2110 gimple_dump_cfg (FILE *file
, int flags
)
2112 if (flags
& TDF_DETAILS
)
2114 dump_function_header (file
, current_function_decl
, flags
);
2115 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2116 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2119 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2120 fprintf (file
, "\n");
2123 if (flags
& TDF_STATS
)
2124 dump_cfg_stats (file
);
2126 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2130 /* Dump CFG statistics on FILE. */
2133 dump_cfg_stats (FILE *file
)
2135 static long max_num_merged_labels
= 0;
2136 unsigned long size
, total
= 0;
2139 const char * const fmt_str
= "%-30s%-13s%12s\n";
2140 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2141 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2142 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2143 const char *funcname
= current_function_name ();
2145 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2147 fprintf (file
, "---------------------------------------------------------\n");
2148 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2149 fprintf (file
, fmt_str
, "", " instances ", "used ");
2150 fprintf (file
, "---------------------------------------------------------\n");
2152 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2154 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2155 SCALE (size
), LABEL (size
));
2159 num_edges
+= EDGE_COUNT (bb
->succs
);
2160 size
= num_edges
* sizeof (struct edge_def
);
2162 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2164 fprintf (file
, "---------------------------------------------------------\n");
2165 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2167 fprintf (file
, "---------------------------------------------------------\n");
2168 fprintf (file
, "\n");
2170 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2171 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2173 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2174 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2176 fprintf (file
, "\n");
2180 /* Dump CFG statistics on stderr. Keep extern so that it's always
2181 linked in the final executable. */
2184 debug_cfg_stats (void)
2186 dump_cfg_stats (stderr
);
2189 /*---------------------------------------------------------------------------
2190 Miscellaneous helpers
2191 ---------------------------------------------------------------------------*/
2193 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2194 flow. Transfers of control flow associated with EH are excluded. */
2197 call_can_make_abnormal_goto (gimple t
)
2199 /* If the function has no non-local labels, then a call cannot make an
2200 abnormal transfer of control. */
2201 if (!cfun
->has_nonlocal_label
2202 && !cfun
->calls_setjmp
)
2205 /* Likewise if the call has no side effects. */
2206 if (!gimple_has_side_effects (t
))
2209 /* Likewise if the called function is leaf. */
2210 if (gimple_call_flags (t
) & ECF_LEAF
)
2217 /* Return true if T can make an abnormal transfer of control flow.
2218 Transfers of control flow associated with EH are excluded. */
2221 stmt_can_make_abnormal_goto (gimple t
)
2223 if (computed_goto_p (t
))
2225 if (is_gimple_call (t
))
2226 return call_can_make_abnormal_goto (t
);
2231 /* Return true if T represents a stmt that always transfers control. */
2234 is_ctrl_stmt (gimple t
)
2236 switch (gimple_code (t
))
2250 /* Return true if T is a statement that may alter the flow of control
2251 (e.g., a call to a non-returning function). */
2254 is_ctrl_altering_stmt (gimple t
)
2258 switch (gimple_code (t
))
2262 int flags
= gimple_call_flags (t
);
2264 /* A call alters control flow if it can make an abnormal goto. */
2265 if (call_can_make_abnormal_goto (t
))
2268 /* A call also alters control flow if it does not return. */
2269 if (flags
& ECF_NORETURN
)
2272 /* TM ending statements have backedges out of the transaction.
2273 Return true so we split the basic block containing them.
2274 Note that the TM_BUILTIN test is merely an optimization. */
2275 if ((flags
& ECF_TM_BUILTIN
)
2276 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2279 /* BUILT_IN_RETURN call is same as return statement. */
2280 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2285 case GIMPLE_EH_DISPATCH
:
2286 /* EH_DISPATCH branches to the individual catch handlers at
2287 this level of a try or allowed-exceptions region. It can
2288 fallthru to the next statement as well. */
2292 if (gimple_asm_nlabels (t
) > 0)
2297 /* OpenMP directives alter control flow. */
2300 case GIMPLE_TRANSACTION
:
2301 /* A transaction start alters control flow. */
2308 /* If a statement can throw, it alters control flow. */
2309 return stmt_can_throw_internal (t
);
2313 /* Return true if T is a simple local goto. */
2316 simple_goto_p (gimple t
)
2318 return (gimple_code (t
) == GIMPLE_GOTO
2319 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2323 /* Return true if STMT should start a new basic block. PREV_STMT is
2324 the statement preceding STMT. It is used when STMT is a label or a
2325 case label. Labels should only start a new basic block if their
2326 previous statement wasn't a label. Otherwise, sequence of labels
2327 would generate unnecessary basic blocks that only contain a single
2331 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2336 /* Labels start a new basic block only if the preceding statement
2337 wasn't a label of the same type. This prevents the creation of
2338 consecutive blocks that have nothing but a single label. */
2339 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2341 /* Nonlocal and computed GOTO targets always start a new block. */
2342 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2343 || FORCED_LABEL (gimple_label_label (stmt
)))
2346 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2348 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2351 cfg_stats
.num_merged_labels
++;
2357 else if (gimple_code (stmt
) == GIMPLE_CALL
2358 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2359 /* setjmp acts similar to a nonlocal GOTO target and thus should
2360 start a new block. */
2367 /* Return true if T should end a basic block. */
2370 stmt_ends_bb_p (gimple t
)
2372 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2375 /* Remove block annotations and other data structures. */
2378 delete_tree_cfg_annotations (void)
2380 vec_free (label_to_block_map
);
2384 /* Return the first statement in basic block BB. */
2387 first_stmt (basic_block bb
)
2389 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2392 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2400 /* Return the first non-label statement in basic block BB. */
2403 first_non_label_stmt (basic_block bb
)
2405 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2406 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2408 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2411 /* Return the last statement in basic block BB. */
2414 last_stmt (basic_block bb
)
2416 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2419 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2427 /* Return the last statement of an otherwise empty block. Return NULL
2428 if the block is totally empty, or if it contains more than one
2432 last_and_only_stmt (basic_block bb
)
2434 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2440 last
= gsi_stmt (i
);
2441 gsi_prev_nondebug (&i
);
2445 /* Empty statements should no longer appear in the instruction stream.
2446 Everything that might have appeared before should be deleted by
2447 remove_useless_stmts, and the optimizers should just gsi_remove
2448 instead of smashing with build_empty_stmt.
2450 Thus the only thing that should appear here in a block containing
2451 one executable statement is a label. */
2452 prev
= gsi_stmt (i
);
2453 if (gimple_code (prev
) == GIMPLE_LABEL
)
2459 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2462 reinstall_phi_args (edge new_edge
, edge old_edge
)
2464 edge_var_map_vector
*v
;
2467 gimple_stmt_iterator phis
;
2469 v
= redirect_edge_var_map_vector (old_edge
);
2473 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2474 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2475 i
++, gsi_next (&phis
))
2477 gimple phi
= gsi_stmt (phis
);
2478 tree result
= redirect_edge_var_map_result (vm
);
2479 tree arg
= redirect_edge_var_map_def (vm
);
2481 gcc_assert (result
== gimple_phi_result (phi
));
2483 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2486 redirect_edge_var_map_clear (old_edge
);
2489 /* Returns the basic block after which the new basic block created
2490 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2491 near its "logical" location. This is of most help to humans looking
2492 at debugging dumps. */
2495 split_edge_bb_loc (edge edge_in
)
2497 basic_block dest
= edge_in
->dest
;
2498 basic_block dest_prev
= dest
->prev_bb
;
2502 edge e
= find_edge (dest_prev
, dest
);
2503 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2504 return edge_in
->src
;
2509 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2510 Abort on abnormal edges. */
2513 gimple_split_edge (edge edge_in
)
2515 basic_block new_bb
, after_bb
, dest
;
2518 /* Abnormal edges cannot be split. */
2519 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2521 dest
= edge_in
->dest
;
2523 after_bb
= split_edge_bb_loc (edge_in
);
2525 new_bb
= create_empty_bb (after_bb
);
2526 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2527 new_bb
->count
= edge_in
->count
;
2528 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2529 new_edge
->probability
= REG_BR_PROB_BASE
;
2530 new_edge
->count
= edge_in
->count
;
2532 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2533 gcc_assert (e
== edge_in
);
2534 reinstall_phi_args (new_edge
, e
);
2540 /* Verify properties of the address expression T with base object BASE. */
2543 verify_address (tree t
, tree base
)
2546 bool old_side_effects
;
2548 bool new_side_effects
;
2550 old_constant
= TREE_CONSTANT (t
);
2551 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2553 recompute_tree_invariant_for_addr_expr (t
);
2554 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2555 new_constant
= TREE_CONSTANT (t
);
2557 if (old_constant
!= new_constant
)
2559 error ("constant not recomputed when ADDR_EXPR changed");
2562 if (old_side_effects
!= new_side_effects
)
2564 error ("side effects not recomputed when ADDR_EXPR changed");
2568 if (!(TREE_CODE (base
) == VAR_DECL
2569 || TREE_CODE (base
) == PARM_DECL
2570 || TREE_CODE (base
) == RESULT_DECL
))
2573 if (DECL_GIMPLE_REG_P (base
))
2575 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2582 /* Callback for walk_tree, check that all elements with address taken are
2583 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2584 inside a PHI node. */
2587 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2594 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2595 #define CHECK_OP(N, MSG) \
2596 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2597 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2599 switch (TREE_CODE (t
))
2602 if (SSA_NAME_IN_FREE_LIST (t
))
2604 error ("SSA name in freelist but still referenced");
2610 error ("INDIRECT_REF in gimple IL");
2614 x
= TREE_OPERAND (t
, 0);
2615 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2616 || !is_gimple_mem_ref_addr (x
))
2618 error ("invalid first operand of MEM_REF");
2621 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2622 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2624 error ("invalid offset operand of MEM_REF");
2625 return TREE_OPERAND (t
, 1);
2627 if (TREE_CODE (x
) == ADDR_EXPR
2628 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2634 x
= fold (ASSERT_EXPR_COND (t
));
2635 if (x
== boolean_false_node
)
2637 error ("ASSERT_EXPR with an always-false condition");
2643 error ("MODIFY_EXPR not expected while having tuples");
2650 gcc_assert (is_gimple_address (t
));
2652 /* Skip any references (they will be checked when we recurse down the
2653 tree) and ensure that any variable used as a prefix is marked
2655 for (x
= TREE_OPERAND (t
, 0);
2656 handled_component_p (x
);
2657 x
= TREE_OPERAND (x
, 0))
2660 if ((tem
= verify_address (t
, x
)))
2663 if (!(TREE_CODE (x
) == VAR_DECL
2664 || TREE_CODE (x
) == PARM_DECL
2665 || TREE_CODE (x
) == RESULT_DECL
))
2668 if (!TREE_ADDRESSABLE (x
))
2670 error ("address taken, but ADDRESSABLE bit not set");
2678 x
= COND_EXPR_COND (t
);
2679 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2681 error ("non-integral used in condition");
2684 if (!is_gimple_condexpr (x
))
2686 error ("invalid conditional operand");
2691 case NON_LVALUE_EXPR
:
2692 case TRUTH_NOT_EXPR
:
2696 case FIX_TRUNC_EXPR
:
2701 CHECK_OP (0, "invalid operand to unary operator");
2707 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2709 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2713 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2715 tree t0
= TREE_OPERAND (t
, 0);
2716 tree t1
= TREE_OPERAND (t
, 1);
2717 tree t2
= TREE_OPERAND (t
, 2);
2718 if (!tree_fits_uhwi_p (t1
)
2719 || !tree_fits_uhwi_p (t2
))
2721 error ("invalid position or size operand to BIT_FIELD_REF");
2724 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2725 && (TYPE_PRECISION (TREE_TYPE (t
))
2726 != tree_to_uhwi (t1
)))
2728 error ("integral result type precision does not match "
2729 "field size of BIT_FIELD_REF");
2732 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2733 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2734 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2735 != tree_to_uhwi (t1
)))
2737 error ("mode precision of non-integral result does not "
2738 "match field size of BIT_FIELD_REF");
2741 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2742 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2743 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2745 error ("position plus size exceeds size of referenced object in "
2750 t
= TREE_OPERAND (t
, 0);
2755 case ARRAY_RANGE_REF
:
2756 case VIEW_CONVERT_EXPR
:
2757 /* We have a nest of references. Verify that each of the operands
2758 that determine where to reference is either a constant or a variable,
2759 verify that the base is valid, and then show we've already checked
2761 while (handled_component_p (t
))
2763 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2764 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2765 else if (TREE_CODE (t
) == ARRAY_REF
2766 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2768 CHECK_OP (1, "invalid array index");
2769 if (TREE_OPERAND (t
, 2))
2770 CHECK_OP (2, "invalid array lower bound");
2771 if (TREE_OPERAND (t
, 3))
2772 CHECK_OP (3, "invalid array stride");
2774 else if (TREE_CODE (t
) == BIT_FIELD_REF
2775 || TREE_CODE (t
) == REALPART_EXPR
2776 || TREE_CODE (t
) == IMAGPART_EXPR
)
2778 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2783 t
= TREE_OPERAND (t
, 0);
2786 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2788 error ("invalid reference prefix");
2795 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2796 POINTER_PLUS_EXPR. */
2797 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2799 error ("invalid operand to plus/minus, type is a pointer");
2802 CHECK_OP (0, "invalid operand to binary operator");
2803 CHECK_OP (1, "invalid operand to binary operator");
2806 case POINTER_PLUS_EXPR
:
2807 /* Check to make sure the first operand is a pointer or reference type. */
2808 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2810 error ("invalid operand to pointer plus, first operand is not a pointer");
2813 /* Check to make sure the second operand is a ptrofftype. */
2814 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2816 error ("invalid operand to pointer plus, second operand is not an "
2817 "integer type of appropriate width");
2827 case UNORDERED_EXPR
:
2836 case TRUNC_DIV_EXPR
:
2838 case FLOOR_DIV_EXPR
:
2839 case ROUND_DIV_EXPR
:
2840 case TRUNC_MOD_EXPR
:
2842 case FLOOR_MOD_EXPR
:
2843 case ROUND_MOD_EXPR
:
2845 case EXACT_DIV_EXPR
:
2855 CHECK_OP (0, "invalid operand to binary operator");
2856 CHECK_OP (1, "invalid operand to binary operator");
2860 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2864 case CASE_LABEL_EXPR
:
2867 error ("invalid CASE_CHAIN");
2881 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2882 Returns true if there is an error, otherwise false. */
2885 verify_types_in_gimple_min_lval (tree expr
)
2889 if (is_gimple_id (expr
))
2892 if (TREE_CODE (expr
) != TARGET_MEM_REF
2893 && TREE_CODE (expr
) != MEM_REF
)
2895 error ("invalid expression for min lvalue");
2899 /* TARGET_MEM_REFs are strange beasts. */
2900 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
2903 op
= TREE_OPERAND (expr
, 0);
2904 if (!is_gimple_val (op
))
2906 error ("invalid operand in indirect reference");
2907 debug_generic_stmt (op
);
2910 /* Memory references now generally can involve a value conversion. */
2915 /* Verify if EXPR is a valid GIMPLE reference expression. If
2916 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2917 if there is an error, otherwise false. */
2920 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
2922 while (handled_component_p (expr
))
2924 tree op
= TREE_OPERAND (expr
, 0);
2926 if (TREE_CODE (expr
) == ARRAY_REF
2927 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
2929 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
2930 || (TREE_OPERAND (expr
, 2)
2931 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
2932 || (TREE_OPERAND (expr
, 3)
2933 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
2935 error ("invalid operands to array reference");
2936 debug_generic_stmt (expr
);
2941 /* Verify if the reference array element types are compatible. */
2942 if (TREE_CODE (expr
) == ARRAY_REF
2943 && !useless_type_conversion_p (TREE_TYPE (expr
),
2944 TREE_TYPE (TREE_TYPE (op
))))
2946 error ("type mismatch in array reference");
2947 debug_generic_stmt (TREE_TYPE (expr
));
2948 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2951 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
2952 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
2953 TREE_TYPE (TREE_TYPE (op
))))
2955 error ("type mismatch in array range reference");
2956 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
2957 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2961 if ((TREE_CODE (expr
) == REALPART_EXPR
2962 || TREE_CODE (expr
) == IMAGPART_EXPR
)
2963 && !useless_type_conversion_p (TREE_TYPE (expr
),
2964 TREE_TYPE (TREE_TYPE (op
))))
2966 error ("type mismatch in real/imagpart reference");
2967 debug_generic_stmt (TREE_TYPE (expr
));
2968 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2972 if (TREE_CODE (expr
) == COMPONENT_REF
2973 && !useless_type_conversion_p (TREE_TYPE (expr
),
2974 TREE_TYPE (TREE_OPERAND (expr
, 1))))
2976 error ("type mismatch in component reference");
2977 debug_generic_stmt (TREE_TYPE (expr
));
2978 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
2982 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
2984 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2985 that their operand is not an SSA name or an invariant when
2986 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2987 bug). Otherwise there is nothing to verify, gross mismatches at
2988 most invoke undefined behavior. */
2990 && (TREE_CODE (op
) == SSA_NAME
2991 || is_gimple_min_invariant (op
)))
2993 error ("conversion of an SSA_NAME on the left hand side");
2994 debug_generic_stmt (expr
);
2997 else if (TREE_CODE (op
) == SSA_NAME
2998 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3000 error ("conversion of register to a different size");
3001 debug_generic_stmt (expr
);
3004 else if (!handled_component_p (op
))
3011 if (TREE_CODE (expr
) == MEM_REF
)
3013 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3015 error ("invalid address operand in MEM_REF");
3016 debug_generic_stmt (expr
);
3019 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3020 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3022 error ("invalid offset operand in MEM_REF");
3023 debug_generic_stmt (expr
);
3027 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3029 if (!TMR_BASE (expr
)
3030 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3032 error ("invalid address operand in TARGET_MEM_REF");
3035 if (!TMR_OFFSET (expr
)
3036 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3037 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3039 error ("invalid offset operand in TARGET_MEM_REF");
3040 debug_generic_stmt (expr
);
3045 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3046 && verify_types_in_gimple_min_lval (expr
));
3049 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3050 list of pointer-to types that is trivially convertible to DEST. */
3053 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3057 if (!TYPE_POINTER_TO (src_obj
))
3060 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3061 if (useless_type_conversion_p (dest
, src
))
3067 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3068 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3071 valid_fixed_convert_types_p (tree type1
, tree type2
)
3073 return (FIXED_POINT_TYPE_P (type1
)
3074 && (INTEGRAL_TYPE_P (type2
)
3075 || SCALAR_FLOAT_TYPE_P (type2
)
3076 || FIXED_POINT_TYPE_P (type2
)));
3079 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3080 is a problem, otherwise false. */
3083 verify_gimple_call (gimple stmt
)
3085 tree fn
= gimple_call_fn (stmt
);
3086 tree fntype
, fndecl
;
3089 if (gimple_call_internal_p (stmt
))
3093 error ("gimple call has two targets");
3094 debug_generic_stmt (fn
);
3102 error ("gimple call has no target");
3107 if (fn
&& !is_gimple_call_addr (fn
))
3109 error ("invalid function in gimple call");
3110 debug_generic_stmt (fn
);
3115 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3116 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3117 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3119 error ("non-function in gimple call");
3123 fndecl
= gimple_call_fndecl (stmt
);
3125 && TREE_CODE (fndecl
) == FUNCTION_DECL
3126 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3127 && !DECL_PURE_P (fndecl
)
3128 && !TREE_READONLY (fndecl
))
3130 error ("invalid pure const state for function");
3134 if (gimple_call_lhs (stmt
)
3135 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3136 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3138 error ("invalid LHS in gimple call");
3142 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3144 error ("LHS in noreturn call");
3148 fntype
= gimple_call_fntype (stmt
);
3150 && gimple_call_lhs (stmt
)
3151 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3153 /* ??? At least C++ misses conversions at assignments from
3154 void * call results.
3155 ??? Java is completely off. Especially with functions
3156 returning java.lang.Object.
3157 For now simply allow arbitrary pointer type conversions. */
3158 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3159 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3161 error ("invalid conversion in gimple call");
3162 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3163 debug_generic_stmt (TREE_TYPE (fntype
));
3167 if (gimple_call_chain (stmt
)
3168 && !is_gimple_val (gimple_call_chain (stmt
)))
3170 error ("invalid static chain in gimple call");
3171 debug_generic_stmt (gimple_call_chain (stmt
));
3175 /* If there is a static chain argument, this should not be an indirect
3176 call, and the decl should have DECL_STATIC_CHAIN set. */
3177 if (gimple_call_chain (stmt
))
3179 if (!gimple_call_fndecl (stmt
))
3181 error ("static chain in indirect gimple call");
3184 fn
= TREE_OPERAND (fn
, 0);
3186 if (!DECL_STATIC_CHAIN (fn
))
3188 error ("static chain with function that doesn%'t use one");
3193 /* ??? The C frontend passes unpromoted arguments in case it
3194 didn't see a function declaration before the call. So for now
3195 leave the call arguments mostly unverified. Once we gimplify
3196 unit-at-a-time we have a chance to fix this. */
3198 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3200 tree arg
= gimple_call_arg (stmt
, i
);
3201 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3202 && !is_gimple_val (arg
))
3203 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3204 && !is_gimple_lvalue (arg
)))
3206 error ("invalid argument to gimple call");
3207 debug_generic_expr (arg
);
3215 /* Verifies the gimple comparison with the result type TYPE and
3216 the operands OP0 and OP1. */
3219 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3221 tree op0_type
= TREE_TYPE (op0
);
3222 tree op1_type
= TREE_TYPE (op1
);
3224 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3226 error ("invalid operands in gimple comparison");
3230 /* For comparisons we do not have the operations type as the
3231 effective type the comparison is carried out in. Instead
3232 we require that either the first operand is trivially
3233 convertible into the second, or the other way around.
3234 Because we special-case pointers to void we allow
3235 comparisons of pointers with the same mode as well. */
3236 if (!useless_type_conversion_p (op0_type
, op1_type
)
3237 && !useless_type_conversion_p (op1_type
, op0_type
)
3238 && (!POINTER_TYPE_P (op0_type
)
3239 || !POINTER_TYPE_P (op1_type
)
3240 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3242 error ("mismatching comparison operand types");
3243 debug_generic_expr (op0_type
);
3244 debug_generic_expr (op1_type
);
3248 /* The resulting type of a comparison may be an effective boolean type. */
3249 if (INTEGRAL_TYPE_P (type
)
3250 && (TREE_CODE (type
) == BOOLEAN_TYPE
3251 || TYPE_PRECISION (type
) == 1))
3253 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3254 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3256 error ("vector comparison returning a boolean");
3257 debug_generic_expr (op0_type
);
3258 debug_generic_expr (op1_type
);
3262 /* Or an integer vector type with the same size and element count
3263 as the comparison operand types. */
3264 else if (TREE_CODE (type
) == VECTOR_TYPE
3265 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3267 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3268 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3270 error ("non-vector operands in vector comparison");
3271 debug_generic_expr (op0_type
);
3272 debug_generic_expr (op1_type
);
3276 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3277 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3278 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3279 /* The result of a vector comparison is of signed
3281 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3283 error ("invalid vector comparison resulting type");
3284 debug_generic_expr (type
);
3290 error ("bogus comparison result type");
3291 debug_generic_expr (type
);
3298 /* Verify a gimple assignment statement STMT with an unary rhs.
3299 Returns true if anything is wrong. */
3302 verify_gimple_assign_unary (gimple stmt
)
3304 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3305 tree lhs
= gimple_assign_lhs (stmt
);
3306 tree lhs_type
= TREE_TYPE (lhs
);
3307 tree rhs1
= gimple_assign_rhs1 (stmt
);
3308 tree rhs1_type
= TREE_TYPE (rhs1
);
3310 if (!is_gimple_reg (lhs
))
3312 error ("non-register as LHS of unary operation");
3316 if (!is_gimple_val (rhs1
))
3318 error ("invalid operand in unary operation");
3322 /* First handle conversions. */
3327 /* Allow conversions from pointer type to integral type only if
3328 there is no sign or zero extension involved.
3329 For targets were the precision of ptrofftype doesn't match that
3330 of pointers we need to allow arbitrary conversions to ptrofftype. */
3331 if ((POINTER_TYPE_P (lhs_type
)
3332 && INTEGRAL_TYPE_P (rhs1_type
))
3333 || (POINTER_TYPE_P (rhs1_type
)
3334 && INTEGRAL_TYPE_P (lhs_type
)
3335 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3336 || ptrofftype_p (sizetype
))))
3339 /* Allow conversion from integral to offset type and vice versa. */
3340 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3341 && INTEGRAL_TYPE_P (rhs1_type
))
3342 || (INTEGRAL_TYPE_P (lhs_type
)
3343 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3346 /* Otherwise assert we are converting between types of the
3348 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3350 error ("invalid types in nop conversion");
3351 debug_generic_expr (lhs_type
);
3352 debug_generic_expr (rhs1_type
);
3359 case ADDR_SPACE_CONVERT_EXPR
:
3361 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3362 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3363 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3365 error ("invalid types in address space conversion");
3366 debug_generic_expr (lhs_type
);
3367 debug_generic_expr (rhs1_type
);
3374 case FIXED_CONVERT_EXPR
:
3376 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3377 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3379 error ("invalid types in fixed-point conversion");
3380 debug_generic_expr (lhs_type
);
3381 debug_generic_expr (rhs1_type
);
3390 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3391 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3392 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3394 error ("invalid types in conversion to floating point");
3395 debug_generic_expr (lhs_type
);
3396 debug_generic_expr (rhs1_type
);
3403 case FIX_TRUNC_EXPR
:
3405 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3406 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3407 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3409 error ("invalid types in conversion to integer");
3410 debug_generic_expr (lhs_type
);
3411 debug_generic_expr (rhs1_type
);
3418 case VEC_UNPACK_HI_EXPR
:
3419 case VEC_UNPACK_LO_EXPR
:
3420 case REDUC_MAX_EXPR
:
3421 case REDUC_MIN_EXPR
:
3422 case REDUC_PLUS_EXPR
:
3423 case VEC_UNPACK_FLOAT_HI_EXPR
:
3424 case VEC_UNPACK_FLOAT_LO_EXPR
:
3432 case NON_LVALUE_EXPR
:
3440 /* For the remaining codes assert there is no conversion involved. */
3441 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3443 error ("non-trivial conversion in unary operation");
3444 debug_generic_expr (lhs_type
);
3445 debug_generic_expr (rhs1_type
);
3452 /* Verify a gimple assignment statement STMT with a binary rhs.
3453 Returns true if anything is wrong. */
3456 verify_gimple_assign_binary (gimple stmt
)
3458 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3459 tree lhs
= gimple_assign_lhs (stmt
);
3460 tree lhs_type
= TREE_TYPE (lhs
);
3461 tree rhs1
= gimple_assign_rhs1 (stmt
);
3462 tree rhs1_type
= TREE_TYPE (rhs1
);
3463 tree rhs2
= gimple_assign_rhs2 (stmt
);
3464 tree rhs2_type
= TREE_TYPE (rhs2
);
3466 if (!is_gimple_reg (lhs
))
3468 error ("non-register as LHS of binary operation");
3472 if (!is_gimple_val (rhs1
)
3473 || !is_gimple_val (rhs2
))
3475 error ("invalid operands in binary operation");
3479 /* First handle operations that involve different types. */
3484 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3485 || !(INTEGRAL_TYPE_P (rhs1_type
)
3486 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3487 || !(INTEGRAL_TYPE_P (rhs2_type
)
3488 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3490 error ("type mismatch in complex expression");
3491 debug_generic_expr (lhs_type
);
3492 debug_generic_expr (rhs1_type
);
3493 debug_generic_expr (rhs2_type
);
3505 /* Shifts and rotates are ok on integral types, fixed point
3506 types and integer vector types. */
3507 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3508 && !FIXED_POINT_TYPE_P (rhs1_type
)
3509 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3510 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3511 || (!INTEGRAL_TYPE_P (rhs2_type
)
3512 /* Vector shifts of vectors are also ok. */
3513 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3514 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3515 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3516 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3517 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3519 error ("type mismatch in shift expression");
3520 debug_generic_expr (lhs_type
);
3521 debug_generic_expr (rhs1_type
);
3522 debug_generic_expr (rhs2_type
);
3529 case VEC_LSHIFT_EXPR
:
3530 case VEC_RSHIFT_EXPR
:
3532 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3533 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3534 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3535 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3536 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3537 || (!INTEGRAL_TYPE_P (rhs2_type
)
3538 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3539 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3540 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3542 error ("type mismatch in vector shift expression");
3543 debug_generic_expr (lhs_type
);
3544 debug_generic_expr (rhs1_type
);
3545 debug_generic_expr (rhs2_type
);
3548 /* For shifting a vector of non-integral components we
3549 only allow shifting by a constant multiple of the element size. */
3550 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3551 && (TREE_CODE (rhs2
) != INTEGER_CST
3552 || !div_if_zero_remainder (EXACT_DIV_EXPR
, rhs2
,
3553 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3555 error ("non-element sized vector shift of floating point vector");
3562 case WIDEN_LSHIFT_EXPR
:
3564 if (!INTEGRAL_TYPE_P (lhs_type
)
3565 || !INTEGRAL_TYPE_P (rhs1_type
)
3566 || TREE_CODE (rhs2
) != INTEGER_CST
3567 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3569 error ("type mismatch in widening vector shift expression");
3570 debug_generic_expr (lhs_type
);
3571 debug_generic_expr (rhs1_type
);
3572 debug_generic_expr (rhs2_type
);
3579 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3580 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3582 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3583 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3584 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3585 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3586 || TREE_CODE (rhs2
) != INTEGER_CST
3587 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3588 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3590 error ("type mismatch in widening vector shift expression");
3591 debug_generic_expr (lhs_type
);
3592 debug_generic_expr (rhs1_type
);
3593 debug_generic_expr (rhs2_type
);
3603 tree lhs_etype
= lhs_type
;
3604 tree rhs1_etype
= rhs1_type
;
3605 tree rhs2_etype
= rhs2_type
;
3606 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3608 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3609 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3611 error ("invalid non-vector operands to vector valued plus");
3614 lhs_etype
= TREE_TYPE (lhs_type
);
3615 rhs1_etype
= TREE_TYPE (rhs1_type
);
3616 rhs2_etype
= TREE_TYPE (rhs2_type
);
3618 if (POINTER_TYPE_P (lhs_etype
)
3619 || POINTER_TYPE_P (rhs1_etype
)
3620 || POINTER_TYPE_P (rhs2_etype
))
3622 error ("invalid (pointer) operands to plus/minus");
3626 /* Continue with generic binary expression handling. */
3630 case POINTER_PLUS_EXPR
:
3632 if (!POINTER_TYPE_P (rhs1_type
)
3633 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3634 || !ptrofftype_p (rhs2_type
))
3636 error ("type mismatch in pointer plus expression");
3637 debug_generic_stmt (lhs_type
);
3638 debug_generic_stmt (rhs1_type
);
3639 debug_generic_stmt (rhs2_type
);
3646 case TRUTH_ANDIF_EXPR
:
3647 case TRUTH_ORIF_EXPR
:
3648 case TRUTH_AND_EXPR
:
3650 case TRUTH_XOR_EXPR
:
3660 case UNORDERED_EXPR
:
3668 /* Comparisons are also binary, but the result type is not
3669 connected to the operand types. */
3670 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3672 case WIDEN_MULT_EXPR
:
3673 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3675 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3676 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3678 case WIDEN_SUM_EXPR
:
3679 case VEC_WIDEN_MULT_HI_EXPR
:
3680 case VEC_WIDEN_MULT_LO_EXPR
:
3681 case VEC_WIDEN_MULT_EVEN_EXPR
:
3682 case VEC_WIDEN_MULT_ODD_EXPR
:
3683 case VEC_PACK_TRUNC_EXPR
:
3684 case VEC_PACK_SAT_EXPR
:
3685 case VEC_PACK_FIX_TRUNC_EXPR
:
3690 case MULT_HIGHPART_EXPR
:
3691 case TRUNC_DIV_EXPR
:
3693 case FLOOR_DIV_EXPR
:
3694 case ROUND_DIV_EXPR
:
3695 case TRUNC_MOD_EXPR
:
3697 case FLOOR_MOD_EXPR
:
3698 case ROUND_MOD_EXPR
:
3700 case EXACT_DIV_EXPR
:
3706 /* Continue with generic binary expression handling. */
3713 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3714 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3716 error ("type mismatch in binary expression");
3717 debug_generic_stmt (lhs_type
);
3718 debug_generic_stmt (rhs1_type
);
3719 debug_generic_stmt (rhs2_type
);
3726 /* Verify a gimple assignment statement STMT with a ternary rhs.
3727 Returns true if anything is wrong. */
3730 verify_gimple_assign_ternary (gimple stmt
)
3732 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3733 tree lhs
= gimple_assign_lhs (stmt
);
3734 tree lhs_type
= TREE_TYPE (lhs
);
3735 tree rhs1
= gimple_assign_rhs1 (stmt
);
3736 tree rhs1_type
= TREE_TYPE (rhs1
);
3737 tree rhs2
= gimple_assign_rhs2 (stmt
);
3738 tree rhs2_type
= TREE_TYPE (rhs2
);
3739 tree rhs3
= gimple_assign_rhs3 (stmt
);
3740 tree rhs3_type
= TREE_TYPE (rhs3
);
3742 if (!is_gimple_reg (lhs
))
3744 error ("non-register as LHS of ternary operation");
3748 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3749 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3750 || !is_gimple_val (rhs2
)
3751 || !is_gimple_val (rhs3
))
3753 error ("invalid operands in ternary operation");
3757 /* First handle operations that involve different types. */
3760 case WIDEN_MULT_PLUS_EXPR
:
3761 case WIDEN_MULT_MINUS_EXPR
:
3762 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3763 && !FIXED_POINT_TYPE_P (rhs1_type
))
3764 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3765 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3766 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3767 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3769 error ("type mismatch in widening multiply-accumulate expression");
3770 debug_generic_expr (lhs_type
);
3771 debug_generic_expr (rhs1_type
);
3772 debug_generic_expr (rhs2_type
);
3773 debug_generic_expr (rhs3_type
);
3779 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3780 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3781 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3783 error ("type mismatch in fused multiply-add expression");
3784 debug_generic_expr (lhs_type
);
3785 debug_generic_expr (rhs1_type
);
3786 debug_generic_expr (rhs2_type
);
3787 debug_generic_expr (rhs3_type
);
3794 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3795 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3797 error ("type mismatch in conditional expression");
3798 debug_generic_expr (lhs_type
);
3799 debug_generic_expr (rhs2_type
);
3800 debug_generic_expr (rhs3_type
);
3806 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3807 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3809 error ("type mismatch in vector permute expression");
3810 debug_generic_expr (lhs_type
);
3811 debug_generic_expr (rhs1_type
);
3812 debug_generic_expr (rhs2_type
);
3813 debug_generic_expr (rhs3_type
);
3817 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3818 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3819 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3821 error ("vector types expected in vector permute expression");
3822 debug_generic_expr (lhs_type
);
3823 debug_generic_expr (rhs1_type
);
3824 debug_generic_expr (rhs2_type
);
3825 debug_generic_expr (rhs3_type
);
3829 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3830 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3831 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3832 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3833 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3835 error ("vectors with different element number found "
3836 "in vector permute expression");
3837 debug_generic_expr (lhs_type
);
3838 debug_generic_expr (rhs1_type
);
3839 debug_generic_expr (rhs2_type
);
3840 debug_generic_expr (rhs3_type
);
3844 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3845 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3846 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3848 error ("invalid mask type in vector permute expression");
3849 debug_generic_expr (lhs_type
);
3850 debug_generic_expr (rhs1_type
);
3851 debug_generic_expr (rhs2_type
);
3852 debug_generic_expr (rhs3_type
);
3859 case REALIGN_LOAD_EXPR
:
3869 /* Verify a gimple assignment statement STMT with a single rhs.
3870 Returns true if anything is wrong. */
3873 verify_gimple_assign_single (gimple stmt
)
3875 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3876 tree lhs
= gimple_assign_lhs (stmt
);
3877 tree lhs_type
= TREE_TYPE (lhs
);
3878 tree rhs1
= gimple_assign_rhs1 (stmt
);
3879 tree rhs1_type
= TREE_TYPE (rhs1
);
3882 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3884 error ("non-trivial conversion at assignment");
3885 debug_generic_expr (lhs_type
);
3886 debug_generic_expr (rhs1_type
);
3890 if (gimple_clobber_p (stmt
)
3891 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
3893 error ("non-decl/MEM_REF LHS in clobber statement");
3894 debug_generic_expr (lhs
);
3898 if (handled_component_p (lhs
))
3899 res
|= verify_types_in_gimple_reference (lhs
, true);
3901 /* Special codes we cannot handle via their class. */
3906 tree op
= TREE_OPERAND (rhs1
, 0);
3907 if (!is_gimple_addressable (op
))
3909 error ("invalid operand in unary expression");
3913 /* Technically there is no longer a need for matching types, but
3914 gimple hygiene asks for this check. In LTO we can end up
3915 combining incompatible units and thus end up with addresses
3916 of globals that change their type to a common one. */
3918 && !types_compatible_p (TREE_TYPE (op
),
3919 TREE_TYPE (TREE_TYPE (rhs1
)))
3920 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
3923 error ("type mismatch in address expression");
3924 debug_generic_stmt (TREE_TYPE (rhs1
));
3925 debug_generic_stmt (TREE_TYPE (op
));
3929 return verify_types_in_gimple_reference (op
, true);
3934 error ("INDIRECT_REF in gimple IL");
3940 case ARRAY_RANGE_REF
:
3941 case VIEW_CONVERT_EXPR
:
3944 case TARGET_MEM_REF
:
3946 if (!is_gimple_reg (lhs
)
3947 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3949 error ("invalid rhs for gimple memory store");
3950 debug_generic_stmt (lhs
);
3951 debug_generic_stmt (rhs1
);
3954 return res
|| verify_types_in_gimple_reference (rhs1
, false);
3966 /* tcc_declaration */
3971 if (!is_gimple_reg (lhs
)
3972 && !is_gimple_reg (rhs1
)
3973 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3975 error ("invalid rhs for gimple memory store");
3976 debug_generic_stmt (lhs
);
3977 debug_generic_stmt (rhs1
);
3983 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
3986 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
3988 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
3990 /* For vector CONSTRUCTORs we require that either it is empty
3991 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3992 (then the element count must be correct to cover the whole
3993 outer vector and index must be NULL on all elements, or it is
3994 a CONSTRUCTOR of scalar elements, where we as an exception allow
3995 smaller number of elements (assuming zero filling) and
3996 consecutive indexes as compared to NULL indexes (such
3997 CONSTRUCTORs can appear in the IL from FEs). */
3998 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4000 if (elt_t
== NULL_TREE
)
4002 elt_t
= TREE_TYPE (elt_v
);
4003 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4005 tree elt_t
= TREE_TYPE (elt_v
);
4006 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4009 error ("incorrect type of vector CONSTRUCTOR"
4011 debug_generic_stmt (rhs1
);
4014 else if (CONSTRUCTOR_NELTS (rhs1
)
4015 * TYPE_VECTOR_SUBPARTS (elt_t
)
4016 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4018 error ("incorrect number of vector CONSTRUCTOR"
4020 debug_generic_stmt (rhs1
);
4024 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4027 error ("incorrect type of vector CONSTRUCTOR elements");
4028 debug_generic_stmt (rhs1
);
4031 else if (CONSTRUCTOR_NELTS (rhs1
)
4032 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4034 error ("incorrect number of vector CONSTRUCTOR elements");
4035 debug_generic_stmt (rhs1
);
4039 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4041 error ("incorrect type of vector CONSTRUCTOR elements");
4042 debug_generic_stmt (rhs1
);
4045 if (elt_i
!= NULL_TREE
4046 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4047 || TREE_CODE (elt_i
) != INTEGER_CST
4048 || compare_tree_int (elt_i
, i
) != 0))
4050 error ("vector CONSTRUCTOR with non-NULL element index");
4051 debug_generic_stmt (rhs1
);
4059 case WITH_SIZE_EXPR
:
4069 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4070 is a problem, otherwise false. */
4073 verify_gimple_assign (gimple stmt
)
4075 switch (gimple_assign_rhs_class (stmt
))
4077 case GIMPLE_SINGLE_RHS
:
4078 return verify_gimple_assign_single (stmt
);
4080 case GIMPLE_UNARY_RHS
:
4081 return verify_gimple_assign_unary (stmt
);
4083 case GIMPLE_BINARY_RHS
:
4084 return verify_gimple_assign_binary (stmt
);
4086 case GIMPLE_TERNARY_RHS
:
4087 return verify_gimple_assign_ternary (stmt
);
4094 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4095 is a problem, otherwise false. */
4098 verify_gimple_return (gimple stmt
)
4100 tree op
= gimple_return_retval (stmt
);
4101 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4103 /* We cannot test for present return values as we do not fix up missing
4104 return values from the original source. */
4108 if (!is_gimple_val (op
)
4109 && TREE_CODE (op
) != RESULT_DECL
)
4111 error ("invalid operand in return statement");
4112 debug_generic_stmt (op
);
4116 if ((TREE_CODE (op
) == RESULT_DECL
4117 && DECL_BY_REFERENCE (op
))
4118 || (TREE_CODE (op
) == SSA_NAME
4119 && SSA_NAME_VAR (op
)
4120 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4121 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4122 op
= TREE_TYPE (op
);
4124 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4126 error ("invalid conversion in return statement");
4127 debug_generic_stmt (restype
);
4128 debug_generic_stmt (TREE_TYPE (op
));
4136 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4137 is a problem, otherwise false. */
4140 verify_gimple_goto (gimple stmt
)
4142 tree dest
= gimple_goto_dest (stmt
);
4144 /* ??? We have two canonical forms of direct goto destinations, a
4145 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4146 if (TREE_CODE (dest
) != LABEL_DECL
4147 && (!is_gimple_val (dest
)
4148 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4150 error ("goto destination is neither a label nor a pointer");
4157 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4158 is a problem, otherwise false. */
4161 verify_gimple_switch (gimple stmt
)
4164 tree elt
, prev_upper_bound
= NULL_TREE
;
4165 tree index_type
, elt_type
= NULL_TREE
;
4167 if (!is_gimple_val (gimple_switch_index (stmt
)))
4169 error ("invalid operand to switch statement");
4170 debug_generic_stmt (gimple_switch_index (stmt
));
4174 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4175 if (! INTEGRAL_TYPE_P (index_type
))
4177 error ("non-integral type switch statement");
4178 debug_generic_expr (index_type
);
4182 elt
= gimple_switch_label (stmt
, 0);
4183 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4185 error ("invalid default case label in switch statement");
4186 debug_generic_expr (elt
);
4190 n
= gimple_switch_num_labels (stmt
);
4191 for (i
= 1; i
< n
; i
++)
4193 elt
= gimple_switch_label (stmt
, i
);
4195 if (! CASE_LOW (elt
))
4197 error ("invalid case label in switch statement");
4198 debug_generic_expr (elt
);
4202 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4204 error ("invalid case range in switch statement");
4205 debug_generic_expr (elt
);
4211 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4212 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4214 error ("type mismatch for case label in switch statement");
4215 debug_generic_expr (elt
);
4221 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4222 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4224 error ("type precision mismatch in switch statement");
4229 if (prev_upper_bound
)
4231 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4233 error ("case labels not sorted in switch statement");
4238 prev_upper_bound
= CASE_HIGH (elt
);
4239 if (! prev_upper_bound
)
4240 prev_upper_bound
= CASE_LOW (elt
);
4246 /* Verify a gimple debug statement STMT.
4247 Returns true if anything is wrong. */
4250 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4252 /* There isn't much that could be wrong in a gimple debug stmt. A
4253 gimple debug bind stmt, for example, maps a tree, that's usually
4254 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4255 component or member of an aggregate type, to another tree, that
4256 can be an arbitrary expression. These stmts expand into debug
4257 insns, and are converted to debug notes by var-tracking.c. */
4261 /* Verify a gimple label statement STMT.
4262 Returns true if anything is wrong. */
4265 verify_gimple_label (gimple stmt
)
4267 tree decl
= gimple_label_label (stmt
);
4271 if (TREE_CODE (decl
) != LABEL_DECL
)
4273 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4274 && DECL_CONTEXT (decl
) != current_function_decl
)
4276 error ("label's context is not the current function decl");
4280 uid
= LABEL_DECL_UID (decl
);
4282 && (uid
== -1 || (*label_to_block_map
)[uid
] != gimple_bb (stmt
)))
4284 error ("incorrect entry in label_to_block_map");
4288 uid
= EH_LANDING_PAD_NR (decl
);
4291 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4292 if (decl
!= lp
->post_landing_pad
)
4294 error ("incorrect setting of landing pad number");
4302 /* Verify the GIMPLE statement STMT. Returns true if there is an
4303 error, otherwise false. */
4306 verify_gimple_stmt (gimple stmt
)
4308 switch (gimple_code (stmt
))
4311 return verify_gimple_assign (stmt
);
4314 return verify_gimple_label (stmt
);
4317 return verify_gimple_call (stmt
);
4320 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4322 error ("invalid comparison code in gimple cond");
4325 if (!(!gimple_cond_true_label (stmt
)
4326 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4327 || !(!gimple_cond_false_label (stmt
)
4328 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4330 error ("invalid labels in gimple cond");
4334 return verify_gimple_comparison (boolean_type_node
,
4335 gimple_cond_lhs (stmt
),
4336 gimple_cond_rhs (stmt
));
4339 return verify_gimple_goto (stmt
);
4342 return verify_gimple_switch (stmt
);
4345 return verify_gimple_return (stmt
);
4350 case GIMPLE_TRANSACTION
:
4351 return verify_gimple_transaction (stmt
);
4353 /* Tuples that do not have tree operands. */
4355 case GIMPLE_PREDICT
:
4357 case GIMPLE_EH_DISPATCH
:
4358 case GIMPLE_EH_MUST_NOT_THROW
:
4362 /* OpenMP directives are validated by the FE and never operated
4363 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4364 non-gimple expressions when the main index variable has had
4365 its address taken. This does not affect the loop itself
4366 because the header of an GIMPLE_OMP_FOR is merely used to determine
4367 how to setup the parallel iteration. */
4371 return verify_gimple_debug (stmt
);
4378 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4379 and false otherwise. */
4382 verify_gimple_phi (gimple phi
)
4386 tree phi_result
= gimple_phi_result (phi
);
4391 error ("invalid PHI result");
4395 virtual_p
= virtual_operand_p (phi_result
);
4396 if (TREE_CODE (phi_result
) != SSA_NAME
4398 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4400 error ("invalid PHI result");
4404 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4406 tree t
= gimple_phi_arg_def (phi
, i
);
4410 error ("missing PHI def");
4414 /* Addressable variables do have SSA_NAMEs but they
4415 are not considered gimple values. */
4416 else if ((TREE_CODE (t
) == SSA_NAME
4417 && virtual_p
!= virtual_operand_p (t
))
4419 && (TREE_CODE (t
) != SSA_NAME
4420 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4422 && !is_gimple_val (t
)))
4424 error ("invalid PHI argument");
4425 debug_generic_expr (t
);
4428 #ifdef ENABLE_TYPES_CHECKING
4429 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4431 error ("incompatible types in PHI argument %u", i
);
4432 debug_generic_stmt (TREE_TYPE (phi_result
));
4433 debug_generic_stmt (TREE_TYPE (t
));
4442 /* Verify the GIMPLE statements inside the sequence STMTS. */
4445 verify_gimple_in_seq_2 (gimple_seq stmts
)
4447 gimple_stmt_iterator ittr
;
4450 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4452 gimple stmt
= gsi_stmt (ittr
);
4454 switch (gimple_code (stmt
))
4457 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4461 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4462 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4465 case GIMPLE_EH_FILTER
:
4466 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4469 case GIMPLE_EH_ELSE
:
4470 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4471 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4475 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4478 case GIMPLE_TRANSACTION
:
4479 err
|= verify_gimple_transaction (stmt
);
4484 bool err2
= verify_gimple_stmt (stmt
);
4486 debug_gimple_stmt (stmt
);
4495 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4496 is a problem, otherwise false. */
4499 verify_gimple_transaction (gimple stmt
)
4501 tree lab
= gimple_transaction_label (stmt
);
4502 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4504 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4508 /* Verify the GIMPLE statements inside the statement list STMTS. */
4511 verify_gimple_in_seq (gimple_seq stmts
)
4513 timevar_push (TV_TREE_STMT_VERIFY
);
4514 if (verify_gimple_in_seq_2 (stmts
))
4515 internal_error ("verify_gimple failed");
4516 timevar_pop (TV_TREE_STMT_VERIFY
);
4519 /* Return true when the T can be shared. */
4522 tree_node_can_be_shared (tree t
)
4524 if (IS_TYPE_OR_DECL_P (t
)
4525 || is_gimple_min_invariant (t
)
4526 || TREE_CODE (t
) == SSA_NAME
4527 || t
== error_mark_node
4528 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4531 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4540 /* Called via walk_tree. Verify tree sharing. */
4543 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4545 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4547 if (tree_node_can_be_shared (*tp
))
4549 *walk_subtrees
= false;
4553 if (pointer_set_insert (visited
, *tp
))
4559 /* Called via walk_gimple_stmt. Verify tree sharing. */
4562 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4564 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4565 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4568 static bool eh_error_found
;
4570 verify_eh_throw_stmt_node (void **slot
, void *data
)
4572 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4573 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4575 if (!pointer_set_contains (visited
, node
->stmt
))
4577 error ("dead STMT in EH table");
4578 debug_gimple_stmt (node
->stmt
);
4579 eh_error_found
= true;
4584 /* Verify if the location LOCs block is in BLOCKS. */
4587 verify_location (pointer_set_t
*blocks
, location_t loc
)
4589 tree block
= LOCATION_BLOCK (loc
);
4590 if (block
!= NULL_TREE
4591 && !pointer_set_contains (blocks
, block
))
4593 error ("location references block not in block tree");
4596 if (block
!= NULL_TREE
)
4597 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4601 /* Called via walk_tree. Verify that expressions have no blocks. */
4604 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4608 *walk_subtrees
= false;
4612 location_t loc
= EXPR_LOCATION (*tp
);
4613 if (LOCATION_BLOCK (loc
) != NULL
)
4619 /* Called via walk_tree. Verify locations of expressions. */
4622 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4624 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4626 if (TREE_CODE (*tp
) == VAR_DECL
4627 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4629 tree t
= DECL_DEBUG_EXPR (*tp
);
4630 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4634 if ((TREE_CODE (*tp
) == VAR_DECL
4635 || TREE_CODE (*tp
) == PARM_DECL
4636 || TREE_CODE (*tp
) == RESULT_DECL
)
4637 && DECL_HAS_VALUE_EXPR_P (*tp
))
4639 tree t
= DECL_VALUE_EXPR (*tp
);
4640 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4647 *walk_subtrees
= false;
4651 location_t loc
= EXPR_LOCATION (*tp
);
4652 if (verify_location (blocks
, loc
))
4658 /* Called via walk_gimple_op. Verify locations of expressions. */
4661 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4663 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4664 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4667 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4670 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4673 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4675 pointer_set_insert (blocks
, t
);
4676 collect_subblocks (blocks
, t
);
4680 /* Verify the GIMPLE statements in the CFG of FN. */
4683 verify_gimple_in_cfg (struct function
*fn
)
4687 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4689 timevar_push (TV_TREE_STMT_VERIFY
);
4690 visited
= pointer_set_create ();
4691 visited_stmts
= pointer_set_create ();
4693 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4694 blocks
= pointer_set_create ();
4695 if (DECL_INITIAL (fn
->decl
))
4697 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4698 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4701 FOR_EACH_BB_FN (bb
, fn
)
4703 gimple_stmt_iterator gsi
;
4705 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4707 gimple phi
= gsi_stmt (gsi
);
4711 pointer_set_insert (visited_stmts
, phi
);
4713 if (gimple_bb (phi
) != bb
)
4715 error ("gimple_bb (phi) is set to a wrong basic block");
4719 err2
|= verify_gimple_phi (phi
);
4721 /* Only PHI arguments have locations. */
4722 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4724 error ("PHI node with location");
4728 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4730 tree arg
= gimple_phi_arg_def (phi
, i
);
4731 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4735 error ("incorrect sharing of tree nodes");
4736 debug_generic_expr (addr
);
4739 location_t loc
= gimple_phi_arg_location (phi
, i
);
4740 if (virtual_operand_p (gimple_phi_result (phi
))
4741 && loc
!= UNKNOWN_LOCATION
)
4743 error ("virtual PHI with argument locations");
4746 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4749 debug_generic_expr (addr
);
4752 err2
|= verify_location (blocks
, loc
);
4756 debug_gimple_stmt (phi
);
4760 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4762 gimple stmt
= gsi_stmt (gsi
);
4764 struct walk_stmt_info wi
;
4768 pointer_set_insert (visited_stmts
, stmt
);
4770 if (gimple_bb (stmt
) != bb
)
4772 error ("gimple_bb (stmt) is set to a wrong basic block");
4776 err2
|= verify_gimple_stmt (stmt
);
4777 err2
|= verify_location (blocks
, gimple_location (stmt
));
4779 memset (&wi
, 0, sizeof (wi
));
4780 wi
.info
= (void *) visited
;
4781 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4784 error ("incorrect sharing of tree nodes");
4785 debug_generic_expr (addr
);
4789 memset (&wi
, 0, sizeof (wi
));
4790 wi
.info
= (void *) blocks
;
4791 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4794 debug_generic_expr (addr
);
4798 /* ??? Instead of not checking these stmts at all the walker
4799 should know its context via wi. */
4800 if (!is_gimple_debug (stmt
)
4801 && !is_gimple_omp (stmt
))
4803 memset (&wi
, 0, sizeof (wi
));
4804 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4807 debug_generic_expr (addr
);
4808 inform (gimple_location (stmt
), "in statement");
4813 /* If the statement is marked as part of an EH region, then it is
4814 expected that the statement could throw. Verify that when we
4815 have optimizations that simplify statements such that we prove
4816 that they cannot throw, that we update other data structures
4818 lp_nr
= lookup_stmt_eh_lp (stmt
);
4821 if (!stmt_could_throw_p (stmt
))
4823 error ("statement marked for throw, but doesn%'t");
4827 && !gsi_one_before_end_p (gsi
)
4828 && stmt_can_throw_internal (stmt
))
4830 error ("statement marked for throw in middle of block");
4836 debug_gimple_stmt (stmt
);
4841 eh_error_found
= false;
4842 if (get_eh_throw_stmt_table (cfun
))
4843 htab_traverse (get_eh_throw_stmt_table (cfun
),
4844 verify_eh_throw_stmt_node
,
4847 if (err
|| eh_error_found
)
4848 internal_error ("verify_gimple failed");
4850 pointer_set_destroy (visited
);
4851 pointer_set_destroy (visited_stmts
);
4852 pointer_set_destroy (blocks
);
4853 verify_histograms ();
4854 timevar_pop (TV_TREE_STMT_VERIFY
);
4858 /* Verifies that the flow information is OK. */
4861 gimple_verify_flow_info (void)
4865 gimple_stmt_iterator gsi
;
4870 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4871 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4873 error ("ENTRY_BLOCK has IL associated with it");
4877 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4878 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4880 error ("EXIT_BLOCK has IL associated with it");
4884 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4885 if (e
->flags
& EDGE_FALLTHRU
)
4887 error ("fallthru to exit from bb %d", e
->src
->index
);
4893 bool found_ctrl_stmt
= false;
4897 /* Skip labels on the start of basic block. */
4898 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4901 gimple prev_stmt
= stmt
;
4903 stmt
= gsi_stmt (gsi
);
4905 if (gimple_code (stmt
) != GIMPLE_LABEL
)
4908 label
= gimple_label_label (stmt
);
4909 if (prev_stmt
&& DECL_NONLOCAL (label
))
4911 error ("nonlocal label ");
4912 print_generic_expr (stderr
, label
, 0);
4913 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4918 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
4920 error ("EH landing pad label ");
4921 print_generic_expr (stderr
, label
, 0);
4922 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4927 if (label_to_block (label
) != bb
)
4930 print_generic_expr (stderr
, label
, 0);
4931 fprintf (stderr
, " to block does not match in bb %d",
4936 if (decl_function_context (label
) != current_function_decl
)
4939 print_generic_expr (stderr
, label
, 0);
4940 fprintf (stderr
, " has incorrect context in bb %d",
4946 /* Verify that body of basic block BB is free of control flow. */
4947 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
4949 gimple stmt
= gsi_stmt (gsi
);
4951 if (found_ctrl_stmt
)
4953 error ("control flow in the middle of basic block %d",
4958 if (stmt_ends_bb_p (stmt
))
4959 found_ctrl_stmt
= true;
4961 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4964 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
4965 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4970 gsi
= gsi_last_bb (bb
);
4971 if (gsi_end_p (gsi
))
4974 stmt
= gsi_stmt (gsi
);
4976 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4979 err
|= verify_eh_edges (stmt
);
4981 if (is_ctrl_stmt (stmt
))
4983 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4984 if (e
->flags
& EDGE_FALLTHRU
)
4986 error ("fallthru edge after a control statement in bb %d",
4992 if (gimple_code (stmt
) != GIMPLE_COND
)
4994 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4995 after anything else but if statement. */
4996 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4997 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
4999 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5005 switch (gimple_code (stmt
))
5012 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5016 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5017 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5018 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5019 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5020 || EDGE_COUNT (bb
->succs
) >= 3)
5022 error ("wrong outgoing edge flags at end of bb %d",
5030 if (simple_goto_p (stmt
))
5032 error ("explicit goto at end of bb %d", bb
->index
);
5037 /* FIXME. We should double check that the labels in the
5038 destination blocks have their address taken. */
5039 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5040 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5041 | EDGE_FALSE_VALUE
))
5042 || !(e
->flags
& EDGE_ABNORMAL
))
5044 error ("wrong outgoing edge flags at end of bb %d",
5052 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5054 /* ... fallthru ... */
5056 if (!single_succ_p (bb
)
5057 || (single_succ_edge (bb
)->flags
5058 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5059 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5061 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5064 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5066 error ("return edge does not point to exit in bb %d",
5078 n
= gimple_switch_num_labels (stmt
);
5080 /* Mark all the destination basic blocks. */
5081 for (i
= 0; i
< n
; ++i
)
5083 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5084 basic_block label_bb
= label_to_block (lab
);
5085 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5086 label_bb
->aux
= (void *)1;
5089 /* Verify that the case labels are sorted. */
5090 prev
= gimple_switch_label (stmt
, 0);
5091 for (i
= 1; i
< n
; ++i
)
5093 tree c
= gimple_switch_label (stmt
, i
);
5096 error ("found default case not at the start of "
5102 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5104 error ("case labels not sorted: ");
5105 print_generic_expr (stderr
, prev
, 0);
5106 fprintf (stderr
," is greater than ");
5107 print_generic_expr (stderr
, c
, 0);
5108 fprintf (stderr
," but comes before it.\n");
5113 /* VRP will remove the default case if it can prove it will
5114 never be executed. So do not verify there always exists
5115 a default case here. */
5117 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5121 error ("extra outgoing edge %d->%d",
5122 bb
->index
, e
->dest
->index
);
5126 e
->dest
->aux
= (void *)2;
5127 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5128 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5130 error ("wrong outgoing edge flags at end of bb %d",
5136 /* Check that we have all of them. */
5137 for (i
= 0; i
< n
; ++i
)
5139 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5140 basic_block label_bb
= label_to_block (lab
);
5142 if (label_bb
->aux
!= (void *)2)
5144 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5149 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5150 e
->dest
->aux
= (void *)0;
5154 case GIMPLE_EH_DISPATCH
:
5155 err
|= verify_eh_dispatch_edge (stmt
);
5163 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5164 verify_dominators (CDI_DOMINATORS
);
5170 /* Updates phi nodes after creating a forwarder block joined
5171 by edge FALLTHRU. */
5174 gimple_make_forwarder_block (edge fallthru
)
5178 basic_block dummy
, bb
;
5180 gimple_stmt_iterator gsi
;
5182 dummy
= fallthru
->src
;
5183 bb
= fallthru
->dest
;
5185 if (single_pred_p (bb
))
5188 /* If we redirected a branch we must create new PHI nodes at the
5190 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5192 gimple phi
, new_phi
;
5194 phi
= gsi_stmt (gsi
);
5195 var
= gimple_phi_result (phi
);
5196 new_phi
= create_phi_node (var
, bb
);
5197 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5198 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5202 /* Add the arguments we have stored on edges. */
5203 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5208 flush_pending_stmts (e
);
5213 /* Return a non-special label in the head of basic block BLOCK.
5214 Create one if it doesn't exist. */
5217 gimple_block_label (basic_block bb
)
5219 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5224 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5226 stmt
= gsi_stmt (i
);
5227 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5229 label
= gimple_label_label (stmt
);
5230 if (!DECL_NONLOCAL (label
))
5233 gsi_move_before (&i
, &s
);
5238 label
= create_artificial_label (UNKNOWN_LOCATION
);
5239 stmt
= gimple_build_label (label
);
5240 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5245 /* Attempt to perform edge redirection by replacing a possibly complex
5246 jump instruction by a goto or by removing the jump completely.
5247 This can apply only if all edges now point to the same block. The
5248 parameters and return values are equivalent to
5249 redirect_edge_and_branch. */
5252 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5254 basic_block src
= e
->src
;
5255 gimple_stmt_iterator i
;
5258 /* We can replace or remove a complex jump only when we have exactly
5260 if (EDGE_COUNT (src
->succs
) != 2
5261 /* Verify that all targets will be TARGET. Specifically, the
5262 edge that is not E must also go to TARGET. */
5263 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5266 i
= gsi_last_bb (src
);
5270 stmt
= gsi_stmt (i
);
5272 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5274 gsi_remove (&i
, true);
5275 e
= ssa_redirect_edge (e
, target
);
5276 e
->flags
= EDGE_FALLTHRU
;
5284 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5285 edge representing the redirected branch. */
5288 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5290 basic_block bb
= e
->src
;
5291 gimple_stmt_iterator gsi
;
5295 if (e
->flags
& EDGE_ABNORMAL
)
5298 if (e
->dest
== dest
)
5301 if (e
->flags
& EDGE_EH
)
5302 return redirect_eh_edge (e
, dest
);
5304 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5306 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5311 gsi
= gsi_last_bb (bb
);
5312 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5314 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5317 /* For COND_EXPR, we only need to redirect the edge. */
5321 /* No non-abnormal edges should lead from a non-simple goto, and
5322 simple ones should be represented implicitly. */
5327 tree label
= gimple_block_label (dest
);
5328 tree cases
= get_cases_for_edge (e
, stmt
);
5330 /* If we have a list of cases associated with E, then use it
5331 as it's a lot faster than walking the entire case vector. */
5334 edge e2
= find_edge (e
->src
, dest
);
5341 CASE_LABEL (cases
) = label
;
5342 cases
= CASE_CHAIN (cases
);
5345 /* If there was already an edge in the CFG, then we need
5346 to move all the cases associated with E to E2. */
5349 tree cases2
= get_cases_for_edge (e2
, stmt
);
5351 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5352 CASE_CHAIN (cases2
) = first
;
5354 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5358 size_t i
, n
= gimple_switch_num_labels (stmt
);
5360 for (i
= 0; i
< n
; i
++)
5362 tree elt
= gimple_switch_label (stmt
, i
);
5363 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5364 CASE_LABEL (elt
) = label
;
5372 int i
, n
= gimple_asm_nlabels (stmt
);
5375 for (i
= 0; i
< n
; ++i
)
5377 tree cons
= gimple_asm_label_op (stmt
, i
);
5378 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5381 label
= gimple_block_label (dest
);
5382 TREE_VALUE (cons
) = label
;
5386 /* If we didn't find any label matching the former edge in the
5387 asm labels, we must be redirecting the fallthrough
5389 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5394 gsi_remove (&gsi
, true);
5395 e
->flags
|= EDGE_FALLTHRU
;
5398 case GIMPLE_OMP_RETURN
:
5399 case GIMPLE_OMP_CONTINUE
:
5400 case GIMPLE_OMP_SECTIONS_SWITCH
:
5401 case GIMPLE_OMP_FOR
:
5402 /* The edges from OMP constructs can be simply redirected. */
5405 case GIMPLE_EH_DISPATCH
:
5406 if (!(e
->flags
& EDGE_FALLTHRU
))
5407 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5410 case GIMPLE_TRANSACTION
:
5411 /* The ABORT edge has a stored label associated with it, otherwise
5412 the edges are simply redirectable. */
5414 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5418 /* Otherwise it must be a fallthru edge, and we don't need to
5419 do anything besides redirecting it. */
5420 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5424 /* Update/insert PHI nodes as necessary. */
5426 /* Now update the edges in the CFG. */
5427 e
= ssa_redirect_edge (e
, dest
);
5432 /* Returns true if it is possible to remove edge E by redirecting
5433 it to the destination of the other edge from E->src. */
5436 gimple_can_remove_branch_p (const_edge e
)
5438 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5444 /* Simple wrapper, as we can always redirect fallthru edges. */
5447 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5449 e
= gimple_redirect_edge_and_branch (e
, dest
);
5456 /* Splits basic block BB after statement STMT (but at least after the
5457 labels). If STMT is NULL, BB is split just after the labels. */
5460 gimple_split_block (basic_block bb
, void *stmt
)
5462 gimple_stmt_iterator gsi
;
5463 gimple_stmt_iterator gsi_tgt
;
5470 new_bb
= create_empty_bb (bb
);
5472 /* Redirect the outgoing edges. */
5473 new_bb
->succs
= bb
->succs
;
5475 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5478 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5481 /* Move everything from GSI to the new basic block. */
5482 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5484 act
= gsi_stmt (gsi
);
5485 if (gimple_code (act
) == GIMPLE_LABEL
)
5498 if (gsi_end_p (gsi
))
5501 /* Split the statement list - avoid re-creating new containers as this
5502 brings ugly quadratic memory consumption in the inliner.
5503 (We are still quadratic since we need to update stmt BB pointers,
5505 gsi_split_seq_before (&gsi
, &list
);
5506 set_bb_seq (new_bb
, list
);
5507 for (gsi_tgt
= gsi_start (list
);
5508 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5509 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5515 /* Moves basic block BB after block AFTER. */
5518 gimple_move_block_after (basic_block bb
, basic_block after
)
5520 if (bb
->prev_bb
== after
)
5524 link_block (bb
, after
);
5530 /* Return TRUE if block BB has no executable statements, otherwise return
5534 gimple_empty_block_p (basic_block bb
)
5536 /* BB must have no executable statements. */
5537 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5540 if (gsi_end_p (gsi
))
5542 if (is_gimple_debug (gsi_stmt (gsi
)))
5543 gsi_next_nondebug (&gsi
);
5544 return gsi_end_p (gsi
);
5548 /* Split a basic block if it ends with a conditional branch and if the
5549 other part of the block is not empty. */
5552 gimple_split_block_before_cond_jump (basic_block bb
)
5554 gimple last
, split_point
;
5555 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5556 if (gsi_end_p (gsi
))
5558 last
= gsi_stmt (gsi
);
5559 if (gimple_code (last
) != GIMPLE_COND
5560 && gimple_code (last
) != GIMPLE_SWITCH
)
5562 gsi_prev_nondebug (&gsi
);
5563 split_point
= gsi_stmt (gsi
);
5564 return split_block (bb
, split_point
)->dest
;
5568 /* Return true if basic_block can be duplicated. */
5571 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5576 /* Create a duplicate of the basic block BB. NOTE: This does not
5577 preserve SSA form. */
5580 gimple_duplicate_bb (basic_block bb
)
5583 gimple_stmt_iterator gsi
, gsi_tgt
;
5584 gimple_seq phis
= phi_nodes (bb
);
5585 gimple phi
, stmt
, copy
;
5587 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5589 /* Copy the PHI nodes. We ignore PHI node arguments here because
5590 the incoming edges have not been setup yet. */
5591 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5593 phi
= gsi_stmt (gsi
);
5594 copy
= create_phi_node (NULL_TREE
, new_bb
);
5595 create_new_def_for (gimple_phi_result (phi
), copy
,
5596 gimple_phi_result_ptr (copy
));
5597 gimple_set_uid (copy
, gimple_uid (phi
));
5600 gsi_tgt
= gsi_start_bb (new_bb
);
5601 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5603 def_operand_p def_p
;
5604 ssa_op_iter op_iter
;
5607 stmt
= gsi_stmt (gsi
);
5608 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5611 /* Don't duplicate label debug stmts. */
5612 if (gimple_debug_bind_p (stmt
)
5613 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5617 /* Create a new copy of STMT and duplicate STMT's virtual
5619 copy
= gimple_copy (stmt
);
5620 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5622 maybe_duplicate_eh_stmt (copy
, stmt
);
5623 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5625 /* When copying around a stmt writing into a local non-user
5626 aggregate, make sure it won't share stack slot with other
5628 lhs
= gimple_get_lhs (stmt
);
5629 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5631 tree base
= get_base_address (lhs
);
5633 && (TREE_CODE (base
) == VAR_DECL
5634 || TREE_CODE (base
) == RESULT_DECL
)
5635 && DECL_IGNORED_P (base
)
5636 && !TREE_STATIC (base
)
5637 && !DECL_EXTERNAL (base
)
5638 && (TREE_CODE (base
) != VAR_DECL
5639 || !DECL_HAS_VALUE_EXPR_P (base
)))
5640 DECL_NONSHAREABLE (base
) = 1;
5643 /* Create new names for all the definitions created by COPY and
5644 add replacement mappings for each new name. */
5645 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5646 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5652 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5655 add_phi_args_after_copy_edge (edge e_copy
)
5657 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5660 gimple phi
, phi_copy
;
5662 gimple_stmt_iterator psi
, psi_copy
;
5664 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5667 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5669 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5670 dest
= get_bb_original (e_copy
->dest
);
5672 dest
= e_copy
->dest
;
5674 e
= find_edge (bb
, dest
);
5677 /* During loop unrolling the target of the latch edge is copied.
5678 In this case we are not looking for edge to dest, but to
5679 duplicated block whose original was dest. */
5680 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5682 if ((e
->dest
->flags
& BB_DUPLICATED
)
5683 && get_bb_original (e
->dest
) == dest
)
5687 gcc_assert (e
!= NULL
);
5690 for (psi
= gsi_start_phis (e
->dest
),
5691 psi_copy
= gsi_start_phis (e_copy
->dest
);
5693 gsi_next (&psi
), gsi_next (&psi_copy
))
5695 phi
= gsi_stmt (psi
);
5696 phi_copy
= gsi_stmt (psi_copy
);
5697 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5698 add_phi_arg (phi_copy
, def
, e_copy
,
5699 gimple_phi_arg_location_from_edge (phi
, e
));
5704 /* Basic block BB_COPY was created by code duplication. Add phi node
5705 arguments for edges going out of BB_COPY. The blocks that were
5706 duplicated have BB_DUPLICATED set. */
5709 add_phi_args_after_copy_bb (basic_block bb_copy
)
5714 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5716 add_phi_args_after_copy_edge (e_copy
);
5720 /* Blocks in REGION_COPY array of length N_REGION were created by
5721 duplication of basic blocks. Add phi node arguments for edges
5722 going from these blocks. If E_COPY is not NULL, also add
5723 phi node arguments for its destination.*/
5726 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5731 for (i
= 0; i
< n_region
; i
++)
5732 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5734 for (i
= 0; i
< n_region
; i
++)
5735 add_phi_args_after_copy_bb (region_copy
[i
]);
5737 add_phi_args_after_copy_edge (e_copy
);
5739 for (i
= 0; i
< n_region
; i
++)
5740 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5743 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5744 important exit edge EXIT. By important we mean that no SSA name defined
5745 inside region is live over the other exit edges of the region. All entry
5746 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5747 to the duplicate of the region. Dominance and loop information is
5748 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5749 UPDATE_DOMINANCE is false then we assume that the caller will update the
5750 dominance information after calling this function. The new basic
5751 blocks are stored to REGION_COPY in the same order as they had in REGION,
5752 provided that REGION_COPY is not NULL.
5753 The function returns false if it is unable to copy the region,
5757 gimple_duplicate_sese_region (edge entry
, edge exit
,
5758 basic_block
*region
, unsigned n_region
,
5759 basic_block
*region_copy
,
5760 bool update_dominance
)
5763 bool free_region_copy
= false, copying_header
= false;
5764 struct loop
*loop
= entry
->dest
->loop_father
;
5766 vec
<basic_block
> doms
;
5768 int total_freq
= 0, entry_freq
= 0;
5769 gcov_type total_count
= 0, entry_count
= 0;
5771 if (!can_copy_bbs_p (region
, n_region
))
5774 /* Some sanity checking. Note that we do not check for all possible
5775 missuses of the functions. I.e. if you ask to copy something weird,
5776 it will work, but the state of structures probably will not be
5778 for (i
= 0; i
< n_region
; i
++)
5780 /* We do not handle subloops, i.e. all the blocks must belong to the
5782 if (region
[i
]->loop_father
!= loop
)
5785 if (region
[i
] != entry
->dest
5786 && region
[i
] == loop
->header
)
5790 set_loop_copy (loop
, loop
);
5792 /* In case the function is used for loop header copying (which is the primary
5793 use), ensure that EXIT and its copy will be new latch and entry edges. */
5794 if (loop
->header
== entry
->dest
)
5796 copying_header
= true;
5797 set_loop_copy (loop
, loop_outer (loop
));
5799 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5802 for (i
= 0; i
< n_region
; i
++)
5803 if (region
[i
] != exit
->src
5804 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5810 region_copy
= XNEWVEC (basic_block
, n_region
);
5811 free_region_copy
= true;
5814 initialize_original_copy_tables ();
5816 /* Record blocks outside the region that are dominated by something
5818 if (update_dominance
)
5821 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5824 if (entry
->dest
->count
)
5826 total_count
= entry
->dest
->count
;
5827 entry_count
= entry
->count
;
5828 /* Fix up corner cases, to avoid division by zero or creation of negative
5830 if (entry_count
> total_count
)
5831 entry_count
= total_count
;
5835 total_freq
= entry
->dest
->frequency
;
5836 entry_freq
= EDGE_FREQUENCY (entry
);
5837 /* Fix up corner cases, to avoid division by zero or creation of negative
5839 if (total_freq
== 0)
5841 else if (entry_freq
> total_freq
)
5842 entry_freq
= total_freq
;
5845 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5846 split_edge_bb_loc (entry
), update_dominance
);
5849 scale_bbs_frequencies_gcov_type (region
, n_region
,
5850 total_count
- entry_count
,
5852 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5857 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5859 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
5864 loop
->header
= exit
->dest
;
5865 loop
->latch
= exit
->src
;
5868 /* Redirect the entry and add the phi node arguments. */
5869 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5870 gcc_assert (redirected
!= NULL
);
5871 flush_pending_stmts (entry
);
5873 /* Concerning updating of dominators: We must recount dominators
5874 for entry block and its copy. Anything that is outside of the
5875 region, but was dominated by something inside needs recounting as
5877 if (update_dominance
)
5879 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5880 doms
.safe_push (get_bb_original (entry
->dest
));
5881 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5885 /* Add the other PHI node arguments. */
5886 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5888 if (free_region_copy
)
5891 free_original_copy_tables ();
5895 /* Checks if BB is part of the region defined by N_REGION BBS. */
5897 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
5901 for (n
= 0; n
< n_region
; n
++)
5909 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5910 are stored to REGION_COPY in the same order in that they appear
5911 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5912 the region, EXIT an exit from it. The condition guarding EXIT
5913 is moved to ENTRY. Returns true if duplication succeeds, false
5939 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
5940 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
5941 basic_block
*region_copy ATTRIBUTE_UNUSED
)
5944 bool free_region_copy
= false;
5945 struct loop
*loop
= exit
->dest
->loop_father
;
5946 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5947 basic_block switch_bb
, entry_bb
, nentry_bb
;
5948 vec
<basic_block
> doms
;
5949 int total_freq
= 0, exit_freq
= 0;
5950 gcov_type total_count
= 0, exit_count
= 0;
5951 edge exits
[2], nexits
[2], e
;
5952 gimple_stmt_iterator gsi
;
5955 basic_block exit_bb
;
5956 gimple_stmt_iterator psi
;
5959 struct loop
*target
, *aloop
, *cloop
;
5961 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5963 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5965 if (!can_copy_bbs_p (region
, n_region
))
5968 initialize_original_copy_tables ();
5969 set_loop_copy (orig_loop
, loop
);
5972 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
5974 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
5976 cloop
= duplicate_loop (aloop
, target
);
5977 duplicate_subloops (aloop
, cloop
);
5983 region_copy
= XNEWVEC (basic_block
, n_region
);
5984 free_region_copy
= true;
5987 gcc_assert (!need_ssa_update_p (cfun
));
5989 /* Record blocks outside the region that are dominated by something
5991 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5993 if (exit
->src
->count
)
5995 total_count
= exit
->src
->count
;
5996 exit_count
= exit
->count
;
5997 /* Fix up corner cases, to avoid division by zero or creation of negative
5999 if (exit_count
> total_count
)
6000 exit_count
= total_count
;
6004 total_freq
= exit
->src
->frequency
;
6005 exit_freq
= EDGE_FREQUENCY (exit
);
6006 /* Fix up corner cases, to avoid division by zero or creation of negative
6008 if (total_freq
== 0)
6010 if (exit_freq
> total_freq
)
6011 exit_freq
= total_freq
;
6014 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6015 split_edge_bb_loc (exit
), true);
6018 scale_bbs_frequencies_gcov_type (region
, n_region
,
6019 total_count
- exit_count
,
6021 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6026 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6028 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6031 /* Create the switch block, and put the exit condition to it. */
6032 entry_bb
= entry
->dest
;
6033 nentry_bb
= get_bb_copy (entry_bb
);
6034 if (!last_stmt (entry
->src
)
6035 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6036 switch_bb
= entry
->src
;
6038 switch_bb
= split_edge (entry
);
6039 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6041 gsi
= gsi_last_bb (switch_bb
);
6042 cond_stmt
= last_stmt (exit
->src
);
6043 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6044 cond_stmt
= gimple_copy (cond_stmt
);
6046 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6048 sorig
= single_succ_edge (switch_bb
);
6049 sorig
->flags
= exits
[1]->flags
;
6050 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6052 /* Register the new edge from SWITCH_BB in loop exit lists. */
6053 rescan_loop_exit (snew
, true, false);
6055 /* Add the PHI node arguments. */
6056 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6058 /* Get rid of now superfluous conditions and associated edges (and phi node
6060 exit_bb
= exit
->dest
;
6062 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6063 PENDING_STMT (e
) = NULL
;
6065 /* The latch of ORIG_LOOP was copied, and so was the backedge
6066 to the original header. We redirect this backedge to EXIT_BB. */
6067 for (i
= 0; i
< n_region
; i
++)
6068 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6070 gcc_assert (single_succ_edge (region_copy
[i
]));
6071 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6072 PENDING_STMT (e
) = NULL
;
6073 for (psi
= gsi_start_phis (exit_bb
);
6077 phi
= gsi_stmt (psi
);
6078 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6079 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6082 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6083 PENDING_STMT (e
) = NULL
;
6085 /* Anything that is outside of the region, but was dominated by something
6086 inside needs to update dominance info. */
6087 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6089 /* Update the SSA web. */
6090 update_ssa (TODO_update_ssa
);
6092 if (free_region_copy
)
6095 free_original_copy_tables ();
6099 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6100 adding blocks when the dominator traversal reaches EXIT. This
6101 function silently assumes that ENTRY strictly dominates EXIT. */
6104 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6105 vec
<basic_block
> *bbs_p
)
6109 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6111 son
= next_dom_son (CDI_DOMINATORS
, son
))
6113 bbs_p
->safe_push (son
);
6115 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6119 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6120 The duplicates are recorded in VARS_MAP. */
6123 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6126 tree t
= *tp
, new_t
;
6127 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6130 if (DECL_CONTEXT (t
) == to_context
)
6133 loc
= pointer_map_contains (vars_map
, t
);
6137 loc
= pointer_map_insert (vars_map
, t
);
6141 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6142 add_local_decl (f
, new_t
);
6146 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6147 new_t
= copy_node (t
);
6149 DECL_CONTEXT (new_t
) = to_context
;
6154 new_t
= (tree
) *loc
;
6160 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6161 VARS_MAP maps old ssa names and var_decls to the new ones. */
6164 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
6170 gcc_assert (!virtual_operand_p (name
));
6172 loc
= pointer_map_contains (vars_map
, name
);
6176 tree decl
= SSA_NAME_VAR (name
);
6179 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6180 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6181 decl
, SSA_NAME_DEF_STMT (name
));
6182 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6183 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6187 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6188 name
, SSA_NAME_DEF_STMT (name
));
6190 loc
= pointer_map_insert (vars_map
, name
);
6194 new_name
= (tree
) *loc
;
6205 struct pointer_map_t
*vars_map
;
6206 htab_t new_label_map
;
6207 struct pointer_map_t
*eh_map
;
6211 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6212 contained in *TP if it has been ORIG_BLOCK previously and change the
6213 DECL_CONTEXT of every local variable referenced in *TP. */
6216 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6218 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6219 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6224 tree block
= TREE_BLOCK (t
);
6225 if (block
== p
->orig_block
6226 || (p
->orig_block
== NULL_TREE
6227 && block
!= NULL_TREE
))
6228 TREE_SET_BLOCK (t
, p
->new_block
);
6229 #ifdef ENABLE_CHECKING
6230 else if (block
!= NULL_TREE
)
6232 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6233 block
= BLOCK_SUPERCONTEXT (block
);
6234 gcc_assert (block
== p
->orig_block
);
6238 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6240 if (TREE_CODE (t
) == SSA_NAME
)
6241 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6242 else if (TREE_CODE (t
) == LABEL_DECL
)
6244 if (p
->new_label_map
)
6246 struct tree_map in
, *out
;
6248 out
= (struct tree_map
*)
6249 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6254 DECL_CONTEXT (t
) = p
->to_context
;
6256 else if (p
->remap_decls_p
)
6258 /* Replace T with its duplicate. T should no longer appear in the
6259 parent function, so this looks wasteful; however, it may appear
6260 in referenced_vars, and more importantly, as virtual operands of
6261 statements, and in alias lists of other variables. It would be
6262 quite difficult to expunge it from all those places. ??? It might
6263 suffice to do this for addressable variables. */
6264 if ((TREE_CODE (t
) == VAR_DECL
6265 && !is_global_var (t
))
6266 || TREE_CODE (t
) == CONST_DECL
)
6267 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6271 else if (TYPE_P (t
))
6277 /* Helper for move_stmt_r. Given an EH region number for the source
6278 function, map that to the duplicate EH regio number in the dest. */
6281 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6283 eh_region old_r
, new_r
;
6286 old_r
= get_eh_region_from_number (old_nr
);
6287 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6288 new_r
= (eh_region
) *slot
;
6290 return new_r
->index
;
6293 /* Similar, but operate on INTEGER_CSTs. */
6296 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6300 old_nr
= tree_to_shwi (old_t_nr
);
6301 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6303 return build_int_cst (integer_type_node
, new_nr
);
6306 /* Like move_stmt_op, but for gimple statements.
6308 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6309 contained in the current statement in *GSI_P and change the
6310 DECL_CONTEXT of every local variable referenced in the current
6314 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6315 struct walk_stmt_info
*wi
)
6317 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6318 gimple stmt
= gsi_stmt (*gsi_p
);
6319 tree block
= gimple_block (stmt
);
6321 if (block
== p
->orig_block
6322 || (p
->orig_block
== NULL_TREE
6323 && block
!= NULL_TREE
))
6324 gimple_set_block (stmt
, p
->new_block
);
6326 switch (gimple_code (stmt
))
6329 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6331 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6332 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6333 switch (DECL_FUNCTION_CODE (fndecl
))
6335 case BUILT_IN_EH_COPY_VALUES
:
6336 r
= gimple_call_arg (stmt
, 1);
6337 r
= move_stmt_eh_region_tree_nr (r
, p
);
6338 gimple_call_set_arg (stmt
, 1, r
);
6341 case BUILT_IN_EH_POINTER
:
6342 case BUILT_IN_EH_FILTER
:
6343 r
= gimple_call_arg (stmt
, 0);
6344 r
= move_stmt_eh_region_tree_nr (r
, p
);
6345 gimple_call_set_arg (stmt
, 0, r
);
6356 int r
= gimple_resx_region (stmt
);
6357 r
= move_stmt_eh_region_nr (r
, p
);
6358 gimple_resx_set_region (stmt
, r
);
6362 case GIMPLE_EH_DISPATCH
:
6364 int r
= gimple_eh_dispatch_region (stmt
);
6365 r
= move_stmt_eh_region_nr (r
, p
);
6366 gimple_eh_dispatch_set_region (stmt
, r
);
6370 case GIMPLE_OMP_RETURN
:
6371 case GIMPLE_OMP_CONTINUE
:
6374 if (is_gimple_omp (stmt
))
6376 /* Do not remap variables inside OMP directives. Variables
6377 referenced in clauses and directive header belong to the
6378 parent function and should not be moved into the child
6380 bool save_remap_decls_p
= p
->remap_decls_p
;
6381 p
->remap_decls_p
= false;
6382 *handled_ops_p
= true;
6384 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6387 p
->remap_decls_p
= save_remap_decls_p
;
6395 /* Move basic block BB from function CFUN to function DEST_FN. The
6396 block is moved out of the original linked list and placed after
6397 block AFTER in the new list. Also, the block is removed from the
6398 original array of blocks and placed in DEST_FN's array of blocks.
6399 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6400 updated to reflect the moved edges.
6402 The local variables are remapped to new instances, VARS_MAP is used
6403 to record the mapping. */
6406 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6407 basic_block after
, bool update_edge_count_p
,
6408 struct move_stmt_d
*d
)
6410 struct control_flow_graph
*cfg
;
6413 gimple_stmt_iterator si
;
6414 unsigned old_len
, new_len
;
6416 /* Remove BB from dominance structures. */
6417 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6419 /* Move BB from its current loop to the copy in the new function. */
6422 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6424 bb
->loop_father
= new_loop
;
6427 /* Link BB to the new linked list. */
6428 move_block_after (bb
, after
);
6430 /* Update the edge count in the corresponding flowgraphs. */
6431 if (update_edge_count_p
)
6432 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6434 cfun
->cfg
->x_n_edges
--;
6435 dest_cfun
->cfg
->x_n_edges
++;
6438 /* Remove BB from the original basic block array. */
6439 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6440 cfun
->cfg
->x_n_basic_blocks
--;
6442 /* Grow DEST_CFUN's basic block array if needed. */
6443 cfg
= dest_cfun
->cfg
;
6444 cfg
->x_n_basic_blocks
++;
6445 if (bb
->index
>= cfg
->x_last_basic_block
)
6446 cfg
->x_last_basic_block
= bb
->index
+ 1;
6448 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6449 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6451 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6452 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6455 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6457 /* Remap the variables in phi nodes. */
6458 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6460 gimple phi
= gsi_stmt (si
);
6462 tree op
= PHI_RESULT (phi
);
6466 if (virtual_operand_p (op
))
6468 /* Remove the phi nodes for virtual operands (alias analysis will be
6469 run for the new function, anyway). */
6470 remove_phi_node (&si
, true);
6474 SET_PHI_RESULT (phi
,
6475 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6476 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6478 op
= USE_FROM_PTR (use
);
6479 if (TREE_CODE (op
) == SSA_NAME
)
6480 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6483 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6485 location_t locus
= gimple_phi_arg_location (phi
, i
);
6486 tree block
= LOCATION_BLOCK (locus
);
6488 if (locus
== UNKNOWN_LOCATION
)
6490 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6492 if (d
->new_block
== NULL_TREE
)
6493 locus
= LOCATION_LOCUS (locus
);
6495 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6496 gimple_phi_arg_set_location (phi
, i
, locus
);
6503 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6505 gimple stmt
= gsi_stmt (si
);
6506 struct walk_stmt_info wi
;
6508 memset (&wi
, 0, sizeof (wi
));
6510 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6512 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6514 tree label
= gimple_label_label (stmt
);
6515 int uid
= LABEL_DECL_UID (label
);
6517 gcc_assert (uid
> -1);
6519 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6520 if (old_len
<= (unsigned) uid
)
6522 new_len
= 3 * uid
/ 2 + 1;
6523 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6526 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6527 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6529 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6531 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6532 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6535 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6536 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6538 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6539 gimple_remove_stmt_histograms (cfun
, stmt
);
6541 /* We cannot leave any operands allocated from the operand caches of
6542 the current function. */
6543 free_stmt_operands (cfun
, stmt
);
6544 push_cfun (dest_cfun
);
6549 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6550 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6552 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6553 if (d
->orig_block
== NULL_TREE
6554 || block
== d
->orig_block
)
6555 e
->goto_locus
= d
->new_block
?
6556 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6557 LOCATION_LOCUS (e
->goto_locus
);
6561 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6562 the outermost EH region. Use REGION as the incoming base EH region. */
6565 find_outermost_region_in_block (struct function
*src_cfun
,
6566 basic_block bb
, eh_region region
)
6568 gimple_stmt_iterator si
;
6570 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6572 gimple stmt
= gsi_stmt (si
);
6573 eh_region stmt_region
;
6576 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6577 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6581 region
= stmt_region
;
6582 else if (stmt_region
!= region
)
6584 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6585 gcc_assert (region
!= NULL
);
6594 new_label_mapper (tree decl
, void *data
)
6596 htab_t hash
= (htab_t
) data
;
6600 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6602 m
= XNEW (struct tree_map
);
6603 m
->hash
= DECL_UID (decl
);
6604 m
->base
.from
= decl
;
6605 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6606 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6607 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6608 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6610 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6611 gcc_assert (*slot
== NULL
);
6618 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6622 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6627 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6630 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6632 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6635 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6637 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6638 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6640 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6645 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6646 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6649 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6653 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6656 /* Discard it from the old loop array. */
6657 (*get_loops (fn1
))[loop
->num
] = NULL
;
6659 /* Place it in the new loop array, assigning it a new number. */
6660 loop
->num
= number_of_loops (fn2
);
6661 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6663 /* Recurse to children. */
6664 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6665 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6668 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6669 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6670 single basic block in the original CFG and the new basic block is
6671 returned. DEST_CFUN must not have a CFG yet.
6673 Note that the region need not be a pure SESE region. Blocks inside
6674 the region may contain calls to abort/exit. The only restriction
6675 is that ENTRY_BB should be the only entry point and it must
6678 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6679 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6680 to the new function.
6682 All local variables referenced in the region are assumed to be in
6683 the corresponding BLOCK_VARS and unexpanded variable lists
6684 associated with DEST_CFUN. */
6687 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6688 basic_block exit_bb
, tree orig_block
)
6690 vec
<basic_block
> bbs
, dom_bbs
;
6691 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6692 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6693 struct function
*saved_cfun
= cfun
;
6694 int *entry_flag
, *exit_flag
;
6695 unsigned *entry_prob
, *exit_prob
;
6696 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6699 htab_t new_label_map
;
6700 struct pointer_map_t
*vars_map
, *eh_map
;
6701 struct loop
*loop
= entry_bb
->loop_father
;
6702 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6703 struct move_stmt_d d
;
6705 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6707 gcc_assert (entry_bb
!= exit_bb
6709 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6711 /* Collect all the blocks in the region. Manually add ENTRY_BB
6712 because it won't be added by dfs_enumerate_from. */
6714 bbs
.safe_push (entry_bb
);
6715 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6717 /* The blocks that used to be dominated by something in BBS will now be
6718 dominated by the new block. */
6719 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6723 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6724 the predecessor edges to ENTRY_BB and the successor edges to
6725 EXIT_BB so that we can re-attach them to the new basic block that
6726 will replace the region. */
6727 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6728 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6729 entry_flag
= XNEWVEC (int, num_entry_edges
);
6730 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6732 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6734 entry_prob
[i
] = e
->probability
;
6735 entry_flag
[i
] = e
->flags
;
6736 entry_pred
[i
++] = e
->src
;
6742 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6743 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6744 exit_flag
= XNEWVEC (int, num_exit_edges
);
6745 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6747 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6749 exit_prob
[i
] = e
->probability
;
6750 exit_flag
[i
] = e
->flags
;
6751 exit_succ
[i
++] = e
->dest
;
6763 /* Switch context to the child function to initialize DEST_FN's CFG. */
6764 gcc_assert (dest_cfun
->cfg
== NULL
);
6765 push_cfun (dest_cfun
);
6767 init_empty_tree_cfg ();
6769 /* Initialize EH information for the new function. */
6771 new_label_map
= NULL
;
6774 eh_region region
= NULL
;
6776 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6777 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6779 init_eh_for_function ();
6782 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6783 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6784 new_label_mapper
, new_label_map
);
6788 /* Initialize an empty loop tree. */
6789 struct loops
*loops
= ggc_alloc_cleared_loops ();
6790 init_loops_structure (dest_cfun
, loops
, 1);
6791 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6792 set_loops_for_fn (dest_cfun
, loops
);
6794 /* Move the outlined loop tree part. */
6795 num_nodes
= bbs
.length ();
6796 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6798 if (bb
->loop_father
->header
== bb
)
6800 struct loop
*this_loop
= bb
->loop_father
;
6801 struct loop
*outer
= loop_outer (this_loop
);
6803 /* If the SESE region contains some bbs ending with
6804 a noreturn call, those are considered to belong
6805 to the outermost loop in saved_cfun, rather than
6806 the entry_bb's loop_father. */
6810 num_nodes
-= this_loop
->num_nodes
;
6811 flow_loop_tree_node_remove (bb
->loop_father
);
6812 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6813 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6816 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6819 /* Remove loop exits from the outlined region. */
6820 if (loops_for_fn (saved_cfun
)->exits
)
6821 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6823 void **slot
= htab_find_slot_with_hash
6824 (loops_for_fn (saved_cfun
)->exits
, e
,
6825 htab_hash_pointer (e
), NO_INSERT
);
6827 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6832 /* Adjust the number of blocks in the tree root of the outlined part. */
6833 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6835 /* Setup a mapping to be used by move_block_to_fn. */
6836 loop
->aux
= current_loops
->tree_root
;
6837 loop0
->aux
= current_loops
->tree_root
;
6841 /* Move blocks from BBS into DEST_CFUN. */
6842 gcc_assert (bbs
.length () >= 2);
6843 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6844 vars_map
= pointer_map_create ();
6846 memset (&d
, 0, sizeof (d
));
6847 d
.orig_block
= orig_block
;
6848 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6849 d
.from_context
= cfun
->decl
;
6850 d
.to_context
= dest_cfun
->decl
;
6851 d
.vars_map
= vars_map
;
6852 d
.new_label_map
= new_label_map
;
6854 d
.remap_decls_p
= true;
6856 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6858 /* No need to update edge counts on the last block. It has
6859 already been updated earlier when we detached the region from
6860 the original CFG. */
6861 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6867 /* Loop sizes are no longer correct, fix them up. */
6868 loop
->num_nodes
-= num_nodes
;
6869 for (struct loop
*outer
= loop_outer (loop
);
6870 outer
; outer
= loop_outer (outer
))
6871 outer
->num_nodes
-= num_nodes
;
6872 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6874 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vect_loops
)
6877 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
6882 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
6884 dest_cfun
->has_simduid_loops
= true;
6886 if (aloop
->force_vect
)
6887 dest_cfun
->has_force_vect_loops
= true;
6891 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6895 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6897 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6898 = BLOCK_SUBBLOCKS (orig_block
);
6899 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6900 block
; block
= BLOCK_CHAIN (block
))
6901 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6902 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6905 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6906 vars_map
, dest_cfun
->decl
);
6909 htab_delete (new_label_map
);
6911 pointer_map_destroy (eh_map
);
6912 pointer_map_destroy (vars_map
);
6914 /* Rewire the entry and exit blocks. The successor to the entry
6915 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6916 the child function. Similarly, the predecessor of DEST_FN's
6917 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6918 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6919 various CFG manipulation function get to the right CFG.
6921 FIXME, this is silly. The CFG ought to become a parameter to
6923 push_cfun (dest_cfun
);
6924 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
6926 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
6929 /* Back in the original function, the SESE region has disappeared,
6930 create a new basic block in its place. */
6931 bb
= create_empty_bb (entry_pred
[0]);
6933 add_bb_to_loop (bb
, loop
);
6934 for (i
= 0; i
< num_entry_edges
; i
++)
6936 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6937 e
->probability
= entry_prob
[i
];
6940 for (i
= 0; i
< num_exit_edges
; i
++)
6942 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6943 e
->probability
= exit_prob
[i
];
6946 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6947 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
6948 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6966 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6970 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
6972 tree arg
, var
, old_current_fndecl
= current_function_decl
;
6973 struct function
*dsf
;
6974 bool ignore_topmost_bind
= false, any_var
= false;
6977 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
6978 && decl_is_tm_clone (fndecl
));
6979 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
6981 current_function_decl
= fndecl
;
6982 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
6984 arg
= DECL_ARGUMENTS (fndecl
);
6987 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6988 fprintf (file
, " ");
6989 print_generic_expr (file
, arg
, dump_flags
);
6990 if (flags
& TDF_VERBOSE
)
6991 print_node (file
, "", arg
, 4);
6992 if (DECL_CHAIN (arg
))
6993 fprintf (file
, ", ");
6994 arg
= DECL_CHAIN (arg
);
6996 fprintf (file
, ")\n");
6998 if (flags
& TDF_VERBOSE
)
6999 print_node (file
, "", fndecl
, 2);
7001 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7002 if (dsf
&& (flags
& TDF_EH
))
7003 dump_eh_tree (file
, dsf
);
7005 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7007 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7008 current_function_decl
= old_current_fndecl
;
7012 /* When GIMPLE is lowered, the variables are no longer available in
7013 BIND_EXPRs, so display them separately. */
7014 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7017 ignore_topmost_bind
= true;
7019 fprintf (file
, "{\n");
7020 if (!vec_safe_is_empty (fun
->local_decls
))
7021 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7023 print_generic_decl (file
, var
, flags
);
7024 if (flags
& TDF_VERBOSE
)
7025 print_node (file
, "", var
, 4);
7026 fprintf (file
, "\n");
7030 if (gimple_in_ssa_p (cfun
))
7031 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7033 tree name
= ssa_name (ix
);
7034 if (name
&& !SSA_NAME_VAR (name
))
7036 fprintf (file
, " ");
7037 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7038 fprintf (file
, " ");
7039 print_generic_expr (file
, name
, flags
);
7040 fprintf (file
, ";\n");
7047 if (fun
&& fun
->decl
== fndecl
7049 && basic_block_info_for_function (fun
))
7051 /* If the CFG has been built, emit a CFG-based dump. */
7052 if (!ignore_topmost_bind
)
7053 fprintf (file
, "{\n");
7055 if (any_var
&& n_basic_blocks_for_fn (fun
))
7056 fprintf (file
, "\n");
7058 FOR_EACH_BB_FN (bb
, fun
)
7059 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7061 fprintf (file
, "}\n");
7063 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7065 /* The function is now in GIMPLE form but the CFG has not been
7066 built yet. Emit the single sequence of GIMPLE statements
7067 that make up its body. */
7068 gimple_seq body
= gimple_body (fndecl
);
7070 if (gimple_seq_first_stmt (body
)
7071 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7072 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7073 print_gimple_seq (file
, body
, 0, flags
);
7076 if (!ignore_topmost_bind
)
7077 fprintf (file
, "{\n");
7080 fprintf (file
, "\n");
7082 print_gimple_seq (file
, body
, 2, flags
);
7083 fprintf (file
, "}\n");
7090 /* Make a tree based dump. */
7091 chain
= DECL_SAVED_TREE (fndecl
);
7092 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7094 if (ignore_topmost_bind
)
7096 chain
= BIND_EXPR_BODY (chain
);
7104 if (!ignore_topmost_bind
)
7105 fprintf (file
, "{\n");
7110 fprintf (file
, "\n");
7112 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7113 if (ignore_topmost_bind
)
7114 fprintf (file
, "}\n");
7117 if (flags
& TDF_ENUMERATE_LOCALS
)
7118 dump_enumerated_decls (file
, flags
);
7119 fprintf (file
, "\n\n");
7121 current_function_decl
= old_current_fndecl
;
7124 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7127 debug_function (tree fn
, int flags
)
7129 dump_function_to_file (fn
, stderr
, flags
);
7133 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7136 print_pred_bbs (FILE *file
, basic_block bb
)
7141 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7142 fprintf (file
, "bb_%d ", e
->src
->index
);
7146 /* Print on FILE the indexes for the successors of basic_block BB. */
7149 print_succ_bbs (FILE *file
, basic_block bb
)
7154 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7155 fprintf (file
, "bb_%d ", e
->dest
->index
);
7158 /* Print to FILE the basic block BB following the VERBOSITY level. */
7161 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7163 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7164 memset ((void *) s_indent
, ' ', (size_t) indent
);
7165 s_indent
[indent
] = '\0';
7167 /* Print basic_block's header. */
7170 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7171 print_pred_bbs (file
, bb
);
7172 fprintf (file
, "}, succs = {");
7173 print_succ_bbs (file
, bb
);
7174 fprintf (file
, "})\n");
7177 /* Print basic_block's body. */
7180 fprintf (file
, "%s {\n", s_indent
);
7181 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7182 fprintf (file
, "%s }\n", s_indent
);
7186 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7188 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7189 VERBOSITY level this outputs the contents of the loop, or just its
7193 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7201 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7202 memset ((void *) s_indent
, ' ', (size_t) indent
);
7203 s_indent
[indent
] = '\0';
7205 /* Print loop's header. */
7206 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7208 fprintf (file
, "header = %d", loop
->header
->index
);
7211 fprintf (file
, "deleted)\n");
7215 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7217 fprintf (file
, ", multiple latches");
7218 fprintf (file
, ", niter = ");
7219 print_generic_expr (file
, loop
->nb_iterations
, 0);
7221 if (loop
->any_upper_bound
)
7223 fprintf (file
, ", upper_bound = ");
7224 dump_double_int (file
, loop
->nb_iterations_upper_bound
, true);
7227 if (loop
->any_estimate
)
7229 fprintf (file
, ", estimate = ");
7230 dump_double_int (file
, loop
->nb_iterations_estimate
, true);
7232 fprintf (file
, ")\n");
7234 /* Print loop's body. */
7237 fprintf (file
, "%s{\n", s_indent
);
7239 if (bb
->loop_father
== loop
)
7240 print_loops_bb (file
, bb
, indent
, verbosity
);
7242 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7243 fprintf (file
, "%s}\n", s_indent
);
7247 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7248 spaces. Following VERBOSITY level this outputs the contents of the
7249 loop, or just its structure. */
7252 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7258 print_loop (file
, loop
, indent
, verbosity
);
7259 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7262 /* Follow a CFG edge from the entry point of the program, and on entry
7263 of a loop, pretty print the loop structure on FILE. */
7266 print_loops (FILE *file
, int verbosity
)
7270 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7271 if (bb
&& bb
->loop_father
)
7272 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7278 debug (struct loop
&ref
)
7280 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7284 debug (struct loop
*ptr
)
7289 fprintf (stderr
, "<nil>\n");
7292 /* Dump a loop verbosely. */
7295 debug_verbose (struct loop
&ref
)
7297 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7301 debug_verbose (struct loop
*ptr
)
7306 fprintf (stderr
, "<nil>\n");
7310 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7313 debug_loops (int verbosity
)
7315 print_loops (stderr
, verbosity
);
7318 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7321 debug_loop (struct loop
*loop
, int verbosity
)
7323 print_loop (stderr
, loop
, 0, verbosity
);
7326 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7330 debug_loop_num (unsigned num
, int verbosity
)
7332 debug_loop (get_loop (cfun
, num
), verbosity
);
7335 /* Return true if BB ends with a call, possibly followed by some
7336 instructions that must stay with the call. Return false,
7340 gimple_block_ends_with_call_p (basic_block bb
)
7342 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7343 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7347 /* Return true if BB ends with a conditional branch. Return false,
7351 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7353 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7354 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7358 /* Return true if we need to add fake edge to exit at statement T.
7359 Helper function for gimple_flow_call_edges_add. */
7362 need_fake_edge_p (gimple t
)
7364 tree fndecl
= NULL_TREE
;
7367 /* NORETURN and LONGJMP calls already have an edge to exit.
7368 CONST and PURE calls do not need one.
7369 We don't currently check for CONST and PURE here, although
7370 it would be a good idea, because those attributes are
7371 figured out from the RTL in mark_constant_function, and
7372 the counter incrementation code from -fprofile-arcs
7373 leads to different results from -fbranch-probabilities. */
7374 if (is_gimple_call (t
))
7376 fndecl
= gimple_call_fndecl (t
);
7377 call_flags
= gimple_call_flags (t
);
7380 if (is_gimple_call (t
)
7382 && DECL_BUILT_IN (fndecl
)
7383 && (call_flags
& ECF_NOTHROW
)
7384 && !(call_flags
& ECF_RETURNS_TWICE
)
7385 /* fork() doesn't really return twice, but the effect of
7386 wrapping it in __gcov_fork() which calls __gcov_flush()
7387 and clears the counters before forking has the same
7388 effect as returning twice. Force a fake edge. */
7389 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7390 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7393 if (is_gimple_call (t
))
7399 if (!(call_flags
& ECF_NORETURN
))
7403 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7404 if ((e
->flags
& EDGE_FAKE
) == 0)
7408 if (gimple_code (t
) == GIMPLE_ASM
7409 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7416 /* Add fake edges to the function exit for any non constant and non
7417 noreturn calls (or noreturn calls with EH/abnormal edges),
7418 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7419 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7422 The goal is to expose cases in which entering a basic block does
7423 not imply that all subsequent instructions must be executed. */
7426 gimple_flow_call_edges_add (sbitmap blocks
)
7429 int blocks_split
= 0;
7430 int last_bb
= last_basic_block
;
7431 bool check_last_block
= false;
7433 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7437 check_last_block
= true;
7439 check_last_block
= bitmap_bit_p (blocks
,
7440 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7442 /* In the last basic block, before epilogue generation, there will be
7443 a fallthru edge to EXIT. Special care is required if the last insn
7444 of the last basic block is a call because make_edge folds duplicate
7445 edges, which would result in the fallthru edge also being marked
7446 fake, which would result in the fallthru edge being removed by
7447 remove_fake_edges, which would result in an invalid CFG.
7449 Moreover, we can't elide the outgoing fake edge, since the block
7450 profiler needs to take this into account in order to solve the minimal
7451 spanning tree in the case that the call doesn't return.
7453 Handle this by adding a dummy instruction in a new last basic block. */
7454 if (check_last_block
)
7456 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7457 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7460 if (!gsi_end_p (gsi
))
7463 if (t
&& need_fake_edge_p (t
))
7467 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7470 gsi_insert_on_edge (e
, gimple_build_nop ());
7471 gsi_commit_edge_inserts ();
7476 /* Now add fake edges to the function exit for any non constant
7477 calls since there is no way that we can determine if they will
7479 for (i
= 0; i
< last_bb
; i
++)
7481 basic_block bb
= BASIC_BLOCK (i
);
7482 gimple_stmt_iterator gsi
;
7483 gimple stmt
, last_stmt
;
7488 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7491 gsi
= gsi_last_nondebug_bb (bb
);
7492 if (!gsi_end_p (gsi
))
7494 last_stmt
= gsi_stmt (gsi
);
7497 stmt
= gsi_stmt (gsi
);
7498 if (need_fake_edge_p (stmt
))
7502 /* The handling above of the final block before the
7503 epilogue should be enough to verify that there is
7504 no edge to the exit block in CFG already.
7505 Calling make_edge in such case would cause us to
7506 mark that edge as fake and remove it later. */
7507 #ifdef ENABLE_CHECKING
7508 if (stmt
== last_stmt
)
7510 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7511 gcc_assert (e
== NULL
);
7515 /* Note that the following may create a new basic block
7516 and renumber the existing basic blocks. */
7517 if (stmt
!= last_stmt
)
7519 e
= split_block (bb
, stmt
);
7523 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7527 while (!gsi_end_p (gsi
));
7532 verify_flow_info ();
7534 return blocks_split
;
7537 /* Removes edge E and all the blocks dominated by it, and updates dominance
7538 information. The IL in E->src needs to be updated separately.
7539 If dominance info is not available, only the edge E is removed.*/
7542 remove_edge_and_dominated_blocks (edge e
)
7544 vec
<basic_block
> bbs_to_remove
= vNULL
;
7545 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7549 bool none_removed
= false;
7551 basic_block bb
, dbb
;
7554 if (!dom_info_available_p (CDI_DOMINATORS
))
7560 /* No updating is needed for edges to exit. */
7561 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7563 if (cfgcleanup_altered_bbs
)
7564 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7569 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7570 that is not dominated by E->dest, then this set is empty. Otherwise,
7571 all the basic blocks dominated by E->dest are removed.
7573 Also, to DF_IDOM we store the immediate dominators of the blocks in
7574 the dominance frontier of E (i.e., of the successors of the
7575 removed blocks, if there are any, and of E->dest otherwise). */
7576 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7581 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7583 none_removed
= true;
7588 df
= BITMAP_ALLOC (NULL
);
7589 df_idom
= BITMAP_ALLOC (NULL
);
7592 bitmap_set_bit (df_idom
,
7593 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7596 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7597 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7599 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7601 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7602 bitmap_set_bit (df
, f
->dest
->index
);
7605 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7606 bitmap_clear_bit (df
, bb
->index
);
7608 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7610 bb
= BASIC_BLOCK (i
);
7611 bitmap_set_bit (df_idom
,
7612 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7616 if (cfgcleanup_altered_bbs
)
7618 /* Record the set of the altered basic blocks. */
7619 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7620 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7623 /* Remove E and the cancelled blocks. */
7628 /* Walk backwards so as to get a chance to substitute all
7629 released DEFs into debug stmts. See
7630 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7632 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7633 delete_basic_block (bbs_to_remove
[i
]);
7636 /* Update the dominance information. The immediate dominator may change only
7637 for blocks whose immediate dominator belongs to DF_IDOM:
7639 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7640 removal. Let Z the arbitrary block such that idom(Z) = Y and
7641 Z dominates X after the removal. Before removal, there exists a path P
7642 from Y to X that avoids Z. Let F be the last edge on P that is
7643 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7644 dominates W, and because of P, Z does not dominate W), and W belongs to
7645 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7646 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7648 bb
= BASIC_BLOCK (i
);
7649 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7651 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7652 bbs_to_fix_dom
.safe_push (dbb
);
7655 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7658 BITMAP_FREE (df_idom
);
7659 bbs_to_remove
.release ();
7660 bbs_to_fix_dom
.release ();
7663 /* Purge dead EH edges from basic block BB. */
7666 gimple_purge_dead_eh_edges (basic_block bb
)
7668 bool changed
= false;
7671 gimple stmt
= last_stmt (bb
);
7673 if (stmt
&& stmt_can_throw_internal (stmt
))
7676 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7678 if (e
->flags
& EDGE_EH
)
7680 remove_edge_and_dominated_blocks (e
);
7690 /* Purge dead EH edges from basic block listed in BLOCKS. */
7693 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7695 bool changed
= false;
7699 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7701 basic_block bb
= BASIC_BLOCK (i
);
7703 /* Earlier gimple_purge_dead_eh_edges could have removed
7704 this basic block already. */
7705 gcc_assert (bb
|| changed
);
7707 changed
|= gimple_purge_dead_eh_edges (bb
);
7713 /* Purge dead abnormal call edges from basic block BB. */
7716 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7718 bool changed
= false;
7721 gimple stmt
= last_stmt (bb
);
7723 if (!cfun
->has_nonlocal_label
7724 && !cfun
->calls_setjmp
)
7727 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7730 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7732 if (e
->flags
& EDGE_ABNORMAL
)
7734 if (e
->flags
& EDGE_FALLTHRU
)
7735 e
->flags
&= ~EDGE_ABNORMAL
;
7737 remove_edge_and_dominated_blocks (e
);
7747 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7750 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7752 bool changed
= false;
7756 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7758 basic_block bb
= BASIC_BLOCK (i
);
7760 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7761 this basic block already. */
7762 gcc_assert (bb
|| changed
);
7764 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7770 /* This function is called whenever a new edge is created or
7774 gimple_execute_on_growing_pred (edge e
)
7776 basic_block bb
= e
->dest
;
7778 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7779 reserve_phi_args_for_new_edge (bb
);
7782 /* This function is called immediately before edge E is removed from
7783 the edge vector E->dest->preds. */
7786 gimple_execute_on_shrinking_pred (edge e
)
7788 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7789 remove_phi_args (e
);
7792 /*---------------------------------------------------------------------------
7793 Helper functions for Loop versioning
7794 ---------------------------------------------------------------------------*/
7796 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7797 of 'first'. Both of them are dominated by 'new_head' basic block. When
7798 'new_head' was created by 'second's incoming edge it received phi arguments
7799 on the edge by split_edge(). Later, additional edge 'e' was created to
7800 connect 'new_head' and 'first'. Now this routine adds phi args on this
7801 additional edge 'e' that new_head to second edge received as part of edge
7805 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7806 basic_block new_head
, edge e
)
7809 gimple_stmt_iterator psi1
, psi2
;
7811 edge e2
= find_edge (new_head
, second
);
7813 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7814 edge, we should always have an edge from NEW_HEAD to SECOND. */
7815 gcc_assert (e2
!= NULL
);
7817 /* Browse all 'second' basic block phi nodes and add phi args to
7818 edge 'e' for 'first' head. PHI args are always in correct order. */
7820 for (psi2
= gsi_start_phis (second
),
7821 psi1
= gsi_start_phis (first
);
7822 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7823 gsi_next (&psi2
), gsi_next (&psi1
))
7825 phi1
= gsi_stmt (psi1
);
7826 phi2
= gsi_stmt (psi2
);
7827 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7828 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7833 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7834 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7835 the destination of the ELSE part. */
7838 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7839 basic_block second_head ATTRIBUTE_UNUSED
,
7840 basic_block cond_bb
, void *cond_e
)
7842 gimple_stmt_iterator gsi
;
7843 gimple new_cond_expr
;
7844 tree cond_expr
= (tree
) cond_e
;
7847 /* Build new conditional expr */
7848 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7849 NULL_TREE
, NULL_TREE
);
7851 /* Add new cond in cond_bb. */
7852 gsi
= gsi_last_bb (cond_bb
);
7853 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7855 /* Adjust edges appropriately to connect new head with first head
7856 as well as second head. */
7857 e0
= single_succ_edge (cond_bb
);
7858 e0
->flags
&= ~EDGE_FALLTHRU
;
7859 e0
->flags
|= EDGE_FALSE_VALUE
;
7863 /* Do book-keeping of basic block BB for the profile consistency checker.
7864 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7865 then do post-pass accounting. Store the counting in RECORD. */
7867 gimple_account_profile_record (basic_block bb
, int after_pass
,
7868 struct profile_record
*record
)
7870 gimple_stmt_iterator i
;
7871 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7873 record
->size
[after_pass
]
7874 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7875 if (profile_status
== PROFILE_READ
)
7876 record
->time
[after_pass
]
7877 += estimate_num_insns (gsi_stmt (i
),
7878 &eni_time_weights
) * bb
->count
;
7879 else if (profile_status
== PROFILE_GUESSED
)
7880 record
->time
[after_pass
]
7881 += estimate_num_insns (gsi_stmt (i
),
7882 &eni_time_weights
) * bb
->frequency
;
7886 struct cfg_hooks gimple_cfg_hooks
= {
7888 gimple_verify_flow_info
,
7889 gimple_dump_bb
, /* dump_bb */
7890 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
7891 create_bb
, /* create_basic_block */
7892 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
7893 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
7894 gimple_can_remove_branch_p
, /* can_remove_branch_p */
7895 remove_bb
, /* delete_basic_block */
7896 gimple_split_block
, /* split_block */
7897 gimple_move_block_after
, /* move_block_after */
7898 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
7899 gimple_merge_blocks
, /* merge_blocks */
7900 gimple_predict_edge
, /* predict_edge */
7901 gimple_predicted_by_p
, /* predicted_by_p */
7902 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
7903 gimple_duplicate_bb
, /* duplicate_block */
7904 gimple_split_edge
, /* split_edge */
7905 gimple_make_forwarder_block
, /* make_forward_block */
7906 NULL
, /* tidy_fallthru_edge */
7907 NULL
, /* force_nonfallthru */
7908 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
7909 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7910 gimple_flow_call_edges_add
, /* flow_call_edges_add */
7911 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
7912 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7913 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7914 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7915 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7916 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7917 flush_pending_stmts
, /* flush_pending_stmts */
7918 gimple_empty_block_p
, /* block_empty_p */
7919 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
7920 gimple_account_profile_record
,
7924 /* Split all critical edges. */
7927 split_critical_edges (void)
7933 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7934 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7935 mappings around the calls to split_edge. */
7936 start_recording_case_labels ();
7939 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7941 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7943 /* PRE inserts statements to edges and expects that
7944 since split_critical_edges was done beforehand, committing edge
7945 insertions will not split more edges. In addition to critical
7946 edges we must split edges that have multiple successors and
7947 end by control flow statements, such as RESX.
7948 Go ahead and split them too. This matches the logic in
7949 gimple_find_edge_insert_loc. */
7950 else if ((!single_pred_p (e
->dest
)
7951 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
7952 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7953 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
7954 && !(e
->flags
& EDGE_ABNORMAL
))
7956 gimple_stmt_iterator gsi
;
7958 gsi
= gsi_last_bb (e
->src
);
7959 if (!gsi_end_p (gsi
)
7960 && stmt_ends_bb_p (gsi_stmt (gsi
))
7961 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
7962 && !gimple_call_builtin_p (gsi_stmt (gsi
),
7968 end_recording_case_labels ();
7974 const pass_data pass_data_split_crit_edges
=
7976 GIMPLE_PASS
, /* type */
7977 "crited", /* name */
7978 OPTGROUP_NONE
, /* optinfo_flags */
7979 false, /* has_gate */
7980 true, /* has_execute */
7981 TV_TREE_SPLIT_EDGES
, /* tv_id */
7982 PROP_cfg
, /* properties_required */
7983 PROP_no_crit_edges
, /* properties_provided */
7984 0, /* properties_destroyed */
7985 0, /* todo_flags_start */
7986 TODO_verify_flow
, /* todo_flags_finish */
7989 class pass_split_crit_edges
: public gimple_opt_pass
7992 pass_split_crit_edges (gcc::context
*ctxt
)
7993 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
7996 /* opt_pass methods: */
7997 unsigned int execute () { return split_critical_edges (); }
7999 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8000 }; // class pass_split_crit_edges
8005 make_pass_split_crit_edges (gcc::context
*ctxt
)
8007 return new pass_split_crit_edges (ctxt
);
8011 /* Build a ternary operation and gimplify it. Emit code before GSI.
8012 Return the gimple_val holding the result. */
8015 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8016 tree type
, tree a
, tree b
, tree c
)
8019 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8021 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8024 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8028 /* Build a binary operation and gimplify it. Emit code before GSI.
8029 Return the gimple_val holding the result. */
8032 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8033 tree type
, tree a
, tree b
)
8037 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8040 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8044 /* Build a unary operation and gimplify it. Emit code before GSI.
8045 Return the gimple_val holding the result. */
8048 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8053 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8056 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8062 /* Emit return warnings. */
8065 execute_warn_function_return (void)
8067 source_location location
;
8072 if (!targetm
.warn_func_return (cfun
->decl
))
8075 /* If we have a path to EXIT, then we do return. */
8076 if (TREE_THIS_VOLATILE (cfun
->decl
)
8077 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0)
8079 location
= UNKNOWN_LOCATION
;
8080 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8082 last
= last_stmt (e
->src
);
8083 if ((gimple_code (last
) == GIMPLE_RETURN
8084 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8085 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8088 if (location
== UNKNOWN_LOCATION
)
8089 location
= cfun
->function_end_locus
;
8090 warning_at (location
, 0, "%<noreturn%> function does return");
8093 /* If we see "return;" in some basic block, then we do reach the end
8094 without returning a value. */
8095 else if (warn_return_type
8096 && !TREE_NO_WARNING (cfun
->decl
)
8097 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
) > 0
8098 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun
->decl
))))
8100 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
8102 gimple last
= last_stmt (e
->src
);
8103 if (gimple_code (last
) == GIMPLE_RETURN
8104 && gimple_return_retval (last
) == NULL
8105 && !gimple_no_warning_p (last
))
8107 location
= gimple_location (last
);
8108 if (location
== UNKNOWN_LOCATION
)
8109 location
= cfun
->function_end_locus
;
8110 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8111 TREE_NO_WARNING (cfun
->decl
) = 1;
8120 /* Given a basic block B which ends with a conditional and has
8121 precisely two successors, determine which of the edges is taken if
8122 the conditional is true and which is taken if the conditional is
8123 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8126 extract_true_false_edges_from_block (basic_block b
,
8130 edge e
= EDGE_SUCC (b
, 0);
8132 if (e
->flags
& EDGE_TRUE_VALUE
)
8135 *false_edge
= EDGE_SUCC (b
, 1);
8140 *true_edge
= EDGE_SUCC (b
, 1);
8146 const pass_data pass_data_warn_function_return
=
8148 GIMPLE_PASS
, /* type */
8149 "*warn_function_return", /* name */
8150 OPTGROUP_NONE
, /* optinfo_flags */
8151 false, /* has_gate */
8152 true, /* has_execute */
8153 TV_NONE
, /* tv_id */
8154 PROP_cfg
, /* properties_required */
8155 0, /* properties_provided */
8156 0, /* properties_destroyed */
8157 0, /* todo_flags_start */
8158 0, /* todo_flags_finish */
8161 class pass_warn_function_return
: public gimple_opt_pass
8164 pass_warn_function_return (gcc::context
*ctxt
)
8165 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8168 /* opt_pass methods: */
8169 unsigned int execute () { return execute_warn_function_return (); }
8171 }; // class pass_warn_function_return
8176 make_pass_warn_function_return (gcc::context
*ctxt
)
8178 return new pass_warn_function_return (ctxt
);
8181 /* Walk a gimplified function and warn for functions whose return value is
8182 ignored and attribute((warn_unused_result)) is set. This is done before
8183 inlining, so we don't have to worry about that. */
8186 do_warn_unused_result (gimple_seq seq
)
8189 gimple_stmt_iterator i
;
8191 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8193 gimple g
= gsi_stmt (i
);
8195 switch (gimple_code (g
))
8198 do_warn_unused_result (gimple_bind_body (g
));
8201 do_warn_unused_result (gimple_try_eval (g
));
8202 do_warn_unused_result (gimple_try_cleanup (g
));
8205 do_warn_unused_result (gimple_catch_handler (g
));
8207 case GIMPLE_EH_FILTER
:
8208 do_warn_unused_result (gimple_eh_filter_failure (g
));
8212 if (gimple_call_lhs (g
))
8214 if (gimple_call_internal_p (g
))
8217 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8218 LHS. All calls whose value is ignored should be
8219 represented like this. Look for the attribute. */
8220 fdecl
= gimple_call_fndecl (g
);
8221 ftype
= gimple_call_fntype (g
);
8223 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8225 location_t loc
= gimple_location (g
);
8228 warning_at (loc
, OPT_Wunused_result
,
8229 "ignoring return value of %qD, "
8230 "declared with attribute warn_unused_result",
8233 warning_at (loc
, OPT_Wunused_result
,
8234 "ignoring return value of function "
8235 "declared with attribute warn_unused_result");
8240 /* Not a container, not a call, or a call whose value is used. */
8247 run_warn_unused_result (void)
8249 do_warn_unused_result (gimple_body (current_function_decl
));
8254 gate_warn_unused_result (void)
8256 return flag_warn_unused_result
;
8261 const pass_data pass_data_warn_unused_result
=
8263 GIMPLE_PASS
, /* type */
8264 "*warn_unused_result", /* name */
8265 OPTGROUP_NONE
, /* optinfo_flags */
8266 true, /* has_gate */
8267 true, /* has_execute */
8268 TV_NONE
, /* tv_id */
8269 PROP_gimple_any
, /* properties_required */
8270 0, /* properties_provided */
8271 0, /* properties_destroyed */
8272 0, /* todo_flags_start */
8273 0, /* todo_flags_finish */
8276 class pass_warn_unused_result
: public gimple_opt_pass
8279 pass_warn_unused_result (gcc::context
*ctxt
)
8280 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8283 /* opt_pass methods: */
8284 bool gate () { return gate_warn_unused_result (); }
8285 unsigned int execute () { return run_warn_unused_result (); }
8287 }; // class pass_warn_unused_result
8292 make_pass_warn_unused_result (gcc::context
*ctxt
)
8294 return new pass_warn_unused_result (ctxt
);
8297 /* IPA passes, compilation of earlier functions or inlining
8298 might have changed some properties, such as marked functions nothrow,
8299 pure, const or noreturn.
8300 Remove redundant edges and basic blocks, and create new ones if necessary.
8302 This pass can't be executed as stand alone pass from pass manager, because
8303 in between inlining and this fixup the verify_flow_info would fail. */
8306 execute_fixup_cfg (void)
8309 gimple_stmt_iterator gsi
;
8310 int todo
= gimple_in_ssa_p (cfun
) ? TODO_verify_ssa
: 0;
8311 gcov_type count_scale
;
8316 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8317 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8319 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8320 cgraph_get_node (current_function_decl
)->count
;
8321 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8322 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8325 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8326 e
->count
= apply_scale (e
->count
, count_scale
);
8330 bb
->count
= apply_scale (bb
->count
, count_scale
);
8331 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8333 gimple stmt
= gsi_stmt (gsi
);
8334 tree decl
= is_gimple_call (stmt
)
8335 ? gimple_call_fndecl (stmt
)
8339 int flags
= gimple_call_flags (stmt
);
8340 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8342 if (gimple_purge_dead_abnormal_call_edges (bb
))
8343 todo
|= TODO_cleanup_cfg
;
8345 if (gimple_in_ssa_p (cfun
))
8347 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8352 if (flags
& ECF_NORETURN
8353 && fixup_noreturn_call (stmt
))
8354 todo
|= TODO_cleanup_cfg
;
8357 if (maybe_clean_eh_stmt (stmt
)
8358 && gimple_purge_dead_eh_edges (bb
))
8359 todo
|= TODO_cleanup_cfg
;
8362 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8363 e
->count
= apply_scale (e
->count
, count_scale
);
8365 /* If we have a basic block with no successors that does not
8366 end with a control statement or a noreturn call end it with
8367 a call to __builtin_unreachable. This situation can occur
8368 when inlining a noreturn call that does in fact return. */
8369 if (EDGE_COUNT (bb
->succs
) == 0)
8371 gimple stmt
= last_stmt (bb
);
8373 || (!is_ctrl_stmt (stmt
)
8374 && (!is_gimple_call (stmt
)
8375 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8377 stmt
= gimple_build_call
8378 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8379 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8380 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8384 if (count_scale
!= REG_BR_PROB_BASE
)
8385 compute_function_frequency ();
8387 /* We just processed all calls. */
8388 if (cfun
->gimple_df
)
8389 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8391 /* Dump a textual representation of the flowgraph. */
8393 gimple_dump_cfg (dump_file
, dump_flags
);
8396 && (todo
& TODO_cleanup_cfg
))
8397 loops_state_set (LOOPS_NEED_FIXUP
);
8404 const pass_data pass_data_fixup_cfg
=
8406 GIMPLE_PASS
, /* type */
8407 "*free_cfg_annotations", /* name */
8408 OPTGROUP_NONE
, /* optinfo_flags */
8409 false, /* has_gate */
8410 true, /* has_execute */
8411 TV_NONE
, /* tv_id */
8412 PROP_cfg
, /* properties_required */
8413 0, /* properties_provided */
8414 0, /* properties_destroyed */
8415 0, /* todo_flags_start */
8416 0, /* todo_flags_finish */
8419 class pass_fixup_cfg
: public gimple_opt_pass
8422 pass_fixup_cfg (gcc::context
*ctxt
)
8423 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8426 /* opt_pass methods: */
8427 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8428 unsigned int execute () { return execute_fixup_cfg (); }
8430 }; // class pass_fixup_cfg
8435 make_pass_fixup_cfg (gcc::context
*ctxt
)
8437 return new pass_fixup_cfg (ctxt
);
8440 /* Garbage collection support for edge_def. */
8442 extern void gt_ggc_mx (tree
&);
8443 extern void gt_ggc_mx (gimple
&);
8444 extern void gt_ggc_mx (rtx
&);
8445 extern void gt_ggc_mx (basic_block
&);
8448 gt_ggc_mx (edge_def
*e
)
8450 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8452 gt_ggc_mx (e
->dest
);
8453 if (current_ir_type () == IR_GIMPLE
)
8454 gt_ggc_mx (e
->insns
.g
);
8456 gt_ggc_mx (e
->insns
.r
);
8460 /* PCH support for edge_def. */
8462 extern void gt_pch_nx (tree
&);
8463 extern void gt_pch_nx (gimple
&);
8464 extern void gt_pch_nx (rtx
&);
8465 extern void gt_pch_nx (basic_block
&);
8468 gt_pch_nx (edge_def
*e
)
8470 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8472 gt_pch_nx (e
->dest
);
8473 if (current_ir_type () == IR_GIMPLE
)
8474 gt_pch_nx (e
->insns
.g
);
8476 gt_pch_nx (e
->insns
.r
);
8481 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8483 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8484 op (&(e
->src
), cookie
);
8485 op (&(e
->dest
), cookie
);
8486 if (current_ir_type () == IR_GIMPLE
)
8487 op (&(e
->insns
.g
), cookie
);
8489 op (&(e
->insns
.r
), cookie
);
8490 op (&(block
), cookie
);