1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 /* This file contains functions for building the Control Flow Graph (CFG)
73 for a function tree. */
75 /* Local declarations. */
77 /* Initial capacity for the basic block array. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
80 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
81 which use a particular edge. The CASE_LABEL_EXPRs are chained together
82 via their CASE_CHAIN field, which we clear after we're done with the
83 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
85 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
86 update the case vector in response to edge redirections.
88 Right now this table is set up and torn down at key points in the
89 compilation process. It would be nice if we could make the table
90 more persistent. The key is getting notification of changes to
91 the CFG (particularly edge removal, creation and redirection). */
93 static struct pointer_map_t
*edge_to_cases
;
95 /* If we record edge_to_cases, this bitmap will hold indexes
96 of basic blocks that end in a GIMPLE_SWITCH which we touched
97 due to edge manipulations. */
99 static bitmap touched_switch_bbs
;
101 /* CFG statistics. */
104 long num_merged_labels
;
107 static struct cfg_stats_d cfg_stats
;
109 /* Nonzero if we found a computed goto while building basic blocks. */
110 static bool found_computed_goto
;
112 /* Hash table to store last discriminator assigned for each locus. */
113 struct locus_discrim_map
119 /* Hashtable helpers. */
121 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
123 typedef locus_discrim_map value_type
;
124 typedef locus_discrim_map compare_type
;
125 static inline hashval_t
hash (const value_type
*);
126 static inline bool equal (const value_type
*, const compare_type
*);
129 /* Trivial hash function for a location_t. ITEM is a pointer to
130 a hash table entry that maps a location_t to a discriminator. */
133 locus_discrim_hasher::hash (const value_type
*item
)
135 return LOCATION_LINE (item
->locus
);
138 /* Equality function for the locus-to-discriminator map. A and B
139 point to the two hash table entries to compare. */
142 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
144 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
147 static hash_table
<locus_discrim_hasher
> discriminator_per_locus
;
149 /* Basic blocks and flowgraphs. */
150 static void make_blocks (gimple_seq
);
151 static void factor_computed_gotos (void);
154 static void make_edges (void);
155 static void assign_discriminators (void);
156 static void make_cond_expr_edges (basic_block
);
157 static void make_gimple_switch_edges (basic_block
);
158 static void make_goto_expr_edges (basic_block
);
159 static void make_gimple_asm_edges (basic_block
);
160 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
161 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
162 static unsigned int split_critical_edges (void);
164 /* Various helpers. */
165 static inline bool stmt_starts_bb_p (gimple
, gimple
);
166 static int gimple_verify_flow_info (void);
167 static void gimple_make_forwarder_block (edge
);
168 static gimple
first_non_label_stmt (basic_block
);
169 static bool verify_gimple_transaction (gimple
);
171 /* Flowgraph optimization and cleanup. */
172 static void gimple_merge_blocks (basic_block
, basic_block
);
173 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
174 static void remove_bb (basic_block
);
175 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
176 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
177 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
178 static tree
find_case_label_for_value (gimple
, tree
);
181 init_empty_tree_cfg_for_function (struct function
*fn
)
183 /* Initialize the basic block array. */
185 profile_status_for_function (fn
) = PROFILE_ABSENT
;
186 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
187 last_basic_block_for_function (fn
) = NUM_FIXED_BLOCKS
;
188 vec_alloc (basic_block_info_for_function (fn
), initial_cfg_capacity
);
189 vec_safe_grow_cleared (basic_block_info_for_function (fn
),
190 initial_cfg_capacity
);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_alloc (label_to_block_map_for_function (fn
), initial_cfg_capacity
);
194 vec_safe_grow_cleared (label_to_block_map_for_function (fn
),
195 initial_cfg_capacity
);
197 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, ENTRY_BLOCK
,
198 ENTRY_BLOCK_PTR_FOR_FN (fn
));
199 SET_BASIC_BLOCK_FOR_FUNCTION (fn
, EXIT_BLOCK
,
200 EXIT_BLOCK_PTR_FOR_FN (fn
));
202 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
203 = EXIT_BLOCK_PTR_FOR_FN (fn
);
204 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
205 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
209 init_empty_tree_cfg (void)
211 init_empty_tree_cfg_for_function (cfun
);
214 /*---------------------------------------------------------------------------
216 ---------------------------------------------------------------------------*/
218 /* Entry point to the CFG builder for trees. SEQ is the sequence of
219 statements to be added to the flowgraph. */
222 build_gimple_cfg (gimple_seq seq
)
224 /* Register specific gimple functions. */
225 gimple_register_cfg_hooks ();
227 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
229 init_empty_tree_cfg ();
231 found_computed_goto
= 0;
234 /* Computed gotos are hell to deal with, especially if there are
235 lots of them with a large number of destinations. So we factor
236 them to a common computed goto location before we build the
237 edge list. After we convert back to normal form, we will un-factor
238 the computed gotos since factoring introduces an unwanted jump. */
239 if (found_computed_goto
)
240 factor_computed_gotos ();
242 /* Make sure there is always at least one block, even if it's empty. */
243 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
244 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
246 /* Adjust the size of the array. */
247 if (basic_block_info
->length () < (size_t) n_basic_blocks_for_fn (cfun
))
248 vec_safe_grow_cleared (basic_block_info
, n_basic_blocks_for_fn (cfun
));
250 /* To speed up statement iterator walks, we first purge dead labels. */
251 cleanup_dead_labels ();
253 /* Group case nodes to reduce the number of edges.
254 We do this after cleaning up dead labels because otherwise we miss
255 a lot of obvious case merging opportunities. */
256 group_case_labels ();
258 /* Create the edges of the flowgraph. */
259 discriminator_per_locus
.create (13);
261 assign_discriminators ();
262 cleanup_dead_labels ();
263 discriminator_per_locus
.dispose ();
267 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
268 it and set loop->safelen to INT_MAX. We assume that the annotation
269 comes immediately before the condition. */
272 replace_loop_annotate ()
276 gimple_stmt_iterator gsi
;
279 FOR_EACH_LOOP (loop
, 0)
281 gsi
= gsi_last_bb (loop
->header
);
282 stmt
= gsi_stmt (gsi
);
283 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
285 gsi_prev_nondebug (&gsi
);
288 stmt
= gsi_stmt (gsi
);
289 if (gimple_code (stmt
) != GIMPLE_CALL
)
291 if (!gimple_call_internal_p (stmt
)
292 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
294 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
295 != annot_expr_ivdep_kind
)
297 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
298 gimple_call_arg (stmt
, 0));
299 gsi_replace (&gsi
, stmt
, true);
300 loop
->safelen
= INT_MAX
;
304 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
307 gsi
= gsi_last_bb (bb
);
308 stmt
= gsi_stmt (gsi
);
309 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
310 gsi_prev_nondebug (&gsi
);
313 stmt
= gsi_stmt (gsi
);
314 if (gimple_code (stmt
) != GIMPLE_CALL
)
316 if (!gimple_call_internal_p (stmt
)
317 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
319 if ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1))
320 != annot_expr_ivdep_kind
)
322 warning_at (gimple_location (stmt
), 0, "ignoring %<GCC ivdep%> "
324 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
325 gimple_call_arg (stmt
, 0));
326 gsi_replace (&gsi
, stmt
, true);
332 execute_build_cfg (void)
334 gimple_seq body
= gimple_body (current_function_decl
);
336 build_gimple_cfg (body
);
337 gimple_set_body (current_function_decl
, NULL
);
338 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
340 fprintf (dump_file
, "Scope blocks:\n");
341 dump_scope_blocks (dump_file
, dump_flags
);
344 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
345 replace_loop_annotate ();
351 const pass_data pass_data_build_cfg
=
353 GIMPLE_PASS
, /* type */
355 OPTGROUP_NONE
, /* optinfo_flags */
356 false, /* has_gate */
357 true, /* has_execute */
358 TV_TREE_CFG
, /* tv_id */
359 PROP_gimple_leh
, /* properties_required */
360 ( PROP_cfg
| PROP_loops
), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 TODO_verify_stmts
, /* todo_flags_finish */
366 class pass_build_cfg
: public gimple_opt_pass
369 pass_build_cfg (gcc::context
*ctxt
)
370 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
373 /* opt_pass methods: */
374 unsigned int execute () { return execute_build_cfg (); }
376 }; // class pass_build_cfg
381 make_pass_build_cfg (gcc::context
*ctxt
)
383 return new pass_build_cfg (ctxt
);
387 /* Return true if T is a computed goto. */
390 computed_goto_p (gimple t
)
392 return (gimple_code (t
) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
406 __builtin_unreachable ();
410 assert_unreachable_fallthru_edge_p (edge e
)
412 basic_block pred_bb
= e
->src
;
413 gimple last
= last_stmt (pred_bb
);
414 if (last
&& gimple_code (last
) == GIMPLE_COND
)
416 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
417 if (other_bb
== e
->dest
)
418 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
419 if (EDGE_COUNT (other_bb
->succs
) == 0)
421 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
426 stmt
= gsi_stmt (gsi
);
427 if (is_gimple_debug (stmt
))
429 gsi_next_nondebug (&gsi
);
432 stmt
= gsi_stmt (gsi
);
434 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
441 /* Search the CFG for any computed gotos. If found, factor them to a
442 common computed goto site. Also record the location of that site so
443 that we can un-factor the gotos after we have converted back to
447 factor_computed_gotos (void)
450 tree factored_label_decl
= NULL
;
452 gimple factored_computed_goto_label
= NULL
;
453 gimple factored_computed_goto
= NULL
;
455 /* We know there are one or more computed gotos in this function.
456 Examine the last statement in each basic block to see if the block
457 ends with a computed goto. */
461 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
467 last
= gsi_stmt (gsi
);
469 /* Ignore the computed goto we create when we factor the original
471 if (last
== factored_computed_goto
)
474 /* If the last statement is a computed goto, factor it. */
475 if (computed_goto_p (last
))
479 /* The first time we find a computed goto we need to create
480 the factored goto block and the variable each original
481 computed goto will use for their goto destination. */
482 if (!factored_computed_goto
)
484 basic_block new_bb
= create_empty_bb (bb
);
485 gimple_stmt_iterator new_gsi
= gsi_start_bb (new_bb
);
487 /* Create the destination of the factored goto. Each original
488 computed goto will put its desired destination into this
489 variable and jump to the label we create immediately
491 var
= create_tmp_var (ptr_type_node
, "gotovar");
493 /* Build a label for the new block which will contain the
494 factored computed goto. */
495 factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION
);
496 factored_computed_goto_label
497 = gimple_build_label (factored_label_decl
);
498 gsi_insert_after (&new_gsi
, factored_computed_goto_label
,
501 /* Build our new computed goto. */
502 factored_computed_goto
= gimple_build_goto (var
);
503 gsi_insert_after (&new_gsi
, factored_computed_goto
, GSI_NEW_STMT
);
506 /* Copy the original computed goto's destination into VAR. */
507 assignment
= gimple_build_assign (var
, gimple_goto_dest (last
));
508 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
510 /* And re-vector the computed goto to the new destination. */
511 gimple_goto_set_dest (last
, factored_label_decl
);
517 /* Build a flowgraph for the sequence of stmts SEQ. */
520 make_blocks (gimple_seq seq
)
522 gimple_stmt_iterator i
= gsi_start (seq
);
524 bool start_new_block
= true;
525 bool first_stmt_of_seq
= true;
526 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
528 while (!gsi_end_p (i
))
535 /* If the statement starts a new basic block or if we have determined
536 in a previous pass that we need to create a new block for STMT, do
538 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
540 if (!first_stmt_of_seq
)
541 gsi_split_seq_before (&i
, &seq
);
542 bb
= create_basic_block (seq
, NULL
, bb
);
543 start_new_block
= false;
546 /* Now add STMT to BB and create the subgraphs for special statement
548 gimple_set_bb (stmt
, bb
);
550 if (computed_goto_p (stmt
))
551 found_computed_goto
= true;
553 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
555 if (stmt_ends_bb_p (stmt
))
557 /* If the stmt can make abnormal goto use a new temporary
558 for the assignment to the LHS. This makes sure the old value
559 of the LHS is available on the abnormal edge. Otherwise
560 we will end up with overlapping life-ranges for abnormal
562 if (gimple_has_lhs (stmt
)
563 && stmt_can_make_abnormal_goto (stmt
)
564 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
566 tree lhs
= gimple_get_lhs (stmt
);
567 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
568 gimple s
= gimple_build_assign (lhs
, tmp
);
569 gimple_set_location (s
, gimple_location (stmt
));
570 gimple_set_block (s
, gimple_block (stmt
));
571 gimple_set_lhs (stmt
, tmp
);
572 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
573 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
574 DECL_GIMPLE_REG_P (tmp
) = 1;
575 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
577 start_new_block
= true;
581 first_stmt_of_seq
= false;
586 /* Create and return a new empty basic block after bb AFTER. */
589 create_bb (void *h
, void *e
, basic_block after
)
595 /* Create and initialize a new basic block. Since alloc_block uses
596 GC allocation that clears memory to allocate a basic block, we do
597 not have to clear the newly allocated basic block here. */
600 bb
->index
= last_basic_block
;
602 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
604 /* Add the new block to the linked list of blocks. */
605 link_block (bb
, after
);
607 /* Grow the basic block array if needed. */
608 if ((size_t) last_basic_block
== basic_block_info
->length ())
610 size_t new_size
= last_basic_block
+ (last_basic_block
+ 3) / 4;
611 vec_safe_grow_cleared (basic_block_info
, new_size
);
614 /* Add the newly created block to the array. */
615 SET_BASIC_BLOCK (last_basic_block
, bb
);
617 n_basic_blocks_for_fn (cfun
)++;
624 /*---------------------------------------------------------------------------
626 ---------------------------------------------------------------------------*/
628 /* Fold COND_EXPR_COND of each COND_EXPR. */
631 fold_cond_expr_cond (void)
637 gimple stmt
= last_stmt (bb
);
639 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
641 location_t loc
= gimple_location (stmt
);
645 fold_defer_overflow_warnings ();
646 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
647 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
650 zerop
= integer_zerop (cond
);
651 onep
= integer_onep (cond
);
654 zerop
= onep
= false;
656 fold_undefer_overflow_warnings (zerop
|| onep
,
658 WARN_STRICT_OVERFLOW_CONDITIONAL
);
660 gimple_cond_make_false (stmt
);
662 gimple_cond_make_true (stmt
);
667 /* Join all the blocks in the flowgraph. */
673 struct omp_region
*cur_region
= NULL
;
675 /* Create an edge from entry to the first block with executable
677 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), BASIC_BLOCK (NUM_FIXED_BLOCKS
),
680 /* Traverse the basic block array placing edges. */
683 gimple last
= last_stmt (bb
);
688 enum gimple_code code
= gimple_code (last
);
692 make_goto_expr_edges (bb
);
696 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
700 make_cond_expr_edges (bb
);
704 make_gimple_switch_edges (bb
);
708 make_eh_edges (last
);
711 case GIMPLE_EH_DISPATCH
:
712 fallthru
= make_eh_dispatch_edges (last
);
716 /* If this function receives a nonlocal goto, then we need to
717 make edges from this call site to all the nonlocal goto
719 if (stmt_can_make_abnormal_goto (last
))
720 make_abnormal_goto_edges (bb
, true);
722 /* If this statement has reachable exception handlers, then
723 create abnormal edges to them. */
724 make_eh_edges (last
);
726 /* BUILTIN_RETURN is really a return statement. */
727 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
728 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0), fallthru
=
730 /* Some calls are known not to return. */
732 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
736 /* A GIMPLE_ASSIGN may throw internally and thus be considered
738 if (is_ctrl_altering_stmt (last
))
739 make_eh_edges (last
);
744 make_gimple_asm_edges (bb
);
749 fallthru
= make_gimple_omp_edges (bb
, &cur_region
);
752 case GIMPLE_TRANSACTION
:
754 tree abort_label
= gimple_transaction_label (last
);
756 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
762 gcc_assert (!stmt_ends_bb_p (last
));
770 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
775 /* Fold COND_EXPR_COND of each COND_EXPR. */
776 fold_cond_expr_cond ();
779 /* Find the next available discriminator value for LOCUS. The
780 discriminator distinguishes among several basic blocks that
781 share a common locus, allowing for more accurate sample-based
785 next_discriminator_for_locus (location_t locus
)
787 struct locus_discrim_map item
;
788 struct locus_discrim_map
**slot
;
791 item
.discriminator
= 0;
792 slot
= discriminator_per_locus
.find_slot_with_hash (
793 &item
, LOCATION_LINE (locus
), INSERT
);
795 if (*slot
== HTAB_EMPTY_ENTRY
)
797 *slot
= XNEW (struct locus_discrim_map
);
799 (*slot
)->locus
= locus
;
800 (*slot
)->discriminator
= 0;
802 (*slot
)->discriminator
++;
803 return (*slot
)->discriminator
;
806 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
809 same_line_p (location_t locus1
, location_t locus2
)
811 expanded_location from
, to
;
813 if (locus1
== locus2
)
816 from
= expand_location (locus1
);
817 to
= expand_location (locus2
);
819 if (from
.line
!= to
.line
)
821 if (from
.file
== to
.file
)
823 return (from
.file
!= NULL
825 && filename_cmp (from
.file
, to
.file
) == 0);
828 /* Assign discriminators to each basic block. */
831 assign_discriminators (void)
839 gimple last
= last_stmt (bb
);
840 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
842 if (locus
== UNKNOWN_LOCATION
)
845 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
847 gimple first
= first_non_label_stmt (e
->dest
);
848 gimple last
= last_stmt (e
->dest
);
849 if ((first
&& same_line_p (locus
, gimple_location (first
)))
850 || (last
&& same_line_p (locus
, gimple_location (last
))))
852 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
853 bb
->discriminator
= next_discriminator_for_locus (locus
);
855 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
861 /* Create the edges for a GIMPLE_COND starting at block BB. */
864 make_cond_expr_edges (basic_block bb
)
866 gimple entry
= last_stmt (bb
);
867 gimple then_stmt
, else_stmt
;
868 basic_block then_bb
, else_bb
;
869 tree then_label
, else_label
;
873 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
875 /* Entry basic blocks for each component. */
876 then_label
= gimple_cond_true_label (entry
);
877 else_label
= gimple_cond_false_label (entry
);
878 then_bb
= label_to_block (then_label
);
879 else_bb
= label_to_block (else_label
);
880 then_stmt
= first_stmt (then_bb
);
881 else_stmt
= first_stmt (else_bb
);
883 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
884 e
->goto_locus
= gimple_location (then_stmt
);
885 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
887 e
->goto_locus
= gimple_location (else_stmt
);
889 /* We do not need the labels anymore. */
890 gimple_cond_set_true_label (entry
, NULL_TREE
);
891 gimple_cond_set_false_label (entry
, NULL_TREE
);
895 /* Called for each element in the hash table (P) as we delete the
896 edge to cases hash table.
898 Clear all the TREE_CHAINs to prevent problems with copying of
899 SWITCH_EXPRs and structure sharing rules, then free the hash table
903 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
904 void *data ATTRIBUTE_UNUSED
)
908 for (t
= (tree
) *value
; t
; t
= next
)
910 next
= CASE_CHAIN (t
);
911 CASE_CHAIN (t
) = NULL
;
918 /* Start recording information mapping edges to case labels. */
921 start_recording_case_labels (void)
923 gcc_assert (edge_to_cases
== NULL
);
924 edge_to_cases
= pointer_map_create ();
925 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
928 /* Return nonzero if we are recording information for case labels. */
931 recording_case_labels_p (void)
933 return (edge_to_cases
!= NULL
);
936 /* Stop recording information mapping edges to case labels and
937 remove any information we have recorded. */
939 end_recording_case_labels (void)
943 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
944 pointer_map_destroy (edge_to_cases
);
945 edge_to_cases
= NULL
;
946 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
948 basic_block bb
= BASIC_BLOCK (i
);
951 gimple stmt
= last_stmt (bb
);
952 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
953 group_case_labels_stmt (stmt
);
956 BITMAP_FREE (touched_switch_bbs
);
959 /* If we are inside a {start,end}_recording_cases block, then return
960 a chain of CASE_LABEL_EXPRs from T which reference E.
962 Otherwise return NULL. */
965 get_cases_for_edge (edge e
, gimple t
)
970 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
971 chains available. Return NULL so the caller can detect this case. */
972 if (!recording_case_labels_p ())
975 slot
= pointer_map_contains (edge_to_cases
, e
);
979 /* If we did not find E in the hash table, then this must be the first
980 time we have been queried for information about E & T. Add all the
981 elements from T to the hash table then perform the query again. */
983 n
= gimple_switch_num_labels (t
);
984 for (i
= 0; i
< n
; i
++)
986 tree elt
= gimple_switch_label (t
, i
);
987 tree lab
= CASE_LABEL (elt
);
988 basic_block label_bb
= label_to_block (lab
);
989 edge this_edge
= find_edge (e
->src
, label_bb
);
991 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
993 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
994 CASE_CHAIN (elt
) = (tree
) *slot
;
998 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1001 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1004 make_gimple_switch_edges (basic_block bb
)
1006 gimple entry
= last_stmt (bb
);
1009 n
= gimple_switch_num_labels (entry
);
1011 for (i
= 0; i
< n
; ++i
)
1013 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1014 basic_block label_bb
= label_to_block (lab
);
1015 make_edge (bb
, label_bb
, 0);
1020 /* Return the basic block holding label DEST. */
1023 label_to_block_fn (struct function
*ifun
, tree dest
)
1025 int uid
= LABEL_DECL_UID (dest
);
1027 /* We would die hard when faced by an undefined label. Emit a label to
1028 the very first basic block. This will hopefully make even the dataflow
1029 and undefined variable warnings quite right. */
1030 if (seen_error () && uid
< 0)
1032 gimple_stmt_iterator gsi
= gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS
));
1035 stmt
= gimple_build_label (dest
);
1036 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1037 uid
= LABEL_DECL_UID (dest
);
1039 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1041 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1044 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
1045 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
1048 make_abnormal_goto_edges (basic_block bb
, bool for_call
)
1050 basic_block target_bb
;
1051 gimple_stmt_iterator gsi
;
1053 FOR_EACH_BB (target_bb
)
1055 for (gsi
= gsi_start_bb (target_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1057 gimple label_stmt
= gsi_stmt (gsi
);
1060 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
1063 target
= gimple_label_label (label_stmt
);
1065 /* Make an edge to every label block that has been marked as a
1066 potential target for a computed goto or a non-local goto. */
1067 if ((FORCED_LABEL (target
) && !for_call
)
1068 || (DECL_NONLOCAL (target
) && for_call
))
1070 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1074 if (!gsi_end_p (gsi
)
1075 && is_gimple_debug (gsi_stmt (gsi
)))
1076 gsi_next_nondebug (&gsi
);
1077 if (!gsi_end_p (gsi
))
1079 /* Make an edge to every setjmp-like call. */
1080 gimple call_stmt
= gsi_stmt (gsi
);
1081 if (is_gimple_call (call_stmt
)
1082 && (gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
))
1083 make_edge (bb
, target_bb
, EDGE_ABNORMAL
);
1088 /* Create edges for a goto statement at block BB. */
1091 make_goto_expr_edges (basic_block bb
)
1093 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1094 gimple goto_t
= gsi_stmt (last
);
1096 /* A simple GOTO creates normal edges. */
1097 if (simple_goto_p (goto_t
))
1099 tree dest
= gimple_goto_dest (goto_t
);
1100 basic_block label_bb
= label_to_block (dest
);
1101 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1102 e
->goto_locus
= gimple_location (goto_t
);
1103 gsi_remove (&last
, true);
1107 /* A computed GOTO creates abnormal edges. */
1108 make_abnormal_goto_edges (bb
, false);
1111 /* Create edges for an asm statement with labels at block BB. */
1114 make_gimple_asm_edges (basic_block bb
)
1116 gimple stmt
= last_stmt (bb
);
1117 int i
, n
= gimple_asm_nlabels (stmt
);
1119 for (i
= 0; i
< n
; ++i
)
1121 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1122 basic_block label_bb
= label_to_block (label
);
1123 make_edge (bb
, label_bb
, 0);
1127 /*---------------------------------------------------------------------------
1129 ---------------------------------------------------------------------------*/
1131 /* Cleanup useless labels in basic blocks. This is something we wish
1132 to do early because it allows us to group case labels before creating
1133 the edges for the CFG, and it speeds up block statement iterators in
1134 all passes later on.
1135 We rerun this pass after CFG is created, to get rid of the labels that
1136 are no longer referenced. After then we do not run it any more, since
1137 (almost) no new labels should be created. */
1139 /* A map from basic block index to the leading label of that block. */
1140 static struct label_record
1145 /* True if the label is referenced from somewhere. */
1149 /* Given LABEL return the first label in the same basic block. */
1152 main_block_label (tree label
)
1154 basic_block bb
= label_to_block (label
);
1155 tree main_label
= label_for_bb
[bb
->index
].label
;
1157 /* label_to_block possibly inserted undefined label into the chain. */
1160 label_for_bb
[bb
->index
].label
= label
;
1164 label_for_bb
[bb
->index
].used
= true;
1168 /* Clean up redundant labels within the exception tree. */
1171 cleanup_dead_labels_eh (void)
1178 if (cfun
->eh
== NULL
)
1181 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1182 if (lp
&& lp
->post_landing_pad
)
1184 lab
= main_block_label (lp
->post_landing_pad
);
1185 if (lab
!= lp
->post_landing_pad
)
1187 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1188 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1192 FOR_ALL_EH_REGION (r
)
1196 case ERT_MUST_NOT_THROW
:
1202 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1206 c
->label
= main_block_label (lab
);
1211 case ERT_ALLOWED_EXCEPTIONS
:
1212 lab
= r
->u
.allowed
.label
;
1214 r
->u
.allowed
.label
= main_block_label (lab
);
1220 /* Cleanup redundant labels. This is a three-step process:
1221 1) Find the leading label for each block.
1222 2) Redirect all references to labels to the leading labels.
1223 3) Cleanup all useless labels. */
1226 cleanup_dead_labels (void)
1229 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block
);
1231 /* Find a suitable label for each block. We use the first user-defined
1232 label if there is one, or otherwise just the first label we see. */
1235 gimple_stmt_iterator i
;
1237 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1240 gimple stmt
= gsi_stmt (i
);
1242 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1245 label
= gimple_label_label (stmt
);
1247 /* If we have not yet seen a label for the current block,
1248 remember this one and see if there are more labels. */
1249 if (!label_for_bb
[bb
->index
].label
)
1251 label_for_bb
[bb
->index
].label
= label
;
1255 /* If we did see a label for the current block already, but it
1256 is an artificially created label, replace it if the current
1257 label is a user defined label. */
1258 if (!DECL_ARTIFICIAL (label
)
1259 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1261 label_for_bb
[bb
->index
].label
= label
;
1267 /* Now redirect all jumps/branches to the selected label.
1268 First do so for each block ending in a control statement. */
1271 gimple stmt
= last_stmt (bb
);
1272 tree label
, new_label
;
1277 switch (gimple_code (stmt
))
1280 label
= gimple_cond_true_label (stmt
);
1283 new_label
= main_block_label (label
);
1284 if (new_label
!= label
)
1285 gimple_cond_set_true_label (stmt
, new_label
);
1288 label
= gimple_cond_false_label (stmt
);
1291 new_label
= main_block_label (label
);
1292 if (new_label
!= label
)
1293 gimple_cond_set_false_label (stmt
, new_label
);
1299 size_t i
, n
= gimple_switch_num_labels (stmt
);
1301 /* Replace all destination labels. */
1302 for (i
= 0; i
< n
; ++i
)
1304 tree case_label
= gimple_switch_label (stmt
, i
);
1305 label
= CASE_LABEL (case_label
);
1306 new_label
= main_block_label (label
);
1307 if (new_label
!= label
)
1308 CASE_LABEL (case_label
) = new_label
;
1315 int i
, n
= gimple_asm_nlabels (stmt
);
1317 for (i
= 0; i
< n
; ++i
)
1319 tree cons
= gimple_asm_label_op (stmt
, i
);
1320 tree label
= main_block_label (TREE_VALUE (cons
));
1321 TREE_VALUE (cons
) = label
;
1326 /* We have to handle gotos until they're removed, and we don't
1327 remove them until after we've created the CFG edges. */
1329 if (!computed_goto_p (stmt
))
1331 label
= gimple_goto_dest (stmt
);
1332 new_label
= main_block_label (label
);
1333 if (new_label
!= label
)
1334 gimple_goto_set_dest (stmt
, new_label
);
1338 case GIMPLE_TRANSACTION
:
1340 tree label
= gimple_transaction_label (stmt
);
1343 tree new_label
= main_block_label (label
);
1344 if (new_label
!= label
)
1345 gimple_transaction_set_label (stmt
, new_label
);
1355 /* Do the same for the exception region tree labels. */
1356 cleanup_dead_labels_eh ();
1358 /* Finally, purge dead labels. All user-defined labels and labels that
1359 can be the target of non-local gotos and labels which have their
1360 address taken are preserved. */
1363 gimple_stmt_iterator i
;
1364 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1366 if (!label_for_this_bb
)
1369 /* If the main label of the block is unused, we may still remove it. */
1370 if (!label_for_bb
[bb
->index
].used
)
1371 label_for_this_bb
= NULL
;
1373 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1376 gimple stmt
= gsi_stmt (i
);
1378 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1381 label
= gimple_label_label (stmt
);
1383 if (label
== label_for_this_bb
1384 || !DECL_ARTIFICIAL (label
)
1385 || DECL_NONLOCAL (label
)
1386 || FORCED_LABEL (label
))
1389 gsi_remove (&i
, true);
1393 free (label_for_bb
);
1396 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1397 the ones jumping to the same label.
1398 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1401 group_case_labels_stmt (gimple stmt
)
1403 int old_size
= gimple_switch_num_labels (stmt
);
1404 int i
, j
, new_size
= old_size
;
1405 basic_block default_bb
= NULL
;
1407 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1409 /* Look for possible opportunities to merge cases. */
1411 while (i
< old_size
)
1413 tree base_case
, base_high
;
1414 basic_block base_bb
;
1416 base_case
= gimple_switch_label (stmt
, i
);
1418 gcc_assert (base_case
);
1419 base_bb
= label_to_block (CASE_LABEL (base_case
));
1421 /* Discard cases that have the same destination as the
1423 if (base_bb
== default_bb
)
1425 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1431 base_high
= CASE_HIGH (base_case
)
1432 ? CASE_HIGH (base_case
)
1433 : CASE_LOW (base_case
);
1436 /* Try to merge case labels. Break out when we reach the end
1437 of the label vector or when we cannot merge the next case
1438 label with the current one. */
1439 while (i
< old_size
)
1441 tree merge_case
= gimple_switch_label (stmt
, i
);
1442 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1443 double_int bhp1
= tree_to_double_int (base_high
) + double_int_one
;
1445 /* Merge the cases if they jump to the same place,
1446 and their ranges are consecutive. */
1447 if (merge_bb
== base_bb
1448 && tree_to_double_int (CASE_LOW (merge_case
)) == bhp1
)
1450 base_high
= CASE_HIGH (merge_case
) ?
1451 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1452 CASE_HIGH (base_case
) = base_high
;
1453 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1462 /* Compress the case labels in the label vector, and adjust the
1463 length of the vector. */
1464 for (i
= 0, j
= 0; i
< new_size
; i
++)
1466 while (! gimple_switch_label (stmt
, j
))
1468 gimple_switch_set_label (stmt
, i
,
1469 gimple_switch_label (stmt
, j
++));
1472 gcc_assert (new_size
<= old_size
);
1473 gimple_switch_set_num_labels (stmt
, new_size
);
1476 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1477 and scan the sorted vector of cases. Combine the ones jumping to the
1481 group_case_labels (void)
1487 gimple stmt
= last_stmt (bb
);
1488 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1489 group_case_labels_stmt (stmt
);
1493 /* Checks whether we can merge block B into block A. */
1496 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1499 gimple_stmt_iterator gsi
;
1501 if (!single_succ_p (a
))
1504 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1507 if (single_succ (a
) != b
)
1510 if (!single_pred_p (b
))
1513 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1516 /* If A ends by a statement causing exceptions or something similar, we
1517 cannot merge the blocks. */
1518 stmt
= last_stmt (a
);
1519 if (stmt
&& stmt_ends_bb_p (stmt
))
1522 /* Do not allow a block with only a non-local label to be merged. */
1524 && gimple_code (stmt
) == GIMPLE_LABEL
1525 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1528 /* Examine the labels at the beginning of B. */
1529 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1532 stmt
= gsi_stmt (gsi
);
1533 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1535 lab
= gimple_label_label (stmt
);
1537 /* Do not remove user forced labels or for -O0 any user labels. */
1538 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1542 /* Protect the loop latches. */
1543 if (current_loops
&& b
->loop_father
->latch
== b
)
1546 /* It must be possible to eliminate all phi nodes in B. If ssa form
1547 is not up-to-date and a name-mapping is registered, we cannot eliminate
1548 any phis. Symbols marked for renaming are never a problem though. */
1549 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1551 gimple phi
= gsi_stmt (gsi
);
1552 /* Technically only new names matter. */
1553 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1557 /* When not optimizing, don't merge if we'd lose goto_locus. */
1559 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1561 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1562 gimple_stmt_iterator prev
, next
;
1563 prev
= gsi_last_nondebug_bb (a
);
1564 next
= gsi_after_labels (b
);
1565 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1566 gsi_next_nondebug (&next
);
1567 if ((gsi_end_p (prev
)
1568 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1569 && (gsi_end_p (next
)
1570 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1577 /* Replaces all uses of NAME by VAL. */
1580 replace_uses_by (tree name
, tree val
)
1582 imm_use_iterator imm_iter
;
1587 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1589 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1591 replace_exp (use
, val
);
1593 if (gimple_code (stmt
) == GIMPLE_PHI
)
1595 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1596 if (e
->flags
& EDGE_ABNORMAL
)
1598 /* This can only occur for virtual operands, since
1599 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1600 would prevent replacement. */
1601 gcc_checking_assert (virtual_operand_p (name
));
1602 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1607 if (gimple_code (stmt
) != GIMPLE_PHI
)
1609 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1610 gimple orig_stmt
= stmt
;
1613 /* Mark the block if we changed the last stmt in it. */
1614 if (cfgcleanup_altered_bbs
1615 && stmt_ends_bb_p (stmt
))
1616 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1618 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1619 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1620 only change sth from non-invariant to invariant, and only
1621 when propagating constants. */
1622 if (is_gimple_min_invariant (val
))
1623 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1625 tree op
= gimple_op (stmt
, i
);
1626 /* Operands may be empty here. For example, the labels
1627 of a GIMPLE_COND are nulled out following the creation
1628 of the corresponding CFG edges. */
1629 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1630 recompute_tree_invariant_for_addr_expr (op
);
1633 if (fold_stmt (&gsi
))
1634 stmt
= gsi_stmt (gsi
);
1636 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1637 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1643 gcc_checking_assert (has_zero_uses (name
));
1645 /* Also update the trees stored in loop structures. */
1650 FOR_EACH_LOOP (loop
, 0)
1652 substitute_in_loop_info (loop
, name
, val
);
1657 /* Merge block B into block A. */
1660 gimple_merge_blocks (basic_block a
, basic_block b
)
1662 gimple_stmt_iterator last
, gsi
, psi
;
1665 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1667 /* Remove all single-valued PHI nodes from block B of the form
1668 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1669 gsi
= gsi_last_bb (a
);
1670 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1672 gimple phi
= gsi_stmt (psi
);
1673 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1675 bool may_replace_uses
= (virtual_operand_p (def
)
1676 || may_propagate_copy (def
, use
));
1678 /* In case we maintain loop closed ssa form, do not propagate arguments
1679 of loop exit phi nodes. */
1681 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1682 && !virtual_operand_p (def
)
1683 && TREE_CODE (use
) == SSA_NAME
1684 && a
->loop_father
!= b
->loop_father
)
1685 may_replace_uses
= false;
1687 if (!may_replace_uses
)
1689 gcc_assert (!virtual_operand_p (def
));
1691 /* Note that just emitting the copies is fine -- there is no problem
1692 with ordering of phi nodes. This is because A is the single
1693 predecessor of B, therefore results of the phi nodes cannot
1694 appear as arguments of the phi nodes. */
1695 copy
= gimple_build_assign (def
, use
);
1696 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1697 remove_phi_node (&psi
, false);
1701 /* If we deal with a PHI for virtual operands, we can simply
1702 propagate these without fussing with folding or updating
1704 if (virtual_operand_p (def
))
1706 imm_use_iterator iter
;
1707 use_operand_p use_p
;
1710 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1711 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1712 SET_USE (use_p
, use
);
1714 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1715 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1718 replace_uses_by (def
, use
);
1720 remove_phi_node (&psi
, true);
1724 /* Ensure that B follows A. */
1725 move_block_after (b
, a
);
1727 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1728 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1730 /* Remove labels from B and set gimple_bb to A for other statements. */
1731 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1733 gimple stmt
= gsi_stmt (gsi
);
1734 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1736 tree label
= gimple_label_label (stmt
);
1739 gsi_remove (&gsi
, false);
1741 /* Now that we can thread computed gotos, we might have
1742 a situation where we have a forced label in block B
1743 However, the label at the start of block B might still be
1744 used in other ways (think about the runtime checking for
1745 Fortran assigned gotos). So we can not just delete the
1746 label. Instead we move the label to the start of block A. */
1747 if (FORCED_LABEL (label
))
1749 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1750 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1752 /* Other user labels keep around in a form of a debug stmt. */
1753 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1755 gimple dbg
= gimple_build_debug_bind (label
,
1758 gimple_debug_bind_reset_value (dbg
);
1759 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1762 lp_nr
= EH_LANDING_PAD_NR (label
);
1765 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1766 lp
->post_landing_pad
= NULL
;
1771 gimple_set_bb (stmt
, a
);
1776 /* Merge the sequences. */
1777 last
= gsi_last_bb (a
);
1778 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1779 set_bb_seq (b
, NULL
);
1781 if (cfgcleanup_altered_bbs
)
1782 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1786 /* Return the one of two successors of BB that is not reachable by a
1787 complex edge, if there is one. Else, return BB. We use
1788 this in optimizations that use post-dominators for their heuristics,
1789 to catch the cases in C++ where function calls are involved. */
1792 single_noncomplex_succ (basic_block bb
)
1795 if (EDGE_COUNT (bb
->succs
) != 2)
1798 e0
= EDGE_SUCC (bb
, 0);
1799 e1
= EDGE_SUCC (bb
, 1);
1800 if (e0
->flags
& EDGE_COMPLEX
)
1802 if (e1
->flags
& EDGE_COMPLEX
)
1808 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1811 notice_special_calls (gimple call
)
1813 int flags
= gimple_call_flags (call
);
1815 if (flags
& ECF_MAY_BE_ALLOCA
)
1816 cfun
->calls_alloca
= true;
1817 if (flags
& ECF_RETURNS_TWICE
)
1818 cfun
->calls_setjmp
= true;
1822 /* Clear flags set by notice_special_calls. Used by dead code removal
1823 to update the flags. */
1826 clear_special_calls (void)
1828 cfun
->calls_alloca
= false;
1829 cfun
->calls_setjmp
= false;
1832 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1835 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1837 /* Since this block is no longer reachable, we can just delete all
1838 of its PHI nodes. */
1839 remove_phi_nodes (bb
);
1841 /* Remove edges to BB's successors. */
1842 while (EDGE_COUNT (bb
->succs
) > 0)
1843 remove_edge (EDGE_SUCC (bb
, 0));
1847 /* Remove statements of basic block BB. */
1850 remove_bb (basic_block bb
)
1852 gimple_stmt_iterator i
;
1856 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1857 if (dump_flags
& TDF_DETAILS
)
1859 dump_bb (dump_file
, bb
, 0, dump_flags
);
1860 fprintf (dump_file
, "\n");
1866 struct loop
*loop
= bb
->loop_father
;
1868 /* If a loop gets removed, clean up the information associated
1870 if (loop
->latch
== bb
1871 || loop
->header
== bb
)
1872 free_numbers_of_iterations_estimates_loop (loop
);
1875 /* Remove all the instructions in the block. */
1876 if (bb_seq (bb
) != NULL
)
1878 /* Walk backwards so as to get a chance to substitute all
1879 released DEFs into debug stmts. See
1880 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1882 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
1884 gimple stmt
= gsi_stmt (i
);
1885 if (gimple_code (stmt
) == GIMPLE_LABEL
1886 && (FORCED_LABEL (gimple_label_label (stmt
))
1887 || DECL_NONLOCAL (gimple_label_label (stmt
))))
1890 gimple_stmt_iterator new_gsi
;
1892 /* A non-reachable non-local label may still be referenced.
1893 But it no longer needs to carry the extra semantics of
1895 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
1897 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
1898 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
1901 new_bb
= bb
->prev_bb
;
1902 new_gsi
= gsi_start_bb (new_bb
);
1903 gsi_remove (&i
, false);
1904 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
1908 /* Release SSA definitions if we are in SSA. Note that we
1909 may be called when not in SSA. For example,
1910 final_cleanup calls this function via
1911 cleanup_tree_cfg. */
1912 if (gimple_in_ssa_p (cfun
))
1913 release_defs (stmt
);
1915 gsi_remove (&i
, true);
1919 i
= gsi_last_bb (bb
);
1925 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
1926 bb
->il
.gimple
.seq
= NULL
;
1927 bb
->il
.gimple
.phi_nodes
= NULL
;
1931 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1932 predicate VAL, return the edge that will be taken out of the block.
1933 If VAL does not match a unique edge, NULL is returned. */
1936 find_taken_edge (basic_block bb
, tree val
)
1940 stmt
= last_stmt (bb
);
1943 gcc_assert (is_ctrl_stmt (stmt
));
1948 if (!is_gimple_min_invariant (val
))
1951 if (gimple_code (stmt
) == GIMPLE_COND
)
1952 return find_taken_edge_cond_expr (bb
, val
);
1954 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
1955 return find_taken_edge_switch_expr (bb
, val
);
1957 if (computed_goto_p (stmt
))
1959 /* Only optimize if the argument is a label, if the argument is
1960 not a label then we can not construct a proper CFG.
1962 It may be the case that we only need to allow the LABEL_REF to
1963 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1964 appear inside a LABEL_EXPR just to be safe. */
1965 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
1966 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
1967 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
1974 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1975 statement, determine which of the outgoing edges will be taken out of the
1976 block. Return NULL if either edge may be taken. */
1979 find_taken_edge_computed_goto (basic_block bb
, tree val
)
1984 dest
= label_to_block (val
);
1987 e
= find_edge (bb
, dest
);
1988 gcc_assert (e
!= NULL
);
1994 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1995 statement, determine which of the two edges will be taken out of the
1996 block. Return NULL if either edge may be taken. */
1999 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2001 edge true_edge
, false_edge
;
2003 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2005 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2006 return (integer_zerop (val
) ? false_edge
: true_edge
);
2009 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2010 statement, determine which edge will be taken out of the block. Return
2011 NULL if any edge may be taken. */
2014 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2016 basic_block dest_bb
;
2021 switch_stmt
= last_stmt (bb
);
2022 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2023 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2025 e
= find_edge (bb
, dest_bb
);
2031 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2032 We can make optimal use here of the fact that the case labels are
2033 sorted: We can do a binary search for a case matching VAL. */
2036 find_case_label_for_value (gimple switch_stmt
, tree val
)
2038 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2039 tree default_case
= gimple_switch_default_label (switch_stmt
);
2041 for (low
= 0, high
= n
; high
- low
> 1; )
2043 size_t i
= (high
+ low
) / 2;
2044 tree t
= gimple_switch_label (switch_stmt
, i
);
2047 /* Cache the result of comparing CASE_LOW and val. */
2048 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2055 if (CASE_HIGH (t
) == NULL
)
2057 /* A singe-valued case label. */
2063 /* A case range. We can only handle integer ranges. */
2064 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2069 return default_case
;
2073 /* Dump a basic block on stderr. */
2076 gimple_debug_bb (basic_block bb
)
2078 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2082 /* Dump basic block with index N on stderr. */
2085 gimple_debug_bb_n (int n
)
2087 gimple_debug_bb (BASIC_BLOCK (n
));
2088 return BASIC_BLOCK (n
);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2104 /* Dump the program showing basic block boundaries on the given FILE.
2106 FLAGS are the same used by the tree dumping functions (see TDF_* in
2110 gimple_dump_cfg (FILE *file
, int flags
)
2112 if (flags
& TDF_DETAILS
)
2114 dump_function_header (file
, current_function_decl
, flags
);
2115 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2116 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2119 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2120 fprintf (file
, "\n");
2123 if (flags
& TDF_STATS
)
2124 dump_cfg_stats (file
);
2126 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2130 /* Dump CFG statistics on FILE. */
2133 dump_cfg_stats (FILE *file
)
2135 static long max_num_merged_labels
= 0;
2136 unsigned long size
, total
= 0;
2139 const char * const fmt_str
= "%-30s%-13s%12s\n";
2140 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2141 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2142 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2143 const char *funcname
= current_function_name ();
2145 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2147 fprintf (file
, "---------------------------------------------------------\n");
2148 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2149 fprintf (file
, fmt_str
, "", " instances ", "used ");
2150 fprintf (file
, "---------------------------------------------------------\n");
2152 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2154 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2155 SCALE (size
), LABEL (size
));
2159 num_edges
+= EDGE_COUNT (bb
->succs
);
2160 size
= num_edges
* sizeof (struct edge_def
);
2162 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2164 fprintf (file
, "---------------------------------------------------------\n");
2165 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2167 fprintf (file
, "---------------------------------------------------------\n");
2168 fprintf (file
, "\n");
2170 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2171 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2173 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2174 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2176 fprintf (file
, "\n");
2180 /* Dump CFG statistics on stderr. Keep extern so that it's always
2181 linked in the final executable. */
2184 debug_cfg_stats (void)
2186 dump_cfg_stats (stderr
);
2189 /*---------------------------------------------------------------------------
2190 Miscellaneous helpers
2191 ---------------------------------------------------------------------------*/
2193 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2194 flow. Transfers of control flow associated with EH are excluded. */
2197 call_can_make_abnormal_goto (gimple t
)
2199 /* If the function has no non-local labels, then a call cannot make an
2200 abnormal transfer of control. */
2201 if (!cfun
->has_nonlocal_label
2202 && !cfun
->calls_setjmp
)
2205 /* Likewise if the call has no side effects. */
2206 if (!gimple_has_side_effects (t
))
2209 /* Likewise if the called function is leaf. */
2210 if (gimple_call_flags (t
) & ECF_LEAF
)
2217 /* Return true if T can make an abnormal transfer of control flow.
2218 Transfers of control flow associated with EH are excluded. */
2221 stmt_can_make_abnormal_goto (gimple t
)
2223 if (computed_goto_p (t
))
2225 if (is_gimple_call (t
))
2226 return call_can_make_abnormal_goto (t
);
2231 /* Return true if T represents a stmt that always transfers control. */
2234 is_ctrl_stmt (gimple t
)
2236 switch (gimple_code (t
))
2250 /* Return true if T is a statement that may alter the flow of control
2251 (e.g., a call to a non-returning function). */
2254 is_ctrl_altering_stmt (gimple t
)
2258 switch (gimple_code (t
))
2262 int flags
= gimple_call_flags (t
);
2264 /* A call alters control flow if it can make an abnormal goto. */
2265 if (call_can_make_abnormal_goto (t
))
2268 /* A call also alters control flow if it does not return. */
2269 if (flags
& ECF_NORETURN
)
2272 /* TM ending statements have backedges out of the transaction.
2273 Return true so we split the basic block containing them.
2274 Note that the TM_BUILTIN test is merely an optimization. */
2275 if ((flags
& ECF_TM_BUILTIN
)
2276 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2279 /* BUILT_IN_RETURN call is same as return statement. */
2280 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2285 case GIMPLE_EH_DISPATCH
:
2286 /* EH_DISPATCH branches to the individual catch handlers at
2287 this level of a try or allowed-exceptions region. It can
2288 fallthru to the next statement as well. */
2292 if (gimple_asm_nlabels (t
) > 0)
2297 /* OpenMP directives alter control flow. */
2300 case GIMPLE_TRANSACTION
:
2301 /* A transaction start alters control flow. */
2308 /* If a statement can throw, it alters control flow. */
2309 return stmt_can_throw_internal (t
);
2313 /* Return true if T is a simple local goto. */
2316 simple_goto_p (gimple t
)
2318 return (gimple_code (t
) == GIMPLE_GOTO
2319 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2323 /* Return true if STMT should start a new basic block. PREV_STMT is
2324 the statement preceding STMT. It is used when STMT is a label or a
2325 case label. Labels should only start a new basic block if their
2326 previous statement wasn't a label. Otherwise, sequence of labels
2327 would generate unnecessary basic blocks that only contain a single
2331 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2336 /* Labels start a new basic block only if the preceding statement
2337 wasn't a label of the same type. This prevents the creation of
2338 consecutive blocks that have nothing but a single label. */
2339 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2341 /* Nonlocal and computed GOTO targets always start a new block. */
2342 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2343 || FORCED_LABEL (gimple_label_label (stmt
)))
2346 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2348 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2351 cfg_stats
.num_merged_labels
++;
2357 else if (gimple_code (stmt
) == GIMPLE_CALL
2358 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2359 /* setjmp acts similar to a nonlocal GOTO target and thus should
2360 start a new block. */
2367 /* Return true if T should end a basic block. */
2370 stmt_ends_bb_p (gimple t
)
2372 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2375 /* Remove block annotations and other data structures. */
2378 delete_tree_cfg_annotations (void)
2380 vec_free (label_to_block_map
);
2384 /* Return the first statement in basic block BB. */
2387 first_stmt (basic_block bb
)
2389 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2392 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2400 /* Return the first non-label statement in basic block BB. */
2403 first_non_label_stmt (basic_block bb
)
2405 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2406 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2408 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2411 /* Return the last statement in basic block BB. */
2414 last_stmt (basic_block bb
)
2416 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2419 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2427 /* Return the last statement of an otherwise empty block. Return NULL
2428 if the block is totally empty, or if it contains more than one
2432 last_and_only_stmt (basic_block bb
)
2434 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2440 last
= gsi_stmt (i
);
2441 gsi_prev_nondebug (&i
);
2445 /* Empty statements should no longer appear in the instruction stream.
2446 Everything that might have appeared before should be deleted by
2447 remove_useless_stmts, and the optimizers should just gsi_remove
2448 instead of smashing with build_empty_stmt.
2450 Thus the only thing that should appear here in a block containing
2451 one executable statement is a label. */
2452 prev
= gsi_stmt (i
);
2453 if (gimple_code (prev
) == GIMPLE_LABEL
)
2459 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2462 reinstall_phi_args (edge new_edge
, edge old_edge
)
2464 edge_var_map_vector
*v
;
2467 gimple_stmt_iterator phis
;
2469 v
= redirect_edge_var_map_vector (old_edge
);
2473 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2474 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2475 i
++, gsi_next (&phis
))
2477 gimple phi
= gsi_stmt (phis
);
2478 tree result
= redirect_edge_var_map_result (vm
);
2479 tree arg
= redirect_edge_var_map_def (vm
);
2481 gcc_assert (result
== gimple_phi_result (phi
));
2483 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
2486 redirect_edge_var_map_clear (old_edge
);
2489 /* Returns the basic block after which the new basic block created
2490 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2491 near its "logical" location. This is of most help to humans looking
2492 at debugging dumps. */
2495 split_edge_bb_loc (edge edge_in
)
2497 basic_block dest
= edge_in
->dest
;
2498 basic_block dest_prev
= dest
->prev_bb
;
2502 edge e
= find_edge (dest_prev
, dest
);
2503 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2504 return edge_in
->src
;
2509 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2510 Abort on abnormal edges. */
2513 gimple_split_edge (edge edge_in
)
2515 basic_block new_bb
, after_bb
, dest
;
2518 /* Abnormal edges cannot be split. */
2519 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2521 dest
= edge_in
->dest
;
2523 after_bb
= split_edge_bb_loc (edge_in
);
2525 new_bb
= create_empty_bb (after_bb
);
2526 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2527 new_bb
->count
= edge_in
->count
;
2528 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2529 new_edge
->probability
= REG_BR_PROB_BASE
;
2530 new_edge
->count
= edge_in
->count
;
2532 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2533 gcc_assert (e
== edge_in
);
2534 reinstall_phi_args (new_edge
, e
);
2540 /* Verify properties of the address expression T with base object BASE. */
2543 verify_address (tree t
, tree base
)
2546 bool old_side_effects
;
2548 bool new_side_effects
;
2550 old_constant
= TREE_CONSTANT (t
);
2551 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2553 recompute_tree_invariant_for_addr_expr (t
);
2554 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2555 new_constant
= TREE_CONSTANT (t
);
2557 if (old_constant
!= new_constant
)
2559 error ("constant not recomputed when ADDR_EXPR changed");
2562 if (old_side_effects
!= new_side_effects
)
2564 error ("side effects not recomputed when ADDR_EXPR changed");
2568 if (!(TREE_CODE (base
) == VAR_DECL
2569 || TREE_CODE (base
) == PARM_DECL
2570 || TREE_CODE (base
) == RESULT_DECL
))
2573 if (DECL_GIMPLE_REG_P (base
))
2575 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2582 /* Callback for walk_tree, check that all elements with address taken are
2583 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2584 inside a PHI node. */
2587 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2594 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2595 #define CHECK_OP(N, MSG) \
2596 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2597 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2599 switch (TREE_CODE (t
))
2602 if (SSA_NAME_IN_FREE_LIST (t
))
2604 error ("SSA name in freelist but still referenced");
2610 error ("INDIRECT_REF in gimple IL");
2614 x
= TREE_OPERAND (t
, 0);
2615 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2616 || !is_gimple_mem_ref_addr (x
))
2618 error ("invalid first operand of MEM_REF");
2621 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2622 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2624 error ("invalid offset operand of MEM_REF");
2625 return TREE_OPERAND (t
, 1);
2627 if (TREE_CODE (x
) == ADDR_EXPR
2628 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
2634 x
= fold (ASSERT_EXPR_COND (t
));
2635 if (x
== boolean_false_node
)
2637 error ("ASSERT_EXPR with an always-false condition");
2643 error ("MODIFY_EXPR not expected while having tuples");
2650 gcc_assert (is_gimple_address (t
));
2652 /* Skip any references (they will be checked when we recurse down the
2653 tree) and ensure that any variable used as a prefix is marked
2655 for (x
= TREE_OPERAND (t
, 0);
2656 handled_component_p (x
);
2657 x
= TREE_OPERAND (x
, 0))
2660 if ((tem
= verify_address (t
, x
)))
2663 if (!(TREE_CODE (x
) == VAR_DECL
2664 || TREE_CODE (x
) == PARM_DECL
2665 || TREE_CODE (x
) == RESULT_DECL
))
2668 if (!TREE_ADDRESSABLE (x
))
2670 error ("address taken, but ADDRESSABLE bit not set");
2678 x
= COND_EXPR_COND (t
);
2679 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2681 error ("non-integral used in condition");
2684 if (!is_gimple_condexpr (x
))
2686 error ("invalid conditional operand");
2691 case NON_LVALUE_EXPR
:
2692 case TRUTH_NOT_EXPR
:
2696 case FIX_TRUNC_EXPR
:
2701 CHECK_OP (0, "invalid operand to unary operator");
2707 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2709 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2713 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2715 if (!tree_fits_uhwi_p (TREE_OPERAND (t
, 1))
2716 || !tree_fits_uhwi_p (TREE_OPERAND (t
, 2)))
2718 error ("invalid position or size operand to BIT_FIELD_REF");
2721 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2722 && (TYPE_PRECISION (TREE_TYPE (t
))
2723 != tree_to_uhwi (TREE_OPERAND (t
, 1))))
2725 error ("integral result type precision does not match "
2726 "field size of BIT_FIELD_REF");
2729 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2730 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2731 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2732 != tree_to_uhwi (TREE_OPERAND (t
, 1))))
2734 error ("mode precision of non-integral result does not "
2735 "match field size of BIT_FIELD_REF");
2739 t
= TREE_OPERAND (t
, 0);
2744 case ARRAY_RANGE_REF
:
2745 case VIEW_CONVERT_EXPR
:
2746 /* We have a nest of references. Verify that each of the operands
2747 that determine where to reference is either a constant or a variable,
2748 verify that the base is valid, and then show we've already checked
2750 while (handled_component_p (t
))
2752 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2753 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2754 else if (TREE_CODE (t
) == ARRAY_REF
2755 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2757 CHECK_OP (1, "invalid array index");
2758 if (TREE_OPERAND (t
, 2))
2759 CHECK_OP (2, "invalid array lower bound");
2760 if (TREE_OPERAND (t
, 3))
2761 CHECK_OP (3, "invalid array stride");
2763 else if (TREE_CODE (t
) == BIT_FIELD_REF
2764 || TREE_CODE (t
) == REALPART_EXPR
2765 || TREE_CODE (t
) == IMAGPART_EXPR
)
2767 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2772 t
= TREE_OPERAND (t
, 0);
2775 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2777 error ("invalid reference prefix");
2784 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2785 POINTER_PLUS_EXPR. */
2786 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2788 error ("invalid operand to plus/minus, type is a pointer");
2791 CHECK_OP (0, "invalid operand to binary operator");
2792 CHECK_OP (1, "invalid operand to binary operator");
2795 case POINTER_PLUS_EXPR
:
2796 /* Check to make sure the first operand is a pointer or reference type. */
2797 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2799 error ("invalid operand to pointer plus, first operand is not a pointer");
2802 /* Check to make sure the second operand is a ptrofftype. */
2803 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2805 error ("invalid operand to pointer plus, second operand is not an "
2806 "integer type of appropriate width");
2816 case UNORDERED_EXPR
:
2825 case TRUNC_DIV_EXPR
:
2827 case FLOOR_DIV_EXPR
:
2828 case ROUND_DIV_EXPR
:
2829 case TRUNC_MOD_EXPR
:
2831 case FLOOR_MOD_EXPR
:
2832 case ROUND_MOD_EXPR
:
2834 case EXACT_DIV_EXPR
:
2844 CHECK_OP (0, "invalid operand to binary operator");
2845 CHECK_OP (1, "invalid operand to binary operator");
2849 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2853 case CASE_LABEL_EXPR
:
2856 error ("invalid CASE_CHAIN");
2870 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2871 Returns true if there is an error, otherwise false. */
2874 verify_types_in_gimple_min_lval (tree expr
)
2878 if (is_gimple_id (expr
))
2881 if (TREE_CODE (expr
) != TARGET_MEM_REF
2882 && TREE_CODE (expr
) != MEM_REF
)
2884 error ("invalid expression for min lvalue");
2888 /* TARGET_MEM_REFs are strange beasts. */
2889 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
2892 op
= TREE_OPERAND (expr
, 0);
2893 if (!is_gimple_val (op
))
2895 error ("invalid operand in indirect reference");
2896 debug_generic_stmt (op
);
2899 /* Memory references now generally can involve a value conversion. */
2904 /* Verify if EXPR is a valid GIMPLE reference expression. If
2905 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2906 if there is an error, otherwise false. */
2909 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
2911 while (handled_component_p (expr
))
2913 tree op
= TREE_OPERAND (expr
, 0);
2915 if (TREE_CODE (expr
) == ARRAY_REF
2916 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
2918 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
2919 || (TREE_OPERAND (expr
, 2)
2920 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
2921 || (TREE_OPERAND (expr
, 3)
2922 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
2924 error ("invalid operands to array reference");
2925 debug_generic_stmt (expr
);
2930 /* Verify if the reference array element types are compatible. */
2931 if (TREE_CODE (expr
) == ARRAY_REF
2932 && !useless_type_conversion_p (TREE_TYPE (expr
),
2933 TREE_TYPE (TREE_TYPE (op
))))
2935 error ("type mismatch in array reference");
2936 debug_generic_stmt (TREE_TYPE (expr
));
2937 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2940 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
2941 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
2942 TREE_TYPE (TREE_TYPE (op
))))
2944 error ("type mismatch in array range reference");
2945 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
2946 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2950 if ((TREE_CODE (expr
) == REALPART_EXPR
2951 || TREE_CODE (expr
) == IMAGPART_EXPR
)
2952 && !useless_type_conversion_p (TREE_TYPE (expr
),
2953 TREE_TYPE (TREE_TYPE (op
))))
2955 error ("type mismatch in real/imagpart reference");
2956 debug_generic_stmt (TREE_TYPE (expr
));
2957 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
2961 if (TREE_CODE (expr
) == COMPONENT_REF
2962 && !useless_type_conversion_p (TREE_TYPE (expr
),
2963 TREE_TYPE (TREE_OPERAND (expr
, 1))))
2965 error ("type mismatch in component reference");
2966 debug_generic_stmt (TREE_TYPE (expr
));
2967 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
2971 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
2973 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2974 that their operand is not an SSA name or an invariant when
2975 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2976 bug). Otherwise there is nothing to verify, gross mismatches at
2977 most invoke undefined behavior. */
2979 && (TREE_CODE (op
) == SSA_NAME
2980 || is_gimple_min_invariant (op
)))
2982 error ("conversion of an SSA_NAME on the left hand side");
2983 debug_generic_stmt (expr
);
2986 else if (TREE_CODE (op
) == SSA_NAME
2987 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
2989 error ("conversion of register to a different size");
2990 debug_generic_stmt (expr
);
2993 else if (!handled_component_p (op
))
3000 if (TREE_CODE (expr
) == MEM_REF
)
3002 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3004 error ("invalid address operand in MEM_REF");
3005 debug_generic_stmt (expr
);
3008 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3009 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3011 error ("invalid offset operand in MEM_REF");
3012 debug_generic_stmt (expr
);
3016 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3018 if (!TMR_BASE (expr
)
3019 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3021 error ("invalid address operand in TARGET_MEM_REF");
3024 if (!TMR_OFFSET (expr
)
3025 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3026 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3028 error ("invalid offset operand in TARGET_MEM_REF");
3029 debug_generic_stmt (expr
);
3034 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3035 && verify_types_in_gimple_min_lval (expr
));
3038 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3039 list of pointer-to types that is trivially convertible to DEST. */
3042 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
3046 if (!TYPE_POINTER_TO (src_obj
))
3049 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3050 if (useless_type_conversion_p (dest
, src
))
3056 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3057 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3060 valid_fixed_convert_types_p (tree type1
, tree type2
)
3062 return (FIXED_POINT_TYPE_P (type1
)
3063 && (INTEGRAL_TYPE_P (type2
)
3064 || SCALAR_FLOAT_TYPE_P (type2
)
3065 || FIXED_POINT_TYPE_P (type2
)));
3068 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3069 is a problem, otherwise false. */
3072 verify_gimple_call (gimple stmt
)
3074 tree fn
= gimple_call_fn (stmt
);
3075 tree fntype
, fndecl
;
3078 if (gimple_call_internal_p (stmt
))
3082 error ("gimple call has two targets");
3083 debug_generic_stmt (fn
);
3091 error ("gimple call has no target");
3096 if (fn
&& !is_gimple_call_addr (fn
))
3098 error ("invalid function in gimple call");
3099 debug_generic_stmt (fn
);
3104 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3105 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3106 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3108 error ("non-function in gimple call");
3112 fndecl
= gimple_call_fndecl (stmt
);
3114 && TREE_CODE (fndecl
) == FUNCTION_DECL
3115 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3116 && !DECL_PURE_P (fndecl
)
3117 && !TREE_READONLY (fndecl
))
3119 error ("invalid pure const state for function");
3123 if (gimple_call_lhs (stmt
)
3124 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3125 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3127 error ("invalid LHS in gimple call");
3131 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3133 error ("LHS in noreturn call");
3137 fntype
= gimple_call_fntype (stmt
);
3139 && gimple_call_lhs (stmt
)
3140 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3142 /* ??? At least C++ misses conversions at assignments from
3143 void * call results.
3144 ??? Java is completely off. Especially with functions
3145 returning java.lang.Object.
3146 For now simply allow arbitrary pointer type conversions. */
3147 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3148 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3150 error ("invalid conversion in gimple call");
3151 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3152 debug_generic_stmt (TREE_TYPE (fntype
));
3156 if (gimple_call_chain (stmt
)
3157 && !is_gimple_val (gimple_call_chain (stmt
)))
3159 error ("invalid static chain in gimple call");
3160 debug_generic_stmt (gimple_call_chain (stmt
));
3164 /* If there is a static chain argument, this should not be an indirect
3165 call, and the decl should have DECL_STATIC_CHAIN set. */
3166 if (gimple_call_chain (stmt
))
3168 if (!gimple_call_fndecl (stmt
))
3170 error ("static chain in indirect gimple call");
3173 fn
= TREE_OPERAND (fn
, 0);
3175 if (!DECL_STATIC_CHAIN (fn
))
3177 error ("static chain with function that doesn%'t use one");
3182 /* ??? The C frontend passes unpromoted arguments in case it
3183 didn't see a function declaration before the call. So for now
3184 leave the call arguments mostly unverified. Once we gimplify
3185 unit-at-a-time we have a chance to fix this. */
3187 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3189 tree arg
= gimple_call_arg (stmt
, i
);
3190 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3191 && !is_gimple_val (arg
))
3192 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3193 && !is_gimple_lvalue (arg
)))
3195 error ("invalid argument to gimple call");
3196 debug_generic_expr (arg
);
3204 /* Verifies the gimple comparison with the result type TYPE and
3205 the operands OP0 and OP1. */
3208 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3210 tree op0_type
= TREE_TYPE (op0
);
3211 tree op1_type
= TREE_TYPE (op1
);
3213 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3215 error ("invalid operands in gimple comparison");
3219 /* For comparisons we do not have the operations type as the
3220 effective type the comparison is carried out in. Instead
3221 we require that either the first operand is trivially
3222 convertible into the second, or the other way around.
3223 Because we special-case pointers to void we allow
3224 comparisons of pointers with the same mode as well. */
3225 if (!useless_type_conversion_p (op0_type
, op1_type
)
3226 && !useless_type_conversion_p (op1_type
, op0_type
)
3227 && (!POINTER_TYPE_P (op0_type
)
3228 || !POINTER_TYPE_P (op1_type
)
3229 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3231 error ("mismatching comparison operand types");
3232 debug_generic_expr (op0_type
);
3233 debug_generic_expr (op1_type
);
3237 /* The resulting type of a comparison may be an effective boolean type. */
3238 if (INTEGRAL_TYPE_P (type
)
3239 && (TREE_CODE (type
) == BOOLEAN_TYPE
3240 || TYPE_PRECISION (type
) == 1))
3242 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3243 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3245 error ("vector comparison returning a boolean");
3246 debug_generic_expr (op0_type
);
3247 debug_generic_expr (op1_type
);
3251 /* Or an integer vector type with the same size and element count
3252 as the comparison operand types. */
3253 else if (TREE_CODE (type
) == VECTOR_TYPE
3254 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3256 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3257 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3259 error ("non-vector operands in vector comparison");
3260 debug_generic_expr (op0_type
);
3261 debug_generic_expr (op1_type
);
3265 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3266 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3267 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3268 /* The result of a vector comparison is of signed
3270 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3272 error ("invalid vector comparison resulting type");
3273 debug_generic_expr (type
);
3279 error ("bogus comparison result type");
3280 debug_generic_expr (type
);
3287 /* Verify a gimple assignment statement STMT with an unary rhs.
3288 Returns true if anything is wrong. */
3291 verify_gimple_assign_unary (gimple stmt
)
3293 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3294 tree lhs
= gimple_assign_lhs (stmt
);
3295 tree lhs_type
= TREE_TYPE (lhs
);
3296 tree rhs1
= gimple_assign_rhs1 (stmt
);
3297 tree rhs1_type
= TREE_TYPE (rhs1
);
3299 if (!is_gimple_reg (lhs
))
3301 error ("non-register as LHS of unary operation");
3305 if (!is_gimple_val (rhs1
))
3307 error ("invalid operand in unary operation");
3311 /* First handle conversions. */
3316 /* Allow conversions from pointer type to integral type only if
3317 there is no sign or zero extension involved.
3318 For targets were the precision of ptrofftype doesn't match that
3319 of pointers we need to allow arbitrary conversions to ptrofftype. */
3320 if ((POINTER_TYPE_P (lhs_type
)
3321 && INTEGRAL_TYPE_P (rhs1_type
))
3322 || (POINTER_TYPE_P (rhs1_type
)
3323 && INTEGRAL_TYPE_P (lhs_type
)
3324 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3325 || ptrofftype_p (sizetype
))))
3328 /* Allow conversion from integral to offset type and vice versa. */
3329 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3330 && INTEGRAL_TYPE_P (rhs1_type
))
3331 || (INTEGRAL_TYPE_P (lhs_type
)
3332 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3335 /* Otherwise assert we are converting between types of the
3337 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3339 error ("invalid types in nop conversion");
3340 debug_generic_expr (lhs_type
);
3341 debug_generic_expr (rhs1_type
);
3348 case ADDR_SPACE_CONVERT_EXPR
:
3350 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3351 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3352 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3354 error ("invalid types in address space conversion");
3355 debug_generic_expr (lhs_type
);
3356 debug_generic_expr (rhs1_type
);
3363 case FIXED_CONVERT_EXPR
:
3365 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3366 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3368 error ("invalid types in fixed-point conversion");
3369 debug_generic_expr (lhs_type
);
3370 debug_generic_expr (rhs1_type
);
3379 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3380 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3381 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3383 error ("invalid types in conversion to floating point");
3384 debug_generic_expr (lhs_type
);
3385 debug_generic_expr (rhs1_type
);
3392 case FIX_TRUNC_EXPR
:
3394 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3395 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3396 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3398 error ("invalid types in conversion to integer");
3399 debug_generic_expr (lhs_type
);
3400 debug_generic_expr (rhs1_type
);
3407 case VEC_UNPACK_HI_EXPR
:
3408 case VEC_UNPACK_LO_EXPR
:
3409 case REDUC_MAX_EXPR
:
3410 case REDUC_MIN_EXPR
:
3411 case REDUC_PLUS_EXPR
:
3412 case VEC_UNPACK_FLOAT_HI_EXPR
:
3413 case VEC_UNPACK_FLOAT_LO_EXPR
:
3421 case NON_LVALUE_EXPR
:
3429 /* For the remaining codes assert there is no conversion involved. */
3430 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3432 error ("non-trivial conversion in unary operation");
3433 debug_generic_expr (lhs_type
);
3434 debug_generic_expr (rhs1_type
);
3441 /* Verify a gimple assignment statement STMT with a binary rhs.
3442 Returns true if anything is wrong. */
3445 verify_gimple_assign_binary (gimple stmt
)
3447 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3448 tree lhs
= gimple_assign_lhs (stmt
);
3449 tree lhs_type
= TREE_TYPE (lhs
);
3450 tree rhs1
= gimple_assign_rhs1 (stmt
);
3451 tree rhs1_type
= TREE_TYPE (rhs1
);
3452 tree rhs2
= gimple_assign_rhs2 (stmt
);
3453 tree rhs2_type
= TREE_TYPE (rhs2
);
3455 if (!is_gimple_reg (lhs
))
3457 error ("non-register as LHS of binary operation");
3461 if (!is_gimple_val (rhs1
)
3462 || !is_gimple_val (rhs2
))
3464 error ("invalid operands in binary operation");
3468 /* First handle operations that involve different types. */
3473 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3474 || !(INTEGRAL_TYPE_P (rhs1_type
)
3475 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3476 || !(INTEGRAL_TYPE_P (rhs2_type
)
3477 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3479 error ("type mismatch in complex expression");
3480 debug_generic_expr (lhs_type
);
3481 debug_generic_expr (rhs1_type
);
3482 debug_generic_expr (rhs2_type
);
3494 /* Shifts and rotates are ok on integral types, fixed point
3495 types and integer vector types. */
3496 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3497 && !FIXED_POINT_TYPE_P (rhs1_type
)
3498 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3499 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3500 || (!INTEGRAL_TYPE_P (rhs2_type
)
3501 /* Vector shifts of vectors are also ok. */
3502 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3503 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3504 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3505 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3506 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3508 error ("type mismatch in shift expression");
3509 debug_generic_expr (lhs_type
);
3510 debug_generic_expr (rhs1_type
);
3511 debug_generic_expr (rhs2_type
);
3518 case VEC_LSHIFT_EXPR
:
3519 case VEC_RSHIFT_EXPR
:
3521 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3522 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3523 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3524 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3525 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3526 || (!INTEGRAL_TYPE_P (rhs2_type
)
3527 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3528 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3529 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3531 error ("type mismatch in vector shift expression");
3532 debug_generic_expr (lhs_type
);
3533 debug_generic_expr (rhs1_type
);
3534 debug_generic_expr (rhs2_type
);
3537 /* For shifting a vector of non-integral components we
3538 only allow shifting by a constant multiple of the element size. */
3539 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3540 && (TREE_CODE (rhs2
) != INTEGER_CST
3541 || !div_if_zero_remainder (EXACT_DIV_EXPR
, rhs2
,
3542 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3544 error ("non-element sized vector shift of floating point vector");
3551 case WIDEN_LSHIFT_EXPR
:
3553 if (!INTEGRAL_TYPE_P (lhs_type
)
3554 || !INTEGRAL_TYPE_P (rhs1_type
)
3555 || TREE_CODE (rhs2
) != INTEGER_CST
3556 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3558 error ("type mismatch in widening vector shift expression");
3559 debug_generic_expr (lhs_type
);
3560 debug_generic_expr (rhs1_type
);
3561 debug_generic_expr (rhs2_type
);
3568 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3569 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3571 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3572 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3573 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3574 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3575 || TREE_CODE (rhs2
) != INTEGER_CST
3576 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3577 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3579 error ("type mismatch in widening vector shift expression");
3580 debug_generic_expr (lhs_type
);
3581 debug_generic_expr (rhs1_type
);
3582 debug_generic_expr (rhs2_type
);
3592 tree lhs_etype
= lhs_type
;
3593 tree rhs1_etype
= rhs1_type
;
3594 tree rhs2_etype
= rhs2_type
;
3595 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3597 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3598 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3600 error ("invalid non-vector operands to vector valued plus");
3603 lhs_etype
= TREE_TYPE (lhs_type
);
3604 rhs1_etype
= TREE_TYPE (rhs1_type
);
3605 rhs2_etype
= TREE_TYPE (rhs2_type
);
3607 if (POINTER_TYPE_P (lhs_etype
)
3608 || POINTER_TYPE_P (rhs1_etype
)
3609 || POINTER_TYPE_P (rhs2_etype
))
3611 error ("invalid (pointer) operands to plus/minus");
3615 /* Continue with generic binary expression handling. */
3619 case POINTER_PLUS_EXPR
:
3621 if (!POINTER_TYPE_P (rhs1_type
)
3622 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3623 || !ptrofftype_p (rhs2_type
))
3625 error ("type mismatch in pointer plus expression");
3626 debug_generic_stmt (lhs_type
);
3627 debug_generic_stmt (rhs1_type
);
3628 debug_generic_stmt (rhs2_type
);
3635 case TRUTH_ANDIF_EXPR
:
3636 case TRUTH_ORIF_EXPR
:
3637 case TRUTH_AND_EXPR
:
3639 case TRUTH_XOR_EXPR
:
3649 case UNORDERED_EXPR
:
3657 /* Comparisons are also binary, but the result type is not
3658 connected to the operand types. */
3659 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3661 case WIDEN_MULT_EXPR
:
3662 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3664 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3665 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3667 case WIDEN_SUM_EXPR
:
3668 case VEC_WIDEN_MULT_HI_EXPR
:
3669 case VEC_WIDEN_MULT_LO_EXPR
:
3670 case VEC_WIDEN_MULT_EVEN_EXPR
:
3671 case VEC_WIDEN_MULT_ODD_EXPR
:
3672 case VEC_PACK_TRUNC_EXPR
:
3673 case VEC_PACK_SAT_EXPR
:
3674 case VEC_PACK_FIX_TRUNC_EXPR
:
3679 case MULT_HIGHPART_EXPR
:
3680 case TRUNC_DIV_EXPR
:
3682 case FLOOR_DIV_EXPR
:
3683 case ROUND_DIV_EXPR
:
3684 case TRUNC_MOD_EXPR
:
3686 case FLOOR_MOD_EXPR
:
3687 case ROUND_MOD_EXPR
:
3689 case EXACT_DIV_EXPR
:
3695 /* Continue with generic binary expression handling. */
3702 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3703 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3705 error ("type mismatch in binary expression");
3706 debug_generic_stmt (lhs_type
);
3707 debug_generic_stmt (rhs1_type
);
3708 debug_generic_stmt (rhs2_type
);
3715 /* Verify a gimple assignment statement STMT with a ternary rhs.
3716 Returns true if anything is wrong. */
3719 verify_gimple_assign_ternary (gimple stmt
)
3721 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3722 tree lhs
= gimple_assign_lhs (stmt
);
3723 tree lhs_type
= TREE_TYPE (lhs
);
3724 tree rhs1
= gimple_assign_rhs1 (stmt
);
3725 tree rhs1_type
= TREE_TYPE (rhs1
);
3726 tree rhs2
= gimple_assign_rhs2 (stmt
);
3727 tree rhs2_type
= TREE_TYPE (rhs2
);
3728 tree rhs3
= gimple_assign_rhs3 (stmt
);
3729 tree rhs3_type
= TREE_TYPE (rhs3
);
3731 if (!is_gimple_reg (lhs
))
3733 error ("non-register as LHS of ternary operation");
3737 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3738 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3739 || !is_gimple_val (rhs2
)
3740 || !is_gimple_val (rhs3
))
3742 error ("invalid operands in ternary operation");
3746 /* First handle operations that involve different types. */
3749 case WIDEN_MULT_PLUS_EXPR
:
3750 case WIDEN_MULT_MINUS_EXPR
:
3751 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3752 && !FIXED_POINT_TYPE_P (rhs1_type
))
3753 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3754 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3755 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3756 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3758 error ("type mismatch in widening multiply-accumulate expression");
3759 debug_generic_expr (lhs_type
);
3760 debug_generic_expr (rhs1_type
);
3761 debug_generic_expr (rhs2_type
);
3762 debug_generic_expr (rhs3_type
);
3768 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3769 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3770 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3772 error ("type mismatch in fused multiply-add expression");
3773 debug_generic_expr (lhs_type
);
3774 debug_generic_expr (rhs1_type
);
3775 debug_generic_expr (rhs2_type
);
3776 debug_generic_expr (rhs3_type
);
3783 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3784 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3786 error ("type mismatch in conditional expression");
3787 debug_generic_expr (lhs_type
);
3788 debug_generic_expr (rhs2_type
);
3789 debug_generic_expr (rhs3_type
);
3795 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3796 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3798 error ("type mismatch in vector permute expression");
3799 debug_generic_expr (lhs_type
);
3800 debug_generic_expr (rhs1_type
);
3801 debug_generic_expr (rhs2_type
);
3802 debug_generic_expr (rhs3_type
);
3806 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3807 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3808 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3810 error ("vector types expected in vector permute expression");
3811 debug_generic_expr (lhs_type
);
3812 debug_generic_expr (rhs1_type
);
3813 debug_generic_expr (rhs2_type
);
3814 debug_generic_expr (rhs3_type
);
3818 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3819 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3820 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3821 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3822 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3824 error ("vectors with different element number found "
3825 "in vector permute expression");
3826 debug_generic_expr (lhs_type
);
3827 debug_generic_expr (rhs1_type
);
3828 debug_generic_expr (rhs2_type
);
3829 debug_generic_expr (rhs3_type
);
3833 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3834 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3835 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3837 error ("invalid mask type in vector permute expression");
3838 debug_generic_expr (lhs_type
);
3839 debug_generic_expr (rhs1_type
);
3840 debug_generic_expr (rhs2_type
);
3841 debug_generic_expr (rhs3_type
);
3848 case REALIGN_LOAD_EXPR
:
3858 /* Verify a gimple assignment statement STMT with a single rhs.
3859 Returns true if anything is wrong. */
3862 verify_gimple_assign_single (gimple stmt
)
3864 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3865 tree lhs
= gimple_assign_lhs (stmt
);
3866 tree lhs_type
= TREE_TYPE (lhs
);
3867 tree rhs1
= gimple_assign_rhs1 (stmt
);
3868 tree rhs1_type
= TREE_TYPE (rhs1
);
3871 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3873 error ("non-trivial conversion at assignment");
3874 debug_generic_expr (lhs_type
);
3875 debug_generic_expr (rhs1_type
);
3879 if (gimple_clobber_p (stmt
)
3880 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
3882 error ("non-decl/MEM_REF LHS in clobber statement");
3883 debug_generic_expr (lhs
);
3887 if (handled_component_p (lhs
))
3888 res
|= verify_types_in_gimple_reference (lhs
, true);
3890 /* Special codes we cannot handle via their class. */
3895 tree op
= TREE_OPERAND (rhs1
, 0);
3896 if (!is_gimple_addressable (op
))
3898 error ("invalid operand in unary expression");
3902 /* Technically there is no longer a need for matching types, but
3903 gimple hygiene asks for this check. In LTO we can end up
3904 combining incompatible units and thus end up with addresses
3905 of globals that change their type to a common one. */
3907 && !types_compatible_p (TREE_TYPE (op
),
3908 TREE_TYPE (TREE_TYPE (rhs1
)))
3909 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
3912 error ("type mismatch in address expression");
3913 debug_generic_stmt (TREE_TYPE (rhs1
));
3914 debug_generic_stmt (TREE_TYPE (op
));
3918 return verify_types_in_gimple_reference (op
, true);
3923 error ("INDIRECT_REF in gimple IL");
3929 case ARRAY_RANGE_REF
:
3930 case VIEW_CONVERT_EXPR
:
3933 case TARGET_MEM_REF
:
3935 if (!is_gimple_reg (lhs
)
3936 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3938 error ("invalid rhs for gimple memory store");
3939 debug_generic_stmt (lhs
);
3940 debug_generic_stmt (rhs1
);
3943 return res
|| verify_types_in_gimple_reference (rhs1
, false);
3955 /* tcc_declaration */
3960 if (!is_gimple_reg (lhs
)
3961 && !is_gimple_reg (rhs1
)
3962 && is_gimple_reg_type (TREE_TYPE (lhs
)))
3964 error ("invalid rhs for gimple memory store");
3965 debug_generic_stmt (lhs
);
3966 debug_generic_stmt (rhs1
);
3972 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
3975 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
3977 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
3979 /* For vector CONSTRUCTORs we require that either it is empty
3980 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3981 (then the element count must be correct to cover the whole
3982 outer vector and index must be NULL on all elements, or it is
3983 a CONSTRUCTOR of scalar elements, where we as an exception allow
3984 smaller number of elements (assuming zero filling) and
3985 consecutive indexes as compared to NULL indexes (such
3986 CONSTRUCTORs can appear in the IL from FEs). */
3987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
3989 if (elt_t
== NULL_TREE
)
3991 elt_t
= TREE_TYPE (elt_v
);
3992 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
3994 tree elt_t
= TREE_TYPE (elt_v
);
3995 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
3998 error ("incorrect type of vector CONSTRUCTOR"
4000 debug_generic_stmt (rhs1
);
4003 else if (CONSTRUCTOR_NELTS (rhs1
)
4004 * TYPE_VECTOR_SUBPARTS (elt_t
)
4005 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4007 error ("incorrect number of vector CONSTRUCTOR"
4009 debug_generic_stmt (rhs1
);
4013 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4016 error ("incorrect type of vector CONSTRUCTOR elements");
4017 debug_generic_stmt (rhs1
);
4020 else if (CONSTRUCTOR_NELTS (rhs1
)
4021 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4023 error ("incorrect number of vector CONSTRUCTOR elements");
4024 debug_generic_stmt (rhs1
);
4028 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4030 error ("incorrect type of vector CONSTRUCTOR elements");
4031 debug_generic_stmt (rhs1
);
4034 if (elt_i
!= NULL_TREE
4035 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4036 || TREE_CODE (elt_i
) != INTEGER_CST
4037 || compare_tree_int (elt_i
, i
) != 0))
4039 error ("vector CONSTRUCTOR with non-NULL element index");
4040 debug_generic_stmt (rhs1
);
4048 case WITH_SIZE_EXPR
:
4058 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4059 is a problem, otherwise false. */
4062 verify_gimple_assign (gimple stmt
)
4064 switch (gimple_assign_rhs_class (stmt
))
4066 case GIMPLE_SINGLE_RHS
:
4067 return verify_gimple_assign_single (stmt
);
4069 case GIMPLE_UNARY_RHS
:
4070 return verify_gimple_assign_unary (stmt
);
4072 case GIMPLE_BINARY_RHS
:
4073 return verify_gimple_assign_binary (stmt
);
4075 case GIMPLE_TERNARY_RHS
:
4076 return verify_gimple_assign_ternary (stmt
);
4083 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4084 is a problem, otherwise false. */
4087 verify_gimple_return (gimple stmt
)
4089 tree op
= gimple_return_retval (stmt
);
4090 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4092 /* We cannot test for present return values as we do not fix up missing
4093 return values from the original source. */
4097 if (!is_gimple_val (op
)
4098 && TREE_CODE (op
) != RESULT_DECL
)
4100 error ("invalid operand in return statement");
4101 debug_generic_stmt (op
);
4105 if ((TREE_CODE (op
) == RESULT_DECL
4106 && DECL_BY_REFERENCE (op
))
4107 || (TREE_CODE (op
) == SSA_NAME
4108 && SSA_NAME_VAR (op
)
4109 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4110 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4111 op
= TREE_TYPE (op
);
4113 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4115 error ("invalid conversion in return statement");
4116 debug_generic_stmt (restype
);
4117 debug_generic_stmt (TREE_TYPE (op
));
4125 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4126 is a problem, otherwise false. */
4129 verify_gimple_goto (gimple stmt
)
4131 tree dest
= gimple_goto_dest (stmt
);
4133 /* ??? We have two canonical forms of direct goto destinations, a
4134 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4135 if (TREE_CODE (dest
) != LABEL_DECL
4136 && (!is_gimple_val (dest
)
4137 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4139 error ("goto destination is neither a label nor a pointer");
4146 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4147 is a problem, otherwise false. */
4150 verify_gimple_switch (gimple stmt
)
4153 tree elt
, prev_upper_bound
= NULL_TREE
;
4154 tree index_type
, elt_type
= NULL_TREE
;
4156 if (!is_gimple_val (gimple_switch_index (stmt
)))
4158 error ("invalid operand to switch statement");
4159 debug_generic_stmt (gimple_switch_index (stmt
));
4163 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4164 if (! INTEGRAL_TYPE_P (index_type
))
4166 error ("non-integral type switch statement");
4167 debug_generic_expr (index_type
);
4171 elt
= gimple_switch_label (stmt
, 0);
4172 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4174 error ("invalid default case label in switch statement");
4175 debug_generic_expr (elt
);
4179 n
= gimple_switch_num_labels (stmt
);
4180 for (i
= 1; i
< n
; i
++)
4182 elt
= gimple_switch_label (stmt
, i
);
4184 if (! CASE_LOW (elt
))
4186 error ("invalid case label in switch statement");
4187 debug_generic_expr (elt
);
4191 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4193 error ("invalid case range in switch statement");
4194 debug_generic_expr (elt
);
4200 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4201 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4203 error ("type mismatch for case label in switch statement");
4204 debug_generic_expr (elt
);
4210 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4211 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4213 error ("type precision mismatch in switch statement");
4218 if (prev_upper_bound
)
4220 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4222 error ("case labels not sorted in switch statement");
4227 prev_upper_bound
= CASE_HIGH (elt
);
4228 if (! prev_upper_bound
)
4229 prev_upper_bound
= CASE_LOW (elt
);
4235 /* Verify a gimple debug statement STMT.
4236 Returns true if anything is wrong. */
4239 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4241 /* There isn't much that could be wrong in a gimple debug stmt. A
4242 gimple debug bind stmt, for example, maps a tree, that's usually
4243 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4244 component or member of an aggregate type, to another tree, that
4245 can be an arbitrary expression. These stmts expand into debug
4246 insns, and are converted to debug notes by var-tracking.c. */
4250 /* Verify a gimple label statement STMT.
4251 Returns true if anything is wrong. */
4254 verify_gimple_label (gimple stmt
)
4256 tree decl
= gimple_label_label (stmt
);
4260 if (TREE_CODE (decl
) != LABEL_DECL
)
4262 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4263 && DECL_CONTEXT (decl
) != current_function_decl
)
4265 error ("label's context is not the current function decl");
4269 uid
= LABEL_DECL_UID (decl
);
4271 && (uid
== -1 || (*label_to_block_map
)[uid
] != gimple_bb (stmt
)))
4273 error ("incorrect entry in label_to_block_map");
4277 uid
= EH_LANDING_PAD_NR (decl
);
4280 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4281 if (decl
!= lp
->post_landing_pad
)
4283 error ("incorrect setting of landing pad number");
4291 /* Verify the GIMPLE statement STMT. Returns true if there is an
4292 error, otherwise false. */
4295 verify_gimple_stmt (gimple stmt
)
4297 switch (gimple_code (stmt
))
4300 return verify_gimple_assign (stmt
);
4303 return verify_gimple_label (stmt
);
4306 return verify_gimple_call (stmt
);
4309 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4311 error ("invalid comparison code in gimple cond");
4314 if (!(!gimple_cond_true_label (stmt
)
4315 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4316 || !(!gimple_cond_false_label (stmt
)
4317 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4319 error ("invalid labels in gimple cond");
4323 return verify_gimple_comparison (boolean_type_node
,
4324 gimple_cond_lhs (stmt
),
4325 gimple_cond_rhs (stmt
));
4328 return verify_gimple_goto (stmt
);
4331 return verify_gimple_switch (stmt
);
4334 return verify_gimple_return (stmt
);
4339 case GIMPLE_TRANSACTION
:
4340 return verify_gimple_transaction (stmt
);
4342 /* Tuples that do not have tree operands. */
4344 case GIMPLE_PREDICT
:
4346 case GIMPLE_EH_DISPATCH
:
4347 case GIMPLE_EH_MUST_NOT_THROW
:
4351 /* OpenMP directives are validated by the FE and never operated
4352 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4353 non-gimple expressions when the main index variable has had
4354 its address taken. This does not affect the loop itself
4355 because the header of an GIMPLE_OMP_FOR is merely used to determine
4356 how to setup the parallel iteration. */
4360 return verify_gimple_debug (stmt
);
4367 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4368 and false otherwise. */
4371 verify_gimple_phi (gimple phi
)
4375 tree phi_result
= gimple_phi_result (phi
);
4380 error ("invalid PHI result");
4384 virtual_p
= virtual_operand_p (phi_result
);
4385 if (TREE_CODE (phi_result
) != SSA_NAME
4387 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4389 error ("invalid PHI result");
4393 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4395 tree t
= gimple_phi_arg_def (phi
, i
);
4399 error ("missing PHI def");
4403 /* Addressable variables do have SSA_NAMEs but they
4404 are not considered gimple values. */
4405 else if ((TREE_CODE (t
) == SSA_NAME
4406 && virtual_p
!= virtual_operand_p (t
))
4408 && (TREE_CODE (t
) != SSA_NAME
4409 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4411 && !is_gimple_val (t
)))
4413 error ("invalid PHI argument");
4414 debug_generic_expr (t
);
4417 #ifdef ENABLE_TYPES_CHECKING
4418 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4420 error ("incompatible types in PHI argument %u", i
);
4421 debug_generic_stmt (TREE_TYPE (phi_result
));
4422 debug_generic_stmt (TREE_TYPE (t
));
4431 /* Verify the GIMPLE statements inside the sequence STMTS. */
4434 verify_gimple_in_seq_2 (gimple_seq stmts
)
4436 gimple_stmt_iterator ittr
;
4439 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4441 gimple stmt
= gsi_stmt (ittr
);
4443 switch (gimple_code (stmt
))
4446 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4450 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4451 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4454 case GIMPLE_EH_FILTER
:
4455 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4458 case GIMPLE_EH_ELSE
:
4459 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4460 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4464 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4467 case GIMPLE_TRANSACTION
:
4468 err
|= verify_gimple_transaction (stmt
);
4473 bool err2
= verify_gimple_stmt (stmt
);
4475 debug_gimple_stmt (stmt
);
4484 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4485 is a problem, otherwise false. */
4488 verify_gimple_transaction (gimple stmt
)
4490 tree lab
= gimple_transaction_label (stmt
);
4491 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4493 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4497 /* Verify the GIMPLE statements inside the statement list STMTS. */
4500 verify_gimple_in_seq (gimple_seq stmts
)
4502 timevar_push (TV_TREE_STMT_VERIFY
);
4503 if (verify_gimple_in_seq_2 (stmts
))
4504 internal_error ("verify_gimple failed");
4505 timevar_pop (TV_TREE_STMT_VERIFY
);
4508 /* Return true when the T can be shared. */
4511 tree_node_can_be_shared (tree t
)
4513 if (IS_TYPE_OR_DECL_P (t
)
4514 || is_gimple_min_invariant (t
)
4515 || TREE_CODE (t
) == SSA_NAME
4516 || t
== error_mark_node
4517 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4520 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4529 /* Called via walk_tree. Verify tree sharing. */
4532 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4534 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4536 if (tree_node_can_be_shared (*tp
))
4538 *walk_subtrees
= false;
4542 if (pointer_set_insert (visited
, *tp
))
4548 /* Called via walk_gimple_stmt. Verify tree sharing. */
4551 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4553 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4554 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4557 static bool eh_error_found
;
4559 verify_eh_throw_stmt_node (void **slot
, void *data
)
4561 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4562 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4564 if (!pointer_set_contains (visited
, node
->stmt
))
4566 error ("dead STMT in EH table");
4567 debug_gimple_stmt (node
->stmt
);
4568 eh_error_found
= true;
4573 /* Verify if the location LOCs block is in BLOCKS. */
4576 verify_location (pointer_set_t
*blocks
, location_t loc
)
4578 tree block
= LOCATION_BLOCK (loc
);
4579 if (block
!= NULL_TREE
4580 && !pointer_set_contains (blocks
, block
))
4582 error ("location references block not in block tree");
4585 if (block
!= NULL_TREE
)
4586 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4590 /* Called via walk_tree. Verify that expressions have no blocks. */
4593 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4597 *walk_subtrees
= false;
4601 location_t loc
= EXPR_LOCATION (*tp
);
4602 if (LOCATION_BLOCK (loc
) != NULL
)
4608 /* Called via walk_tree. Verify locations of expressions. */
4611 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4613 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4615 if (TREE_CODE (*tp
) == VAR_DECL
4616 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4618 tree t
= DECL_DEBUG_EXPR (*tp
);
4619 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4623 if ((TREE_CODE (*tp
) == VAR_DECL
4624 || TREE_CODE (*tp
) == PARM_DECL
4625 || TREE_CODE (*tp
) == RESULT_DECL
)
4626 && DECL_HAS_VALUE_EXPR_P (*tp
))
4628 tree t
= DECL_VALUE_EXPR (*tp
);
4629 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4636 *walk_subtrees
= false;
4640 location_t loc
= EXPR_LOCATION (*tp
);
4641 if (verify_location (blocks
, loc
))
4647 /* Called via walk_gimple_op. Verify locations of expressions. */
4650 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4652 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4653 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4656 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4659 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4662 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4664 pointer_set_insert (blocks
, t
);
4665 collect_subblocks (blocks
, t
);
4669 /* Verify the GIMPLE statements in the CFG of FN. */
4672 verify_gimple_in_cfg (struct function
*fn
)
4676 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4678 timevar_push (TV_TREE_STMT_VERIFY
);
4679 visited
= pointer_set_create ();
4680 visited_stmts
= pointer_set_create ();
4682 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4683 blocks
= pointer_set_create ();
4684 if (DECL_INITIAL (fn
->decl
))
4686 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4687 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4690 FOR_EACH_BB_FN (bb
, fn
)
4692 gimple_stmt_iterator gsi
;
4694 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4696 gimple phi
= gsi_stmt (gsi
);
4700 pointer_set_insert (visited_stmts
, phi
);
4702 if (gimple_bb (phi
) != bb
)
4704 error ("gimple_bb (phi) is set to a wrong basic block");
4708 err2
|= verify_gimple_phi (phi
);
4710 /* Only PHI arguments have locations. */
4711 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4713 error ("PHI node with location");
4717 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4719 tree arg
= gimple_phi_arg_def (phi
, i
);
4720 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4724 error ("incorrect sharing of tree nodes");
4725 debug_generic_expr (addr
);
4728 location_t loc
= gimple_phi_arg_location (phi
, i
);
4729 if (virtual_operand_p (gimple_phi_result (phi
))
4730 && loc
!= UNKNOWN_LOCATION
)
4732 error ("virtual PHI with argument locations");
4735 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4738 debug_generic_expr (addr
);
4741 err2
|= verify_location (blocks
, loc
);
4745 debug_gimple_stmt (phi
);
4749 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4751 gimple stmt
= gsi_stmt (gsi
);
4753 struct walk_stmt_info wi
;
4757 pointer_set_insert (visited_stmts
, stmt
);
4759 if (gimple_bb (stmt
) != bb
)
4761 error ("gimple_bb (stmt) is set to a wrong basic block");
4765 err2
|= verify_gimple_stmt (stmt
);
4766 err2
|= verify_location (blocks
, gimple_location (stmt
));
4768 memset (&wi
, 0, sizeof (wi
));
4769 wi
.info
= (void *) visited
;
4770 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4773 error ("incorrect sharing of tree nodes");
4774 debug_generic_expr (addr
);
4778 memset (&wi
, 0, sizeof (wi
));
4779 wi
.info
= (void *) blocks
;
4780 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4783 debug_generic_expr (addr
);
4787 /* ??? Instead of not checking these stmts at all the walker
4788 should know its context via wi. */
4789 if (!is_gimple_debug (stmt
)
4790 && !is_gimple_omp (stmt
))
4792 memset (&wi
, 0, sizeof (wi
));
4793 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4796 debug_generic_expr (addr
);
4797 inform (gimple_location (stmt
), "in statement");
4802 /* If the statement is marked as part of an EH region, then it is
4803 expected that the statement could throw. Verify that when we
4804 have optimizations that simplify statements such that we prove
4805 that they cannot throw, that we update other data structures
4807 lp_nr
= lookup_stmt_eh_lp (stmt
);
4810 if (!stmt_could_throw_p (stmt
))
4812 error ("statement marked for throw, but doesn%'t");
4816 && !gsi_one_before_end_p (gsi
)
4817 && stmt_can_throw_internal (stmt
))
4819 error ("statement marked for throw in middle of block");
4825 debug_gimple_stmt (stmt
);
4830 eh_error_found
= false;
4831 if (get_eh_throw_stmt_table (cfun
))
4832 htab_traverse (get_eh_throw_stmt_table (cfun
),
4833 verify_eh_throw_stmt_node
,
4836 if (err
|| eh_error_found
)
4837 internal_error ("verify_gimple failed");
4839 pointer_set_destroy (visited
);
4840 pointer_set_destroy (visited_stmts
);
4841 pointer_set_destroy (blocks
);
4842 verify_histograms ();
4843 timevar_pop (TV_TREE_STMT_VERIFY
);
4847 /* Verifies that the flow information is OK. */
4850 gimple_verify_flow_info (void)
4854 gimple_stmt_iterator gsi
;
4859 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4860 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4862 error ("ENTRY_BLOCK has IL associated with it");
4866 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4867 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4869 error ("EXIT_BLOCK has IL associated with it");
4873 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
4874 if (e
->flags
& EDGE_FALLTHRU
)
4876 error ("fallthru to exit from bb %d", e
->src
->index
);
4882 bool found_ctrl_stmt
= false;
4886 /* Skip labels on the start of basic block. */
4887 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4890 gimple prev_stmt
= stmt
;
4892 stmt
= gsi_stmt (gsi
);
4894 if (gimple_code (stmt
) != GIMPLE_LABEL
)
4897 label
= gimple_label_label (stmt
);
4898 if (prev_stmt
&& DECL_NONLOCAL (label
))
4900 error ("nonlocal label ");
4901 print_generic_expr (stderr
, label
, 0);
4902 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4907 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
4909 error ("EH landing pad label ");
4910 print_generic_expr (stderr
, label
, 0);
4911 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
4916 if (label_to_block (label
) != bb
)
4919 print_generic_expr (stderr
, label
, 0);
4920 fprintf (stderr
, " to block does not match in bb %d",
4925 if (decl_function_context (label
) != current_function_decl
)
4928 print_generic_expr (stderr
, label
, 0);
4929 fprintf (stderr
, " has incorrect context in bb %d",
4935 /* Verify that body of basic block BB is free of control flow. */
4936 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
4938 gimple stmt
= gsi_stmt (gsi
);
4940 if (found_ctrl_stmt
)
4942 error ("control flow in the middle of basic block %d",
4947 if (stmt_ends_bb_p (stmt
))
4948 found_ctrl_stmt
= true;
4950 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4953 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
4954 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
4959 gsi
= gsi_last_bb (bb
);
4960 if (gsi_end_p (gsi
))
4963 stmt
= gsi_stmt (gsi
);
4965 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4968 err
|= verify_eh_edges (stmt
);
4970 if (is_ctrl_stmt (stmt
))
4972 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4973 if (e
->flags
& EDGE_FALLTHRU
)
4975 error ("fallthru edge after a control statement in bb %d",
4981 if (gimple_code (stmt
) != GIMPLE_COND
)
4983 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4984 after anything else but if statement. */
4985 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4986 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
4988 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4994 switch (gimple_code (stmt
))
5001 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5005 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5006 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5007 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5008 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5009 || EDGE_COUNT (bb
->succs
) >= 3)
5011 error ("wrong outgoing edge flags at end of bb %d",
5019 if (simple_goto_p (stmt
))
5021 error ("explicit goto at end of bb %d", bb
->index
);
5026 /* FIXME. We should double check that the labels in the
5027 destination blocks have their address taken. */
5028 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5029 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5030 | EDGE_FALSE_VALUE
))
5031 || !(e
->flags
& EDGE_ABNORMAL
))
5033 error ("wrong outgoing edge flags at end of bb %d",
5041 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5043 /* ... fallthru ... */
5045 if (!single_succ_p (bb
)
5046 || (single_succ_edge (bb
)->flags
5047 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5048 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5050 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5053 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5055 error ("return edge does not point to exit in bb %d",
5067 n
= gimple_switch_num_labels (stmt
);
5069 /* Mark all the destination basic blocks. */
5070 for (i
= 0; i
< n
; ++i
)
5072 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5073 basic_block label_bb
= label_to_block (lab
);
5074 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5075 label_bb
->aux
= (void *)1;
5078 /* Verify that the case labels are sorted. */
5079 prev
= gimple_switch_label (stmt
, 0);
5080 for (i
= 1; i
< n
; ++i
)
5082 tree c
= gimple_switch_label (stmt
, i
);
5085 error ("found default case not at the start of "
5091 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5093 error ("case labels not sorted: ");
5094 print_generic_expr (stderr
, prev
, 0);
5095 fprintf (stderr
," is greater than ");
5096 print_generic_expr (stderr
, c
, 0);
5097 fprintf (stderr
," but comes before it.\n");
5102 /* VRP will remove the default case if it can prove it will
5103 never be executed. So do not verify there always exists
5104 a default case here. */
5106 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5110 error ("extra outgoing edge %d->%d",
5111 bb
->index
, e
->dest
->index
);
5115 e
->dest
->aux
= (void *)2;
5116 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5117 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5119 error ("wrong outgoing edge flags at end of bb %d",
5125 /* Check that we have all of them. */
5126 for (i
= 0; i
< n
; ++i
)
5128 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5129 basic_block label_bb
= label_to_block (lab
);
5131 if (label_bb
->aux
!= (void *)2)
5133 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5138 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5139 e
->dest
->aux
= (void *)0;
5143 case GIMPLE_EH_DISPATCH
:
5144 err
|= verify_eh_dispatch_edge (stmt
);
5152 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5153 verify_dominators (CDI_DOMINATORS
);
5159 /* Updates phi nodes after creating a forwarder block joined
5160 by edge FALLTHRU. */
5163 gimple_make_forwarder_block (edge fallthru
)
5167 basic_block dummy
, bb
;
5169 gimple_stmt_iterator gsi
;
5171 dummy
= fallthru
->src
;
5172 bb
= fallthru
->dest
;
5174 if (single_pred_p (bb
))
5177 /* If we redirected a branch we must create new PHI nodes at the
5179 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5181 gimple phi
, new_phi
;
5183 phi
= gsi_stmt (gsi
);
5184 var
= gimple_phi_result (phi
);
5185 new_phi
= create_phi_node (var
, bb
);
5186 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5187 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5191 /* Add the arguments we have stored on edges. */
5192 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5197 flush_pending_stmts (e
);
5202 /* Return a non-special label in the head of basic block BLOCK.
5203 Create one if it doesn't exist. */
5206 gimple_block_label (basic_block bb
)
5208 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5213 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5215 stmt
= gsi_stmt (i
);
5216 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5218 label
= gimple_label_label (stmt
);
5219 if (!DECL_NONLOCAL (label
))
5222 gsi_move_before (&i
, &s
);
5227 label
= create_artificial_label (UNKNOWN_LOCATION
);
5228 stmt
= gimple_build_label (label
);
5229 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5234 /* Attempt to perform edge redirection by replacing a possibly complex
5235 jump instruction by a goto or by removing the jump completely.
5236 This can apply only if all edges now point to the same block. The
5237 parameters and return values are equivalent to
5238 redirect_edge_and_branch. */
5241 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5243 basic_block src
= e
->src
;
5244 gimple_stmt_iterator i
;
5247 /* We can replace or remove a complex jump only when we have exactly
5249 if (EDGE_COUNT (src
->succs
) != 2
5250 /* Verify that all targets will be TARGET. Specifically, the
5251 edge that is not E must also go to TARGET. */
5252 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5255 i
= gsi_last_bb (src
);
5259 stmt
= gsi_stmt (i
);
5261 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5263 gsi_remove (&i
, true);
5264 e
= ssa_redirect_edge (e
, target
);
5265 e
->flags
= EDGE_FALLTHRU
;
5273 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5274 edge representing the redirected branch. */
5277 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5279 basic_block bb
= e
->src
;
5280 gimple_stmt_iterator gsi
;
5284 if (e
->flags
& EDGE_ABNORMAL
)
5287 if (e
->dest
== dest
)
5290 if (e
->flags
& EDGE_EH
)
5291 return redirect_eh_edge (e
, dest
);
5293 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5295 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5300 gsi
= gsi_last_bb (bb
);
5301 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5303 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5306 /* For COND_EXPR, we only need to redirect the edge. */
5310 /* No non-abnormal edges should lead from a non-simple goto, and
5311 simple ones should be represented implicitly. */
5316 tree label
= gimple_block_label (dest
);
5317 tree cases
= get_cases_for_edge (e
, stmt
);
5319 /* If we have a list of cases associated with E, then use it
5320 as it's a lot faster than walking the entire case vector. */
5323 edge e2
= find_edge (e
->src
, dest
);
5330 CASE_LABEL (cases
) = label
;
5331 cases
= CASE_CHAIN (cases
);
5334 /* If there was already an edge in the CFG, then we need
5335 to move all the cases associated with E to E2. */
5338 tree cases2
= get_cases_for_edge (e2
, stmt
);
5340 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5341 CASE_CHAIN (cases2
) = first
;
5343 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5347 size_t i
, n
= gimple_switch_num_labels (stmt
);
5349 for (i
= 0; i
< n
; i
++)
5351 tree elt
= gimple_switch_label (stmt
, i
);
5352 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5353 CASE_LABEL (elt
) = label
;
5361 int i
, n
= gimple_asm_nlabels (stmt
);
5364 for (i
= 0; i
< n
; ++i
)
5366 tree cons
= gimple_asm_label_op (stmt
, i
);
5367 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5370 label
= gimple_block_label (dest
);
5371 TREE_VALUE (cons
) = label
;
5375 /* If we didn't find any label matching the former edge in the
5376 asm labels, we must be redirecting the fallthrough
5378 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5383 gsi_remove (&gsi
, true);
5384 e
->flags
|= EDGE_FALLTHRU
;
5387 case GIMPLE_OMP_RETURN
:
5388 case GIMPLE_OMP_CONTINUE
:
5389 case GIMPLE_OMP_SECTIONS_SWITCH
:
5390 case GIMPLE_OMP_FOR
:
5391 /* The edges from OMP constructs can be simply redirected. */
5394 case GIMPLE_EH_DISPATCH
:
5395 if (!(e
->flags
& EDGE_FALLTHRU
))
5396 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5399 case GIMPLE_TRANSACTION
:
5400 /* The ABORT edge has a stored label associated with it, otherwise
5401 the edges are simply redirectable. */
5403 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5407 /* Otherwise it must be a fallthru edge, and we don't need to
5408 do anything besides redirecting it. */
5409 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5413 /* Update/insert PHI nodes as necessary. */
5415 /* Now update the edges in the CFG. */
5416 e
= ssa_redirect_edge (e
, dest
);
5421 /* Returns true if it is possible to remove edge E by redirecting
5422 it to the destination of the other edge from E->src. */
5425 gimple_can_remove_branch_p (const_edge e
)
5427 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5433 /* Simple wrapper, as we can always redirect fallthru edges. */
5436 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5438 e
= gimple_redirect_edge_and_branch (e
, dest
);
5445 /* Splits basic block BB after statement STMT (but at least after the
5446 labels). If STMT is NULL, BB is split just after the labels. */
5449 gimple_split_block (basic_block bb
, void *stmt
)
5451 gimple_stmt_iterator gsi
;
5452 gimple_stmt_iterator gsi_tgt
;
5459 new_bb
= create_empty_bb (bb
);
5461 /* Redirect the outgoing edges. */
5462 new_bb
->succs
= bb
->succs
;
5464 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5467 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5470 /* Move everything from GSI to the new basic block. */
5471 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5473 act
= gsi_stmt (gsi
);
5474 if (gimple_code (act
) == GIMPLE_LABEL
)
5487 if (gsi_end_p (gsi
))
5490 /* Split the statement list - avoid re-creating new containers as this
5491 brings ugly quadratic memory consumption in the inliner.
5492 (We are still quadratic since we need to update stmt BB pointers,
5494 gsi_split_seq_before (&gsi
, &list
);
5495 set_bb_seq (new_bb
, list
);
5496 for (gsi_tgt
= gsi_start (list
);
5497 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5498 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5504 /* Moves basic block BB after block AFTER. */
5507 gimple_move_block_after (basic_block bb
, basic_block after
)
5509 if (bb
->prev_bb
== after
)
5513 link_block (bb
, after
);
5519 /* Return TRUE if block BB has no executable statements, otherwise return
5523 gimple_empty_block_p (basic_block bb
)
5525 /* BB must have no executable statements. */
5526 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5529 if (gsi_end_p (gsi
))
5531 if (is_gimple_debug (gsi_stmt (gsi
)))
5532 gsi_next_nondebug (&gsi
);
5533 return gsi_end_p (gsi
);
5537 /* Split a basic block if it ends with a conditional branch and if the
5538 other part of the block is not empty. */
5541 gimple_split_block_before_cond_jump (basic_block bb
)
5543 gimple last
, split_point
;
5544 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5545 if (gsi_end_p (gsi
))
5547 last
= gsi_stmt (gsi
);
5548 if (gimple_code (last
) != GIMPLE_COND
5549 && gimple_code (last
) != GIMPLE_SWITCH
)
5551 gsi_prev_nondebug (&gsi
);
5552 split_point
= gsi_stmt (gsi
);
5553 return split_block (bb
, split_point
)->dest
;
5557 /* Return true if basic_block can be duplicated. */
5560 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5565 /* Create a duplicate of the basic block BB. NOTE: This does not
5566 preserve SSA form. */
5569 gimple_duplicate_bb (basic_block bb
)
5572 gimple_stmt_iterator gsi
, gsi_tgt
;
5573 gimple_seq phis
= phi_nodes (bb
);
5574 gimple phi
, stmt
, copy
;
5576 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5578 /* Copy the PHI nodes. We ignore PHI node arguments here because
5579 the incoming edges have not been setup yet. */
5580 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5582 phi
= gsi_stmt (gsi
);
5583 copy
= create_phi_node (NULL_TREE
, new_bb
);
5584 create_new_def_for (gimple_phi_result (phi
), copy
,
5585 gimple_phi_result_ptr (copy
));
5586 gimple_set_uid (copy
, gimple_uid (phi
));
5589 gsi_tgt
= gsi_start_bb (new_bb
);
5590 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5592 def_operand_p def_p
;
5593 ssa_op_iter op_iter
;
5596 stmt
= gsi_stmt (gsi
);
5597 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5600 /* Don't duplicate label debug stmts. */
5601 if (gimple_debug_bind_p (stmt
)
5602 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5606 /* Create a new copy of STMT and duplicate STMT's virtual
5608 copy
= gimple_copy (stmt
);
5609 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5611 maybe_duplicate_eh_stmt (copy
, stmt
);
5612 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5614 /* When copying around a stmt writing into a local non-user
5615 aggregate, make sure it won't share stack slot with other
5617 lhs
= gimple_get_lhs (stmt
);
5618 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5620 tree base
= get_base_address (lhs
);
5622 && (TREE_CODE (base
) == VAR_DECL
5623 || TREE_CODE (base
) == RESULT_DECL
)
5624 && DECL_IGNORED_P (base
)
5625 && !TREE_STATIC (base
)
5626 && !DECL_EXTERNAL (base
)
5627 && (TREE_CODE (base
) != VAR_DECL
5628 || !DECL_HAS_VALUE_EXPR_P (base
)))
5629 DECL_NONSHAREABLE (base
) = 1;
5632 /* Create new names for all the definitions created by COPY and
5633 add replacement mappings for each new name. */
5634 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5635 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5641 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5644 add_phi_args_after_copy_edge (edge e_copy
)
5646 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5649 gimple phi
, phi_copy
;
5651 gimple_stmt_iterator psi
, psi_copy
;
5653 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5656 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5658 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5659 dest
= get_bb_original (e_copy
->dest
);
5661 dest
= e_copy
->dest
;
5663 e
= find_edge (bb
, dest
);
5666 /* During loop unrolling the target of the latch edge is copied.
5667 In this case we are not looking for edge to dest, but to
5668 duplicated block whose original was dest. */
5669 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5671 if ((e
->dest
->flags
& BB_DUPLICATED
)
5672 && get_bb_original (e
->dest
) == dest
)
5676 gcc_assert (e
!= NULL
);
5679 for (psi
= gsi_start_phis (e
->dest
),
5680 psi_copy
= gsi_start_phis (e_copy
->dest
);
5682 gsi_next (&psi
), gsi_next (&psi_copy
))
5684 phi
= gsi_stmt (psi
);
5685 phi_copy
= gsi_stmt (psi_copy
);
5686 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5687 add_phi_arg (phi_copy
, def
, e_copy
,
5688 gimple_phi_arg_location_from_edge (phi
, e
));
5693 /* Basic block BB_COPY was created by code duplication. Add phi node
5694 arguments for edges going out of BB_COPY. The blocks that were
5695 duplicated have BB_DUPLICATED set. */
5698 add_phi_args_after_copy_bb (basic_block bb_copy
)
5703 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5705 add_phi_args_after_copy_edge (e_copy
);
5709 /* Blocks in REGION_COPY array of length N_REGION were created by
5710 duplication of basic blocks. Add phi node arguments for edges
5711 going from these blocks. If E_COPY is not NULL, also add
5712 phi node arguments for its destination.*/
5715 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5720 for (i
= 0; i
< n_region
; i
++)
5721 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5723 for (i
= 0; i
< n_region
; i
++)
5724 add_phi_args_after_copy_bb (region_copy
[i
]);
5726 add_phi_args_after_copy_edge (e_copy
);
5728 for (i
= 0; i
< n_region
; i
++)
5729 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5732 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5733 important exit edge EXIT. By important we mean that no SSA name defined
5734 inside region is live over the other exit edges of the region. All entry
5735 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5736 to the duplicate of the region. Dominance and loop information is
5737 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5738 UPDATE_DOMINANCE is false then we assume that the caller will update the
5739 dominance information after calling this function. The new basic
5740 blocks are stored to REGION_COPY in the same order as they had in REGION,
5741 provided that REGION_COPY is not NULL.
5742 The function returns false if it is unable to copy the region,
5746 gimple_duplicate_sese_region (edge entry
, edge exit
,
5747 basic_block
*region
, unsigned n_region
,
5748 basic_block
*region_copy
,
5749 bool update_dominance
)
5752 bool free_region_copy
= false, copying_header
= false;
5753 struct loop
*loop
= entry
->dest
->loop_father
;
5755 vec
<basic_block
> doms
;
5757 int total_freq
= 0, entry_freq
= 0;
5758 gcov_type total_count
= 0, entry_count
= 0;
5760 if (!can_copy_bbs_p (region
, n_region
))
5763 /* Some sanity checking. Note that we do not check for all possible
5764 missuses of the functions. I.e. if you ask to copy something weird,
5765 it will work, but the state of structures probably will not be
5767 for (i
= 0; i
< n_region
; i
++)
5769 /* We do not handle subloops, i.e. all the blocks must belong to the
5771 if (region
[i
]->loop_father
!= loop
)
5774 if (region
[i
] != entry
->dest
5775 && region
[i
] == loop
->header
)
5779 set_loop_copy (loop
, loop
);
5781 /* In case the function is used for loop header copying (which is the primary
5782 use), ensure that EXIT and its copy will be new latch and entry edges. */
5783 if (loop
->header
== entry
->dest
)
5785 copying_header
= true;
5786 set_loop_copy (loop
, loop_outer (loop
));
5788 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5791 for (i
= 0; i
< n_region
; i
++)
5792 if (region
[i
] != exit
->src
5793 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5799 region_copy
= XNEWVEC (basic_block
, n_region
);
5800 free_region_copy
= true;
5803 initialize_original_copy_tables ();
5805 /* Record blocks outside the region that are dominated by something
5807 if (update_dominance
)
5810 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5813 if (entry
->dest
->count
)
5815 total_count
= entry
->dest
->count
;
5816 entry_count
= entry
->count
;
5817 /* Fix up corner cases, to avoid division by zero or creation of negative
5819 if (entry_count
> total_count
)
5820 entry_count
= total_count
;
5824 total_freq
= entry
->dest
->frequency
;
5825 entry_freq
= EDGE_FREQUENCY (entry
);
5826 /* Fix up corner cases, to avoid division by zero or creation of negative
5828 if (total_freq
== 0)
5830 else if (entry_freq
> total_freq
)
5831 entry_freq
= total_freq
;
5834 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5835 split_edge_bb_loc (entry
), update_dominance
);
5838 scale_bbs_frequencies_gcov_type (region
, n_region
,
5839 total_count
- entry_count
,
5841 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5846 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5848 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
5853 loop
->header
= exit
->dest
;
5854 loop
->latch
= exit
->src
;
5857 /* Redirect the entry and add the phi node arguments. */
5858 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5859 gcc_assert (redirected
!= NULL
);
5860 flush_pending_stmts (entry
);
5862 /* Concerning updating of dominators: We must recount dominators
5863 for entry block and its copy. Anything that is outside of the
5864 region, but was dominated by something inside needs recounting as
5866 if (update_dominance
)
5868 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
5869 doms
.safe_push (get_bb_original (entry
->dest
));
5870 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
5874 /* Add the other PHI node arguments. */
5875 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
5877 if (free_region_copy
)
5880 free_original_copy_tables ();
5884 /* Checks if BB is part of the region defined by N_REGION BBS. */
5886 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
5890 for (n
= 0; n
< n_region
; n
++)
5898 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5899 are stored to REGION_COPY in the same order in that they appear
5900 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5901 the region, EXIT an exit from it. The condition guarding EXIT
5902 is moved to ENTRY. Returns true if duplication succeeds, false
5928 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
5929 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
5930 basic_block
*region_copy ATTRIBUTE_UNUSED
)
5933 bool free_region_copy
= false;
5934 struct loop
*loop
= exit
->dest
->loop_father
;
5935 struct loop
*orig_loop
= entry
->dest
->loop_father
;
5936 basic_block switch_bb
, entry_bb
, nentry_bb
;
5937 vec
<basic_block
> doms
;
5938 int total_freq
= 0, exit_freq
= 0;
5939 gcov_type total_count
= 0, exit_count
= 0;
5940 edge exits
[2], nexits
[2], e
;
5941 gimple_stmt_iterator gsi
;
5944 basic_block exit_bb
;
5945 gimple_stmt_iterator psi
;
5948 struct loop
*target
, *aloop
, *cloop
;
5950 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
5952 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
5954 if (!can_copy_bbs_p (region
, n_region
))
5957 initialize_original_copy_tables ();
5958 set_loop_copy (orig_loop
, loop
);
5961 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
5963 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
5965 cloop
= duplicate_loop (aloop
, target
);
5966 duplicate_subloops (aloop
, cloop
);
5972 region_copy
= XNEWVEC (basic_block
, n_region
);
5973 free_region_copy
= true;
5976 gcc_assert (!need_ssa_update_p (cfun
));
5978 /* Record blocks outside the region that are dominated by something
5980 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5982 if (exit
->src
->count
)
5984 total_count
= exit
->src
->count
;
5985 exit_count
= exit
->count
;
5986 /* Fix up corner cases, to avoid division by zero or creation of negative
5988 if (exit_count
> total_count
)
5989 exit_count
= total_count
;
5993 total_freq
= exit
->src
->frequency
;
5994 exit_freq
= EDGE_FREQUENCY (exit
);
5995 /* Fix up corner cases, to avoid division by zero or creation of negative
5997 if (total_freq
== 0)
5999 if (exit_freq
> total_freq
)
6000 exit_freq
= total_freq
;
6003 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6004 split_edge_bb_loc (exit
), true);
6007 scale_bbs_frequencies_gcov_type (region
, n_region
,
6008 total_count
- exit_count
,
6010 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6015 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6017 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6020 /* Create the switch block, and put the exit condition to it. */
6021 entry_bb
= entry
->dest
;
6022 nentry_bb
= get_bb_copy (entry_bb
);
6023 if (!last_stmt (entry
->src
)
6024 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6025 switch_bb
= entry
->src
;
6027 switch_bb
= split_edge (entry
);
6028 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6030 gsi
= gsi_last_bb (switch_bb
);
6031 cond_stmt
= last_stmt (exit
->src
);
6032 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6033 cond_stmt
= gimple_copy (cond_stmt
);
6035 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6037 sorig
= single_succ_edge (switch_bb
);
6038 sorig
->flags
= exits
[1]->flags
;
6039 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6041 /* Register the new edge from SWITCH_BB in loop exit lists. */
6042 rescan_loop_exit (snew
, true, false);
6044 /* Add the PHI node arguments. */
6045 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6047 /* Get rid of now superfluous conditions and associated edges (and phi node
6049 exit_bb
= exit
->dest
;
6051 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6052 PENDING_STMT (e
) = NULL
;
6054 /* The latch of ORIG_LOOP was copied, and so was the backedge
6055 to the original header. We redirect this backedge to EXIT_BB. */
6056 for (i
= 0; i
< n_region
; i
++)
6057 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6059 gcc_assert (single_succ_edge (region_copy
[i
]));
6060 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6061 PENDING_STMT (e
) = NULL
;
6062 for (psi
= gsi_start_phis (exit_bb
);
6066 phi
= gsi_stmt (psi
);
6067 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6068 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6071 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6072 PENDING_STMT (e
) = NULL
;
6074 /* Anything that is outside of the region, but was dominated by something
6075 inside needs to update dominance info. */
6076 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6078 /* Update the SSA web. */
6079 update_ssa (TODO_update_ssa
);
6081 if (free_region_copy
)
6084 free_original_copy_tables ();
6088 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6089 adding blocks when the dominator traversal reaches EXIT. This
6090 function silently assumes that ENTRY strictly dominates EXIT. */
6093 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6094 vec
<basic_block
> *bbs_p
)
6098 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6100 son
= next_dom_son (CDI_DOMINATORS
, son
))
6102 bbs_p
->safe_push (son
);
6104 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6108 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6109 The duplicates are recorded in VARS_MAP. */
6112 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6115 tree t
= *tp
, new_t
;
6116 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6119 if (DECL_CONTEXT (t
) == to_context
)
6122 loc
= pointer_map_contains (vars_map
, t
);
6126 loc
= pointer_map_insert (vars_map
, t
);
6130 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6131 add_local_decl (f
, new_t
);
6135 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6136 new_t
= copy_node (t
);
6138 DECL_CONTEXT (new_t
) = to_context
;
6143 new_t
= (tree
) *loc
;
6149 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6150 VARS_MAP maps old ssa names and var_decls to the new ones. */
6153 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
6159 gcc_assert (!virtual_operand_p (name
));
6161 loc
= pointer_map_contains (vars_map
, name
);
6165 tree decl
= SSA_NAME_VAR (name
);
6168 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6169 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6170 decl
, SSA_NAME_DEF_STMT (name
));
6171 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6172 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6176 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6177 name
, SSA_NAME_DEF_STMT (name
));
6179 loc
= pointer_map_insert (vars_map
, name
);
6183 new_name
= (tree
) *loc
;
6194 struct pointer_map_t
*vars_map
;
6195 htab_t new_label_map
;
6196 struct pointer_map_t
*eh_map
;
6200 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6201 contained in *TP if it has been ORIG_BLOCK previously and change the
6202 DECL_CONTEXT of every local variable referenced in *TP. */
6205 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6207 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6208 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6213 tree block
= TREE_BLOCK (t
);
6214 if (block
== p
->orig_block
6215 || (p
->orig_block
== NULL_TREE
6216 && block
!= NULL_TREE
))
6217 TREE_SET_BLOCK (t
, p
->new_block
);
6218 #ifdef ENABLE_CHECKING
6219 else if (block
!= NULL_TREE
)
6221 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6222 block
= BLOCK_SUPERCONTEXT (block
);
6223 gcc_assert (block
== p
->orig_block
);
6227 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6229 if (TREE_CODE (t
) == SSA_NAME
)
6230 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6231 else if (TREE_CODE (t
) == LABEL_DECL
)
6233 if (p
->new_label_map
)
6235 struct tree_map in
, *out
;
6237 out
= (struct tree_map
*)
6238 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6243 DECL_CONTEXT (t
) = p
->to_context
;
6245 else if (p
->remap_decls_p
)
6247 /* Replace T with its duplicate. T should no longer appear in the
6248 parent function, so this looks wasteful; however, it may appear
6249 in referenced_vars, and more importantly, as virtual operands of
6250 statements, and in alias lists of other variables. It would be
6251 quite difficult to expunge it from all those places. ??? It might
6252 suffice to do this for addressable variables. */
6253 if ((TREE_CODE (t
) == VAR_DECL
6254 && !is_global_var (t
))
6255 || TREE_CODE (t
) == CONST_DECL
)
6256 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6260 else if (TYPE_P (t
))
6266 /* Helper for move_stmt_r. Given an EH region number for the source
6267 function, map that to the duplicate EH regio number in the dest. */
6270 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6272 eh_region old_r
, new_r
;
6275 old_r
= get_eh_region_from_number (old_nr
);
6276 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6277 new_r
= (eh_region
) *slot
;
6279 return new_r
->index
;
6282 /* Similar, but operate on INTEGER_CSTs. */
6285 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6289 old_nr
= tree_to_shwi (old_t_nr
);
6290 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6292 return build_int_cst (integer_type_node
, new_nr
);
6295 /* Like move_stmt_op, but for gimple statements.
6297 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6298 contained in the current statement in *GSI_P and change the
6299 DECL_CONTEXT of every local variable referenced in the current
6303 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6304 struct walk_stmt_info
*wi
)
6306 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6307 gimple stmt
= gsi_stmt (*gsi_p
);
6308 tree block
= gimple_block (stmt
);
6310 if (block
== p
->orig_block
6311 || (p
->orig_block
== NULL_TREE
6312 && block
!= NULL_TREE
))
6313 gimple_set_block (stmt
, p
->new_block
);
6315 switch (gimple_code (stmt
))
6318 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6320 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6321 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6322 switch (DECL_FUNCTION_CODE (fndecl
))
6324 case BUILT_IN_EH_COPY_VALUES
:
6325 r
= gimple_call_arg (stmt
, 1);
6326 r
= move_stmt_eh_region_tree_nr (r
, p
);
6327 gimple_call_set_arg (stmt
, 1, r
);
6330 case BUILT_IN_EH_POINTER
:
6331 case BUILT_IN_EH_FILTER
:
6332 r
= gimple_call_arg (stmt
, 0);
6333 r
= move_stmt_eh_region_tree_nr (r
, p
);
6334 gimple_call_set_arg (stmt
, 0, r
);
6345 int r
= gimple_resx_region (stmt
);
6346 r
= move_stmt_eh_region_nr (r
, p
);
6347 gimple_resx_set_region (stmt
, r
);
6351 case GIMPLE_EH_DISPATCH
:
6353 int r
= gimple_eh_dispatch_region (stmt
);
6354 r
= move_stmt_eh_region_nr (r
, p
);
6355 gimple_eh_dispatch_set_region (stmt
, r
);
6359 case GIMPLE_OMP_RETURN
:
6360 case GIMPLE_OMP_CONTINUE
:
6363 if (is_gimple_omp (stmt
))
6365 /* Do not remap variables inside OMP directives. Variables
6366 referenced in clauses and directive header belong to the
6367 parent function and should not be moved into the child
6369 bool save_remap_decls_p
= p
->remap_decls_p
;
6370 p
->remap_decls_p
= false;
6371 *handled_ops_p
= true;
6373 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6376 p
->remap_decls_p
= save_remap_decls_p
;
6384 /* Move basic block BB from function CFUN to function DEST_FN. The
6385 block is moved out of the original linked list and placed after
6386 block AFTER in the new list. Also, the block is removed from the
6387 original array of blocks and placed in DEST_FN's array of blocks.
6388 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6389 updated to reflect the moved edges.
6391 The local variables are remapped to new instances, VARS_MAP is used
6392 to record the mapping. */
6395 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6396 basic_block after
, bool update_edge_count_p
,
6397 struct move_stmt_d
*d
)
6399 struct control_flow_graph
*cfg
;
6402 gimple_stmt_iterator si
;
6403 unsigned old_len
, new_len
;
6405 /* Remove BB from dominance structures. */
6406 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6408 /* Move BB from its current loop to the copy in the new function. */
6411 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6413 bb
->loop_father
= new_loop
;
6416 /* Link BB to the new linked list. */
6417 move_block_after (bb
, after
);
6419 /* Update the edge count in the corresponding flowgraphs. */
6420 if (update_edge_count_p
)
6421 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6423 cfun
->cfg
->x_n_edges
--;
6424 dest_cfun
->cfg
->x_n_edges
++;
6427 /* Remove BB from the original basic block array. */
6428 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6429 cfun
->cfg
->x_n_basic_blocks
--;
6431 /* Grow DEST_CFUN's basic block array if needed. */
6432 cfg
= dest_cfun
->cfg
;
6433 cfg
->x_n_basic_blocks
++;
6434 if (bb
->index
>= cfg
->x_last_basic_block
)
6435 cfg
->x_last_basic_block
= bb
->index
+ 1;
6437 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6438 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6440 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6441 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6444 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6446 /* Remap the variables in phi nodes. */
6447 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6449 gimple phi
= gsi_stmt (si
);
6451 tree op
= PHI_RESULT (phi
);
6455 if (virtual_operand_p (op
))
6457 /* Remove the phi nodes for virtual operands (alias analysis will be
6458 run for the new function, anyway). */
6459 remove_phi_node (&si
, true);
6463 SET_PHI_RESULT (phi
,
6464 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6465 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6467 op
= USE_FROM_PTR (use
);
6468 if (TREE_CODE (op
) == SSA_NAME
)
6469 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6472 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6474 location_t locus
= gimple_phi_arg_location (phi
, i
);
6475 tree block
= LOCATION_BLOCK (locus
);
6477 if (locus
== UNKNOWN_LOCATION
)
6479 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6481 if (d
->new_block
== NULL_TREE
)
6482 locus
= LOCATION_LOCUS (locus
);
6484 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6485 gimple_phi_arg_set_location (phi
, i
, locus
);
6492 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6494 gimple stmt
= gsi_stmt (si
);
6495 struct walk_stmt_info wi
;
6497 memset (&wi
, 0, sizeof (wi
));
6499 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6501 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6503 tree label
= gimple_label_label (stmt
);
6504 int uid
= LABEL_DECL_UID (label
);
6506 gcc_assert (uid
> -1);
6508 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6509 if (old_len
<= (unsigned) uid
)
6511 new_len
= 3 * uid
/ 2 + 1;
6512 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6515 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6516 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6518 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6520 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6521 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6524 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6525 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6527 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6528 gimple_remove_stmt_histograms (cfun
, stmt
);
6530 /* We cannot leave any operands allocated from the operand caches of
6531 the current function. */
6532 free_stmt_operands (stmt
);
6533 push_cfun (dest_cfun
);
6538 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6539 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6541 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6542 if (d
->orig_block
== NULL_TREE
6543 || block
== d
->orig_block
)
6544 e
->goto_locus
= d
->new_block
?
6545 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6546 LOCATION_LOCUS (e
->goto_locus
);
6550 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6551 the outermost EH region. Use REGION as the incoming base EH region. */
6554 find_outermost_region_in_block (struct function
*src_cfun
,
6555 basic_block bb
, eh_region region
)
6557 gimple_stmt_iterator si
;
6559 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6561 gimple stmt
= gsi_stmt (si
);
6562 eh_region stmt_region
;
6565 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6566 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6570 region
= stmt_region
;
6571 else if (stmt_region
!= region
)
6573 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6574 gcc_assert (region
!= NULL
);
6583 new_label_mapper (tree decl
, void *data
)
6585 htab_t hash
= (htab_t
) data
;
6589 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6591 m
= XNEW (struct tree_map
);
6592 m
->hash
= DECL_UID (decl
);
6593 m
->base
.from
= decl
;
6594 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6595 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6596 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6597 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6599 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6600 gcc_assert (*slot
== NULL
);
6607 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6611 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6616 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6619 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6621 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6624 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6626 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6627 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6629 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6634 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6635 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6638 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6642 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6645 /* Discard it from the old loop array. */
6646 (*get_loops (fn1
))[loop
->num
] = NULL
;
6648 /* Place it in the new loop array, assigning it a new number. */
6649 loop
->num
= number_of_loops (fn2
);
6650 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6652 /* Recurse to children. */
6653 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6654 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6657 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6658 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6659 single basic block in the original CFG and the new basic block is
6660 returned. DEST_CFUN must not have a CFG yet.
6662 Note that the region need not be a pure SESE region. Blocks inside
6663 the region may contain calls to abort/exit. The only restriction
6664 is that ENTRY_BB should be the only entry point and it must
6667 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6668 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6669 to the new function.
6671 All local variables referenced in the region are assumed to be in
6672 the corresponding BLOCK_VARS and unexpanded variable lists
6673 associated with DEST_CFUN. */
6676 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6677 basic_block exit_bb
, tree orig_block
)
6679 vec
<basic_block
> bbs
, dom_bbs
;
6680 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6681 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6682 struct function
*saved_cfun
= cfun
;
6683 int *entry_flag
, *exit_flag
;
6684 unsigned *entry_prob
, *exit_prob
;
6685 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6688 htab_t new_label_map
;
6689 struct pointer_map_t
*vars_map
, *eh_map
;
6690 struct loop
*loop
= entry_bb
->loop_father
;
6691 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6692 struct move_stmt_d d
;
6694 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6696 gcc_assert (entry_bb
!= exit_bb
6698 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6700 /* Collect all the blocks in the region. Manually add ENTRY_BB
6701 because it won't be added by dfs_enumerate_from. */
6703 bbs
.safe_push (entry_bb
);
6704 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6706 /* The blocks that used to be dominated by something in BBS will now be
6707 dominated by the new block. */
6708 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6712 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6713 the predecessor edges to ENTRY_BB and the successor edges to
6714 EXIT_BB so that we can re-attach them to the new basic block that
6715 will replace the region. */
6716 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6717 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6718 entry_flag
= XNEWVEC (int, num_entry_edges
);
6719 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
6721 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6723 entry_prob
[i
] = e
->probability
;
6724 entry_flag
[i
] = e
->flags
;
6725 entry_pred
[i
++] = e
->src
;
6731 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6732 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6733 exit_flag
= XNEWVEC (int, num_exit_edges
);
6734 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
6736 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6738 exit_prob
[i
] = e
->probability
;
6739 exit_flag
[i
] = e
->flags
;
6740 exit_succ
[i
++] = e
->dest
;
6752 /* Switch context to the child function to initialize DEST_FN's CFG. */
6753 gcc_assert (dest_cfun
->cfg
== NULL
);
6754 push_cfun (dest_cfun
);
6756 init_empty_tree_cfg ();
6758 /* Initialize EH information for the new function. */
6760 new_label_map
= NULL
;
6763 eh_region region
= NULL
;
6765 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6766 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6768 init_eh_for_function ();
6771 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6772 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6773 new_label_mapper
, new_label_map
);
6777 /* Initialize an empty loop tree. */
6778 struct loops
*loops
= ggc_alloc_cleared_loops ();
6779 init_loops_structure (dest_cfun
, loops
, 1);
6780 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6781 set_loops_for_fn (dest_cfun
, loops
);
6783 /* Move the outlined loop tree part. */
6784 num_nodes
= bbs
.length ();
6785 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6787 if (bb
->loop_father
->header
== bb
)
6789 struct loop
*this_loop
= bb
->loop_father
;
6790 struct loop
*outer
= loop_outer (this_loop
);
6792 /* If the SESE region contains some bbs ending with
6793 a noreturn call, those are considered to belong
6794 to the outermost loop in saved_cfun, rather than
6795 the entry_bb's loop_father. */
6799 num_nodes
-= this_loop
->num_nodes
;
6800 flow_loop_tree_node_remove (bb
->loop_father
);
6801 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6802 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6805 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6808 /* Remove loop exits from the outlined region. */
6809 if (loops_for_fn (saved_cfun
)->exits
)
6810 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6812 void **slot
= htab_find_slot_with_hash
6813 (loops_for_fn (saved_cfun
)->exits
, e
,
6814 htab_hash_pointer (e
), NO_INSERT
);
6816 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6821 /* Adjust the number of blocks in the tree root of the outlined part. */
6822 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6824 /* Setup a mapping to be used by move_block_to_fn. */
6825 loop
->aux
= current_loops
->tree_root
;
6826 loop0
->aux
= current_loops
->tree_root
;
6830 /* Move blocks from BBS into DEST_CFUN. */
6831 gcc_assert (bbs
.length () >= 2);
6832 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6833 vars_map
= pointer_map_create ();
6835 memset (&d
, 0, sizeof (d
));
6836 d
.orig_block
= orig_block
;
6837 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6838 d
.from_context
= cfun
->decl
;
6839 d
.to_context
= dest_cfun
->decl
;
6840 d
.vars_map
= vars_map
;
6841 d
.new_label_map
= new_label_map
;
6843 d
.remap_decls_p
= true;
6845 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6847 /* No need to update edge counts on the last block. It has
6848 already been updated earlier when we detached the region from
6849 the original CFG. */
6850 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6856 /* Loop sizes are no longer correct, fix them up. */
6857 loop
->num_nodes
-= num_nodes
;
6858 for (struct loop
*outer
= loop_outer (loop
);
6859 outer
; outer
= loop_outer (outer
))
6860 outer
->num_nodes
-= num_nodes
;
6861 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6863 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vect_loops
)
6866 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
6871 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
6873 dest_cfun
->has_simduid_loops
= true;
6875 if (aloop
->force_vect
)
6876 dest_cfun
->has_force_vect_loops
= true;
6880 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6884 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6886 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
6887 = BLOCK_SUBBLOCKS (orig_block
);
6888 for (block
= BLOCK_SUBBLOCKS (orig_block
);
6889 block
; block
= BLOCK_CHAIN (block
))
6890 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
6891 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
6894 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
6895 vars_map
, dest_cfun
->decl
);
6898 htab_delete (new_label_map
);
6900 pointer_map_destroy (eh_map
);
6901 pointer_map_destroy (vars_map
);
6903 /* Rewire the entry and exit blocks. The successor to the entry
6904 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6905 the child function. Similarly, the predecessor of DEST_FN's
6906 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6907 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6908 various CFG manipulation function get to the right CFG.
6910 FIXME, this is silly. The CFG ought to become a parameter to
6912 push_cfun (dest_cfun
);
6913 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
6915 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
6918 /* Back in the original function, the SESE region has disappeared,
6919 create a new basic block in its place. */
6920 bb
= create_empty_bb (entry_pred
[0]);
6922 add_bb_to_loop (bb
, loop
);
6923 for (i
= 0; i
< num_entry_edges
; i
++)
6925 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
6926 e
->probability
= entry_prob
[i
];
6929 for (i
= 0; i
< num_exit_edges
; i
++)
6931 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
6932 e
->probability
= exit_prob
[i
];
6935 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
6936 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
6937 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
6955 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6959 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
6961 tree arg
, var
, old_current_fndecl
= current_function_decl
;
6962 struct function
*dsf
;
6963 bool ignore_topmost_bind
= false, any_var
= false;
6966 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
6967 && decl_is_tm_clone (fndecl
));
6968 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
6970 current_function_decl
= fndecl
;
6971 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
6973 arg
= DECL_ARGUMENTS (fndecl
);
6976 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
6977 fprintf (file
, " ");
6978 print_generic_expr (file
, arg
, dump_flags
);
6979 if (flags
& TDF_VERBOSE
)
6980 print_node (file
, "", arg
, 4);
6981 if (DECL_CHAIN (arg
))
6982 fprintf (file
, ", ");
6983 arg
= DECL_CHAIN (arg
);
6985 fprintf (file
, ")\n");
6987 if (flags
& TDF_VERBOSE
)
6988 print_node (file
, "", fndecl
, 2);
6990 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
6991 if (dsf
&& (flags
& TDF_EH
))
6992 dump_eh_tree (file
, dsf
);
6994 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
6996 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
6997 current_function_decl
= old_current_fndecl
;
7001 /* When GIMPLE is lowered, the variables are no longer available in
7002 BIND_EXPRs, so display them separately. */
7003 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7006 ignore_topmost_bind
= true;
7008 fprintf (file
, "{\n");
7009 if (!vec_safe_is_empty (fun
->local_decls
))
7010 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7012 print_generic_decl (file
, var
, flags
);
7013 if (flags
& TDF_VERBOSE
)
7014 print_node (file
, "", var
, 4);
7015 fprintf (file
, "\n");
7019 if (gimple_in_ssa_p (cfun
))
7020 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7022 tree name
= ssa_name (ix
);
7023 if (name
&& !SSA_NAME_VAR (name
))
7025 fprintf (file
, " ");
7026 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7027 fprintf (file
, " ");
7028 print_generic_expr (file
, name
, flags
);
7029 fprintf (file
, ";\n");
7036 if (fun
&& fun
->decl
== fndecl
7038 && basic_block_info_for_function (fun
))
7040 /* If the CFG has been built, emit a CFG-based dump. */
7041 if (!ignore_topmost_bind
)
7042 fprintf (file
, "{\n");
7044 if (any_var
&& n_basic_blocks_for_fn (fun
))
7045 fprintf (file
, "\n");
7047 FOR_EACH_BB_FN (bb
, fun
)
7048 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7050 fprintf (file
, "}\n");
7052 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7054 /* The function is now in GIMPLE form but the CFG has not been
7055 built yet. Emit the single sequence of GIMPLE statements
7056 that make up its body. */
7057 gimple_seq body
= gimple_body (fndecl
);
7059 if (gimple_seq_first_stmt (body
)
7060 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7061 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7062 print_gimple_seq (file
, body
, 0, flags
);
7065 if (!ignore_topmost_bind
)
7066 fprintf (file
, "{\n");
7069 fprintf (file
, "\n");
7071 print_gimple_seq (file
, body
, 2, flags
);
7072 fprintf (file
, "}\n");
7079 /* Make a tree based dump. */
7080 chain
= DECL_SAVED_TREE (fndecl
);
7081 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7083 if (ignore_topmost_bind
)
7085 chain
= BIND_EXPR_BODY (chain
);
7093 if (!ignore_topmost_bind
)
7094 fprintf (file
, "{\n");
7099 fprintf (file
, "\n");
7101 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7102 if (ignore_topmost_bind
)
7103 fprintf (file
, "}\n");
7106 if (flags
& TDF_ENUMERATE_LOCALS
)
7107 dump_enumerated_decls (file
, flags
);
7108 fprintf (file
, "\n\n");
7110 current_function_decl
= old_current_fndecl
;
7113 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7116 debug_function (tree fn
, int flags
)
7118 dump_function_to_file (fn
, stderr
, flags
);
7122 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7125 print_pred_bbs (FILE *file
, basic_block bb
)
7130 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7131 fprintf (file
, "bb_%d ", e
->src
->index
);
7135 /* Print on FILE the indexes for the successors of basic_block BB. */
7138 print_succ_bbs (FILE *file
, basic_block bb
)
7143 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7144 fprintf (file
, "bb_%d ", e
->dest
->index
);
7147 /* Print to FILE the basic block BB following the VERBOSITY level. */
7150 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7152 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7153 memset ((void *) s_indent
, ' ', (size_t) indent
);
7154 s_indent
[indent
] = '\0';
7156 /* Print basic_block's header. */
7159 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7160 print_pred_bbs (file
, bb
);
7161 fprintf (file
, "}, succs = {");
7162 print_succ_bbs (file
, bb
);
7163 fprintf (file
, "})\n");
7166 /* Print basic_block's body. */
7169 fprintf (file
, "%s {\n", s_indent
);
7170 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7171 fprintf (file
, "%s }\n", s_indent
);
7175 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7177 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7178 VERBOSITY level this outputs the contents of the loop, or just its
7182 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7190 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7191 memset ((void *) s_indent
, ' ', (size_t) indent
);
7192 s_indent
[indent
] = '\0';
7194 /* Print loop's header. */
7195 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7197 fprintf (file
, "header = %d", loop
->header
->index
);
7200 fprintf (file
, "deleted)\n");
7204 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7206 fprintf (file
, ", multiple latches");
7207 fprintf (file
, ", niter = ");
7208 print_generic_expr (file
, loop
->nb_iterations
, 0);
7210 if (loop
->any_upper_bound
)
7212 fprintf (file
, ", upper_bound = ");
7213 dump_double_int (file
, loop
->nb_iterations_upper_bound
, true);
7216 if (loop
->any_estimate
)
7218 fprintf (file
, ", estimate = ");
7219 dump_double_int (file
, loop
->nb_iterations_estimate
, true);
7221 fprintf (file
, ")\n");
7223 /* Print loop's body. */
7226 fprintf (file
, "%s{\n", s_indent
);
7228 if (bb
->loop_father
== loop
)
7229 print_loops_bb (file
, bb
, indent
, verbosity
);
7231 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7232 fprintf (file
, "%s}\n", s_indent
);
7236 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7237 spaces. Following VERBOSITY level this outputs the contents of the
7238 loop, or just its structure. */
7241 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7247 print_loop (file
, loop
, indent
, verbosity
);
7248 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7251 /* Follow a CFG edge from the entry point of the program, and on entry
7252 of a loop, pretty print the loop structure on FILE. */
7255 print_loops (FILE *file
, int verbosity
)
7259 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7260 if (bb
&& bb
->loop_father
)
7261 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
7267 debug (struct loop
&ref
)
7269 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
7273 debug (struct loop
*ptr
)
7278 fprintf (stderr
, "<nil>\n");
7281 /* Dump a loop verbosely. */
7284 debug_verbose (struct loop
&ref
)
7286 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
7290 debug_verbose (struct loop
*ptr
)
7295 fprintf (stderr
, "<nil>\n");
7299 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7302 debug_loops (int verbosity
)
7304 print_loops (stderr
, verbosity
);
7307 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7310 debug_loop (struct loop
*loop
, int verbosity
)
7312 print_loop (stderr
, loop
, 0, verbosity
);
7315 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7319 debug_loop_num (unsigned num
, int verbosity
)
7321 debug_loop (get_loop (cfun
, num
), verbosity
);
7324 /* Return true if BB ends with a call, possibly followed by some
7325 instructions that must stay with the call. Return false,
7329 gimple_block_ends_with_call_p (basic_block bb
)
7331 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7332 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7336 /* Return true if BB ends with a conditional branch. Return false,
7340 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7342 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7343 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7347 /* Return true if we need to add fake edge to exit at statement T.
7348 Helper function for gimple_flow_call_edges_add. */
7351 need_fake_edge_p (gimple t
)
7353 tree fndecl
= NULL_TREE
;
7356 /* NORETURN and LONGJMP calls already have an edge to exit.
7357 CONST and PURE calls do not need one.
7358 We don't currently check for CONST and PURE here, although
7359 it would be a good idea, because those attributes are
7360 figured out from the RTL in mark_constant_function, and
7361 the counter incrementation code from -fprofile-arcs
7362 leads to different results from -fbranch-probabilities. */
7363 if (is_gimple_call (t
))
7365 fndecl
= gimple_call_fndecl (t
);
7366 call_flags
= gimple_call_flags (t
);
7369 if (is_gimple_call (t
)
7371 && DECL_BUILT_IN (fndecl
)
7372 && (call_flags
& ECF_NOTHROW
)
7373 && !(call_flags
& ECF_RETURNS_TWICE
)
7374 /* fork() doesn't really return twice, but the effect of
7375 wrapping it in __gcov_fork() which calls __gcov_flush()
7376 and clears the counters before forking has the same
7377 effect as returning twice. Force a fake edge. */
7378 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7379 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7382 if (is_gimple_call (t
))
7388 if (!(call_flags
& ECF_NORETURN
))
7392 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7393 if ((e
->flags
& EDGE_FAKE
) == 0)
7397 if (gimple_code (t
) == GIMPLE_ASM
7398 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7405 /* Add fake edges to the function exit for any non constant and non
7406 noreturn calls (or noreturn calls with EH/abnormal edges),
7407 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7408 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7411 The goal is to expose cases in which entering a basic block does
7412 not imply that all subsequent instructions must be executed. */
7415 gimple_flow_call_edges_add (sbitmap blocks
)
7418 int blocks_split
= 0;
7419 int last_bb
= last_basic_block
;
7420 bool check_last_block
= false;
7422 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7426 check_last_block
= true;
7428 check_last_block
= bitmap_bit_p (blocks
,
7429 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7431 /* In the last basic block, before epilogue generation, there will be
7432 a fallthru edge to EXIT. Special care is required if the last insn
7433 of the last basic block is a call because make_edge folds duplicate
7434 edges, which would result in the fallthru edge also being marked
7435 fake, which would result in the fallthru edge being removed by
7436 remove_fake_edges, which would result in an invalid CFG.
7438 Moreover, we can't elide the outgoing fake edge, since the block
7439 profiler needs to take this into account in order to solve the minimal
7440 spanning tree in the case that the call doesn't return.
7442 Handle this by adding a dummy instruction in a new last basic block. */
7443 if (check_last_block
)
7445 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7446 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7449 if (!gsi_end_p (gsi
))
7452 if (t
&& need_fake_edge_p (t
))
7456 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7459 gsi_insert_on_edge (e
, gimple_build_nop ());
7460 gsi_commit_edge_inserts ();
7465 /* Now add fake edges to the function exit for any non constant
7466 calls since there is no way that we can determine if they will
7468 for (i
= 0; i
< last_bb
; i
++)
7470 basic_block bb
= BASIC_BLOCK (i
);
7471 gimple_stmt_iterator gsi
;
7472 gimple stmt
, last_stmt
;
7477 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7480 gsi
= gsi_last_nondebug_bb (bb
);
7481 if (!gsi_end_p (gsi
))
7483 last_stmt
= gsi_stmt (gsi
);
7486 stmt
= gsi_stmt (gsi
);
7487 if (need_fake_edge_p (stmt
))
7491 /* The handling above of the final block before the
7492 epilogue should be enough to verify that there is
7493 no edge to the exit block in CFG already.
7494 Calling make_edge in such case would cause us to
7495 mark that edge as fake and remove it later. */
7496 #ifdef ENABLE_CHECKING
7497 if (stmt
== last_stmt
)
7499 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7500 gcc_assert (e
== NULL
);
7504 /* Note that the following may create a new basic block
7505 and renumber the existing basic blocks. */
7506 if (stmt
!= last_stmt
)
7508 e
= split_block (bb
, stmt
);
7512 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7516 while (!gsi_end_p (gsi
));
7521 verify_flow_info ();
7523 return blocks_split
;
7526 /* Removes edge E and all the blocks dominated by it, and updates dominance
7527 information. The IL in E->src needs to be updated separately.
7528 If dominance info is not available, only the edge E is removed.*/
7531 remove_edge_and_dominated_blocks (edge e
)
7533 vec
<basic_block
> bbs_to_remove
= vNULL
;
7534 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7538 bool none_removed
= false;
7540 basic_block bb
, dbb
;
7543 if (!dom_info_available_p (CDI_DOMINATORS
))
7549 /* No updating is needed for edges to exit. */
7550 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7552 if (cfgcleanup_altered_bbs
)
7553 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7558 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7559 that is not dominated by E->dest, then this set is empty. Otherwise,
7560 all the basic blocks dominated by E->dest are removed.
7562 Also, to DF_IDOM we store the immediate dominators of the blocks in
7563 the dominance frontier of E (i.e., of the successors of the
7564 removed blocks, if there are any, and of E->dest otherwise). */
7565 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7570 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7572 none_removed
= true;
7577 df
= BITMAP_ALLOC (NULL
);
7578 df_idom
= BITMAP_ALLOC (NULL
);
7581 bitmap_set_bit (df_idom
,
7582 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7585 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7586 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7588 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7590 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7591 bitmap_set_bit (df
, f
->dest
->index
);
7594 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7595 bitmap_clear_bit (df
, bb
->index
);
7597 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7599 bb
= BASIC_BLOCK (i
);
7600 bitmap_set_bit (df_idom
,
7601 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7605 if (cfgcleanup_altered_bbs
)
7607 /* Record the set of the altered basic blocks. */
7608 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7609 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7612 /* Remove E and the cancelled blocks. */
7617 /* Walk backwards so as to get a chance to substitute all
7618 released DEFs into debug stmts. See
7619 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7621 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7622 delete_basic_block (bbs_to_remove
[i
]);
7625 /* Update the dominance information. The immediate dominator may change only
7626 for blocks whose immediate dominator belongs to DF_IDOM:
7628 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7629 removal. Let Z the arbitrary block such that idom(Z) = Y and
7630 Z dominates X after the removal. Before removal, there exists a path P
7631 from Y to X that avoids Z. Let F be the last edge on P that is
7632 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7633 dominates W, and because of P, Z does not dominate W), and W belongs to
7634 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7635 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7637 bb
= BASIC_BLOCK (i
);
7638 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7640 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7641 bbs_to_fix_dom
.safe_push (dbb
);
7644 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7647 BITMAP_FREE (df_idom
);
7648 bbs_to_remove
.release ();
7649 bbs_to_fix_dom
.release ();
7652 /* Purge dead EH edges from basic block BB. */
7655 gimple_purge_dead_eh_edges (basic_block bb
)
7657 bool changed
= false;
7660 gimple stmt
= last_stmt (bb
);
7662 if (stmt
&& stmt_can_throw_internal (stmt
))
7665 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7667 if (e
->flags
& EDGE_EH
)
7669 remove_edge_and_dominated_blocks (e
);
7679 /* Purge dead EH edges from basic block listed in BLOCKS. */
7682 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7684 bool changed
= false;
7688 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7690 basic_block bb
= BASIC_BLOCK (i
);
7692 /* Earlier gimple_purge_dead_eh_edges could have removed
7693 this basic block already. */
7694 gcc_assert (bb
|| changed
);
7696 changed
|= gimple_purge_dead_eh_edges (bb
);
7702 /* Purge dead abnormal call edges from basic block BB. */
7705 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7707 bool changed
= false;
7710 gimple stmt
= last_stmt (bb
);
7712 if (!cfun
->has_nonlocal_label
7713 && !cfun
->calls_setjmp
)
7716 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7719 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7721 if (e
->flags
& EDGE_ABNORMAL
)
7723 if (e
->flags
& EDGE_FALLTHRU
)
7724 e
->flags
&= ~EDGE_ABNORMAL
;
7726 remove_edge_and_dominated_blocks (e
);
7736 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7739 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7741 bool changed
= false;
7745 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7747 basic_block bb
= BASIC_BLOCK (i
);
7749 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7750 this basic block already. */
7751 gcc_assert (bb
|| changed
);
7753 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7759 /* This function is called whenever a new edge is created or
7763 gimple_execute_on_growing_pred (edge e
)
7765 basic_block bb
= e
->dest
;
7767 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7768 reserve_phi_args_for_new_edge (bb
);
7771 /* This function is called immediately before edge E is removed from
7772 the edge vector E->dest->preds. */
7775 gimple_execute_on_shrinking_pred (edge e
)
7777 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7778 remove_phi_args (e
);
7781 /*---------------------------------------------------------------------------
7782 Helper functions for Loop versioning
7783 ---------------------------------------------------------------------------*/
7785 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7786 of 'first'. Both of them are dominated by 'new_head' basic block. When
7787 'new_head' was created by 'second's incoming edge it received phi arguments
7788 on the edge by split_edge(). Later, additional edge 'e' was created to
7789 connect 'new_head' and 'first'. Now this routine adds phi args on this
7790 additional edge 'e' that new_head to second edge received as part of edge
7794 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7795 basic_block new_head
, edge e
)
7798 gimple_stmt_iterator psi1
, psi2
;
7800 edge e2
= find_edge (new_head
, second
);
7802 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7803 edge, we should always have an edge from NEW_HEAD to SECOND. */
7804 gcc_assert (e2
!= NULL
);
7806 /* Browse all 'second' basic block phi nodes and add phi args to
7807 edge 'e' for 'first' head. PHI args are always in correct order. */
7809 for (psi2
= gsi_start_phis (second
),
7810 psi1
= gsi_start_phis (first
);
7811 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7812 gsi_next (&psi2
), gsi_next (&psi1
))
7814 phi1
= gsi_stmt (psi1
);
7815 phi2
= gsi_stmt (psi2
);
7816 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7817 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7822 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7823 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7824 the destination of the ELSE part. */
7827 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7828 basic_block second_head ATTRIBUTE_UNUSED
,
7829 basic_block cond_bb
, void *cond_e
)
7831 gimple_stmt_iterator gsi
;
7832 gimple new_cond_expr
;
7833 tree cond_expr
= (tree
) cond_e
;
7836 /* Build new conditional expr */
7837 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7838 NULL_TREE
, NULL_TREE
);
7840 /* Add new cond in cond_bb. */
7841 gsi
= gsi_last_bb (cond_bb
);
7842 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7844 /* Adjust edges appropriately to connect new head with first head
7845 as well as second head. */
7846 e0
= single_succ_edge (cond_bb
);
7847 e0
->flags
&= ~EDGE_FALLTHRU
;
7848 e0
->flags
|= EDGE_FALSE_VALUE
;
7852 /* Do book-keeping of basic block BB for the profile consistency checker.
7853 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7854 then do post-pass accounting. Store the counting in RECORD. */
7856 gimple_account_profile_record (basic_block bb
, int after_pass
,
7857 struct profile_record
*record
)
7859 gimple_stmt_iterator i
;
7860 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7862 record
->size
[after_pass
]
7863 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7864 if (profile_status
== PROFILE_READ
)
7865 record
->time
[after_pass
]
7866 += estimate_num_insns (gsi_stmt (i
),
7867 &eni_time_weights
) * bb
->count
;
7868 else if (profile_status
== PROFILE_GUESSED
)
7869 record
->time
[after_pass
]
7870 += estimate_num_insns (gsi_stmt (i
),
7871 &eni_time_weights
) * bb
->frequency
;
7875 struct cfg_hooks gimple_cfg_hooks
= {
7877 gimple_verify_flow_info
,
7878 gimple_dump_bb
, /* dump_bb */
7879 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
7880 create_bb
, /* create_basic_block */
7881 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
7882 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
7883 gimple_can_remove_branch_p
, /* can_remove_branch_p */
7884 remove_bb
, /* delete_basic_block */
7885 gimple_split_block
, /* split_block */
7886 gimple_move_block_after
, /* move_block_after */
7887 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
7888 gimple_merge_blocks
, /* merge_blocks */
7889 gimple_predict_edge
, /* predict_edge */
7890 gimple_predicted_by_p
, /* predicted_by_p */
7891 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
7892 gimple_duplicate_bb
, /* duplicate_block */
7893 gimple_split_edge
, /* split_edge */
7894 gimple_make_forwarder_block
, /* make_forward_block */
7895 NULL
, /* tidy_fallthru_edge */
7896 NULL
, /* force_nonfallthru */
7897 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
7898 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
7899 gimple_flow_call_edges_add
, /* flow_call_edges_add */
7900 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
7901 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
7902 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
7903 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
7904 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
7905 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
7906 flush_pending_stmts
, /* flush_pending_stmts */
7907 gimple_empty_block_p
, /* block_empty_p */
7908 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
7909 gimple_account_profile_record
,
7913 /* Split all critical edges. */
7916 split_critical_edges (void)
7922 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7923 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7924 mappings around the calls to split_edge. */
7925 start_recording_case_labels ();
7928 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7930 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
7932 /* PRE inserts statements to edges and expects that
7933 since split_critical_edges was done beforehand, committing edge
7934 insertions will not split more edges. In addition to critical
7935 edges we must split edges that have multiple successors and
7936 end by control flow statements, such as RESX.
7937 Go ahead and split them too. This matches the logic in
7938 gimple_find_edge_insert_loc. */
7939 else if ((!single_pred_p (e
->dest
)
7940 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
7941 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7942 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
7943 && !(e
->flags
& EDGE_ABNORMAL
))
7945 gimple_stmt_iterator gsi
;
7947 gsi
= gsi_last_bb (e
->src
);
7948 if (!gsi_end_p (gsi
)
7949 && stmt_ends_bb_p (gsi_stmt (gsi
))
7950 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
7951 && !gimple_call_builtin_p (gsi_stmt (gsi
),
7957 end_recording_case_labels ();
7963 const pass_data pass_data_split_crit_edges
=
7965 GIMPLE_PASS
, /* type */
7966 "crited", /* name */
7967 OPTGROUP_NONE
, /* optinfo_flags */
7968 false, /* has_gate */
7969 true, /* has_execute */
7970 TV_TREE_SPLIT_EDGES
, /* tv_id */
7971 PROP_cfg
, /* properties_required */
7972 PROP_no_crit_edges
, /* properties_provided */
7973 0, /* properties_destroyed */
7974 0, /* todo_flags_start */
7975 TODO_verify_flow
, /* todo_flags_finish */
7978 class pass_split_crit_edges
: public gimple_opt_pass
7981 pass_split_crit_edges (gcc::context
*ctxt
)
7982 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
7985 /* opt_pass methods: */
7986 unsigned int execute () { return split_critical_edges (); }
7988 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
7989 }; // class pass_split_crit_edges
7994 make_pass_split_crit_edges (gcc::context
*ctxt
)
7996 return new pass_split_crit_edges (ctxt
);
8000 /* Build a ternary operation and gimplify it. Emit code before GSI.
8001 Return the gimple_val holding the result. */
8004 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8005 tree type
, tree a
, tree b
, tree c
)
8008 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8010 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8013 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8017 /* Build a binary operation and gimplify it. Emit code before GSI.
8018 Return the gimple_val holding the result. */
8021 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8022 tree type
, tree a
, tree b
)
8026 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8029 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8033 /* Build a unary operation and gimplify it. Emit code before GSI.
8034 Return the gimple_val holding the result. */
8037 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8042 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8045 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
/* Emit return warnings: warn when a %<noreturn%> function actually
   returns, and (under -Wreturn-type) when control reaches the end of a
   non-void function without returning a value.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Give the target a chance to suppress return warnings for this
     function entirely.  */
  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on a FUNCTION_DECL marks it noreturn (see the
     warning text below).  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Prefer the location of an explicit return statement; note the
	 condition below assigns LOCATION as a side effect and breaks
	 on the first usable one.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      /* Fall back to the end of the function if no return statement
	 carried a location.  */
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  /* A value-less return that hasn't been flagged as
	     no-warning is the offender.  */
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
8109 /* Given a basic block B which ends with a conditional and has
8110 precisely two successors, determine which of the edges is taken if
8111 the conditional is true and which is taken if the conditional is
8112 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8115 extract_true_false_edges_from_block (basic_block b
,
8119 edge e
= EDGE_SUCC (b
, 0);
8121 if (e
->flags
& EDGE_TRUE_VALUE
)
8124 *false_edge
= EDGE_SUCC (b
, 1);
8129 *true_edge
= EDGE_SUCC (b
, 1);
/* Pass descriptor for the function-return warning pass; consumed by the
   pass_warn_function_return constructor below.  */

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper: delegates to execute_warn_function_return above.  */

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return
8165 make_pass_warn_function_return (gcc::context
*ctxt
)
8167 return new pass_warn_function_return (ctxt
);
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Recurse into every statement container.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose result is consumed is fine.  */
	  if (gimple_call_lhs (g))
	    break;
	  /* Skip internal calls.  */
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		/* No known callee decl (e.g. an indirect call); emit
		   the decl-less form of the warning.  */
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
/* Entry point for the warn_unused_result pass: walk the whole body of
   the current function.  */

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}
/* Gate: run the pass only when -Wunused-result is in effect.  */

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}
/* Pass descriptor for the warn_unused_result pass; consumed by the
   pass_warn_unused_result constructor below.  */

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper: gates on gate_warn_unused_result and delegates to
   run_warn_unused_result above.  */

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result
8281 make_pass_warn_unused_result (gcc::context
*ctxt
)
8283 return new pass_warn_unused_result (ctxt
);
/* IPA passes, compilation of earlier functions, or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const, or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup, verify_flow_info would fail.  */
/* Fix up the CFG after IPA transformations: rescale profile counts to
   the cgraph node's count, purge edges made dead by const/pure/noreturn
   discovery or EH cleanup, and terminate successor-less blocks with
   __builtin_unreachable.  Returns a TODO_* mask for the pass manager.  */

static unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Compute the factor by which all profile counts must be scaled so
     the entry block matches the (possibly IPA-updated) cgraph count.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* Calls newly discovered const/pure cannot take abnormal
		 edges; purge them and schedule SSA/CFG cleanup.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* Calls newly discovered noreturn need their trailing
		 code removed.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Drop EH edges whose statement can no longer throw.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt)
			  & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  /* Counts changed, so the hot/cold classification may have too.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  /* CFG cleanup may remove loops; tell the loop optimizer.  */
  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
/* Pass descriptor for the CFG fixup pass; consumed by the
   pass_fixup_cfg constructor below.  */

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Pass wrapper: delegates to execute_fixup_cfg above.  Clonable so the
   pass can appear more than once in the pipeline.  */

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg
8424 make_pass_fixup_cfg (gcc::context
*ctxt
)
8426 return new pass_fixup_cfg (ctxt
);
/* Garbage collection support for edge_def.  */

/* Marker overloads defined elsewhere; declared here so the edge_def
   marker below can mark the fields it references.  */
extern void gt_ggc_mx (tree &);
extern void gt_ggc_mx (gimple &);
extern void gt_ggc_mx (rtx &);
extern void gt_ggc_mx (basic_block &);
/* Mark the GC-managed fields of edge E: its source and destination
   blocks, the pending insns, and the goto_locus block.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* The insns field is a union; which member is live depends on the
     current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

/* PCH note overloads defined elsewhere; declared here so the edge_def
   routine below can process the fields it references.  */
extern void gt_pch_nx (tree &);
extern void gt_pch_nx (gimple &);
extern void gt_pch_nx (rtx &);
extern void gt_pch_nx (basic_block &);
/* Note for PCH the GC-managed fields of edge E: its source and
   destination blocks, the pending insns, and the goto_locus block.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* The insns field is a union; which member is live depends on the
     current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* Apply the pointer operator OP (with its COOKIE) to every pointer
   field of edge E, for PCH pointer relocation.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* The insns field is a union; which member is live depends on the
     current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}